diff --git a/dataset_info/Agilex_Cobot_Magic_Agilex_Split_Aloha_organize_desk_fail.yaml b/dataset_info/Agilex_Cobot_Magic_Agilex_Split_Aloha_organize_desk_fail.yaml index 63e87a48e55d650f6d9e341831f3147dbaeb03ce..cd2007a6d0c5c3e84580da269c12e813e563c840 100644 --- a/dataset_info/Agilex_Cobot_Magic_Agilex_Split_Aloha_organize_desk_fail.yaml +++ b/dataset_info/Agilex_Cobot_Magic_Agilex_Split_Aloha_organize_desk_fail.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: furniture level2: table @@ -81,44 +81,65 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: take out the empty pen holder by hand, and then put the milk and - cola into the basket on the left. place the pen holders in the middle one by one, - and then put the papers one by one into the box on the right. +task_instruction: +- take out the empty pen holder by hand, and then put the milk and cola into the basket + on the left. place the pen holders in the middle one by one, and then put the papers + one by one into the box on the right. 
sub_tasks: -- Place the red marker pen in pen holder with the right gripper -- Place the orange marker pen in pen holder with the right gripper -- Place the waste paper in the box with the right gripper -- End -- Grasp the black marker pen with the left gripper -- Place the red marker pen in pen holder with the left gripper -- Grasp the red marker pen with the right gripper -- Place the cola can in the basket with the left gripper -- Grasp the black marker pen with the right gripper -- Grasp the orange marker pen with the right gripper -- Place the black marker pen in pen holder with the right gripper -- Place the orange marker pen in pen holder with the left gripper -- Grasp the cola can with the left gripper -- Grasp the orange marker pen with the left gripper -- Place the milk in the basket with the left gripper -- Grasp the milk with the left gripper -- Grasp the waste paper with the right gripper -- Place the black marker pen in pen holder with the left gripper -- Grasp the red marker pen with the left gripper -- 'null' +- subtask: Place the red marker pen in pen holder with the right gripper + subtask_index: 0 +- subtask: Place the orange marker pen in pen holder with the right gripper + subtask_index: 1 +- subtask: Place the waste paper in the box with the right gripper + subtask_index: 2 +- subtask: End + subtask_index: 3 +- subtask: Grasp the black marker pen with the left gripper + subtask_index: 4 +- subtask: Place the red marker pen in pen holder with the left gripper + subtask_index: 5 +- subtask: Grasp the red marker pen with the right gripper + subtask_index: 6 +- subtask: Place the cola can in the basket with the left gripper + subtask_index: 7 +- subtask: Grasp the black marker pen with the right gripper + subtask_index: 8 +- subtask: Grasp the orange marker pen with the right gripper + subtask_index: 9 +- subtask: Place the black marker pen in pen holder with the right gripper + subtask_index: 10 +- subtask: Place the orange marker pen in pen 
holder with the left gripper + subtask_index: 11 +- subtask: Grasp the cola can with the left gripper + subtask_index: 12 +- subtask: Grasp the orange marker pen with the left gripper + subtask_index: 13 +- subtask: Place the milk in the basket with the left gripper + subtask_index: 14 +- subtask: Grasp the milk with the left gripper + subtask_index: 15 +- subtask: Grasp the waste paper with the right gripper + subtask_index: 16 +- subtask: Place the black marker pen in pen holder with the left gripper + subtask_index: 17 +- subtask: Grasp the red marker pen with the left gripper + subtask_index: 18 +- subtask: 'null' + subtask_index: 19 atomic_actions: - grasp - pick - place -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -126,13 +147,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -140,8 +158,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 117 total_frames: 274558 fps: 30 @@ -226,11 +243,9 @@ data_structure: 'Agilex_Split_Aloha_organize_desk_fail_qced_hardlink/ |-- info.yaml `-- 
README.md' -splits: &id011 +splits: train: 0:116 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -503,7 +518,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -511,7 +526,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -538,219 +552,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. 
- extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_Agilex_Split_Aloha_organize_desk_fail - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - take out the empty pen holder by hand, and then put the milk and cola into the - basket on the left. place the pen holders in the middle one by one, and then put - the papers one by one into the box on the right. - sub_tasks: - - subtask: Place the red marker pen in pen holder with the right gripper - subtask_index: 0 - - subtask: Place the orange marker pen in pen holder with the right gripper - subtask_index: 1 - - subtask: Place the waste paper in the box with the right gripper - subtask_index: 2 - - subtask: End - subtask_index: 3 - - subtask: Grasp the black marker pen with the left gripper - subtask_index: 4 - - subtask: Place the red marker pen in pen holder with the left gripper - subtask_index: 5 - - subtask: Grasp the red marker pen with the right gripper - subtask_index: 6 - - subtask: Place the cola can in the basket with the left gripper - subtask_index: 7 - - subtask: Grasp the black marker pen with the right gripper - subtask_index: 8 - - subtask: Grasp the orange marker pen with the right gripper - subtask_index: 9 - - subtask: Place the black marker pen in pen holder with the right gripper - subtask_index: 10 - - subtask: Place the orange marker pen in pen holder with the left gripper - subtask_index: 11 - - subtask: Grasp the cola can with the left gripper - subtask_index: 12 - - subtask: Grasp the orange marker pen with the left gripper - subtask_index: 13 - - subtask: Place the milk in the basket with the 
left gripper - subtask_index: 14 - - subtask: Grasp the milk with the left gripper - subtask_index: 15 - - subtask: Grasp the waste paper with the right gripper - subtask_index: 16 - - subtask: Place the black marker pen in pen holder with the left gripper - subtask_index: 17 - - subtask: Grasp the red marker pen with the left gripper - subtask_index: 18 - - subtask: 'null' - subtask_index: 19 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 274558 - dataset_size: 4.17 GB - data_structure: 'Agilex_Split_Aloha_organize_desk_fail_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(105 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, 
Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 100K-1M diff --git a/dataset_info/Agilex_Cobot_Magic_classify_objects_eight.yaml b/dataset_info/Agilex_Cobot_Magic_classify_objects_eight.yaml index 69892f4ad4a5ce3ee57b6e3b0d55336c4502204b..08f01755feb91f64edb9fb4361d38de655ea0b96 100644 --- a/dataset_info/Agilex_Cobot_Magic_classify_objects_eight.yaml +++ b/dataset_info/Agilex_Cobot_Magic_classify_objects_eight.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -147,117 +147,213 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: Place multiple objects separately in different baskets. +task_instruction: +- Place multiple objects separately in different baskets. sub_tasks: -- Place the pear in the light basket with right gripper -- Pick up the shampoo with left gripper -- Place the pink marker pen in the dark basket with right gripper -- Pick up the pear with left gripper -- Place the lime in the light basket with right gripper -- Place the laundry detergent in the dark basket with right gripper -- Place the mango in the light basket with right gripper -- Pick up the light brown cup with left gripper -- Place the orange in the light basket with right gripper -- Pick up the dark brown cup with left gripper -- Place the dark brown cup in the dark basket with left gripper -- Pick up the light brown cup with right gripper -- Pick up pink marker pen with right gripper -- Pick up the lime with left gripper -- Pick up kiwi with left gripper -- Pick up mango with left gripper -- Place the shampoo in the dark basket with right gripper -- Place the dark brown cup in the dark basket with right gripper -- Pick up the pomegranate with right gripper -- Pick up the pear with right gripper -- Pick up the dark brown cup with right gripper -- Pick up the red cup with right gripper -- Place the pomegranate in the light basket with left gripper -- Pick up the avocado with right gripper -- Pick up the hard facial cleanser with 
left gripper -- Place the Incense box in the dark basket with left gripper -- Pick up the shampoo with right gripper -- Place the banana in the light basket with right gripper -- Place the laundry detergent in the dark basket with left gripper -- Pick up the orange with right gripper -- Place the mangosteen in the light basket with right gripper -- Place the hard facial cleanser in the dark basket with right gripper -- Pick up Incense box with right gripper -- Place the red marker in the dark basket with right gripper -- Pick up the lemon with left gripper -- Place the banana in the light basket with left gripper -- Pick up Incense box with left gripper -- Pick up kiwi with right gripper -- Pick up the mango with right gripper -- Place the lemon in the light basket with left gripper -- Pick up the laundry detergent with left gripper -- Pick up toothpaste with right gripper -- Pick up the bread with right gripper -- Pick up toothpaste with left gripper -- Place the red cup in the dark basket with left gripper -- Place the red cup in the dark basket with right gripper -- Place the lemon in the light basket with right gripper -- Pick up banana with left gripper -- Place the avocado in the light basket with right gripper -- Pick up egg yolk pastry with left gripper -- Place the light brown cup in the dark basket with left gripper -- Pick up egg yolk pastry with right gripper -- Place the orange in the light basket with left gripper -- Place the hard facial cleanser in the dark basket with left gripper -- Pick up the lemon with right gripper -- Place the kiwi in the light basket with right gripper -- End -- Pick up gray cup with left gripper -- Place the pomegranate in the light basket with right gripper -- Pick up banana with right gripper -- Place the blackboard eraser in the dark basket with left gripper -- Pick up the red marker with right gripper -- Place the red marker in the dark basket with left gripper -- Place the gray cup in the dark basket with left gripper 
-- Pick up the red cup with left gripper -- Place the kiwi in the light basket with left gripper -- Pick up the mangosteen with left gripper -- Pick up the orange with left gripper -- Place the mangosteen in the light basket with left gripper -- Place the bread in the light basket with right gripper -- Place the toothpaste in the dark basket with right gripper -- Pick up the lime with right gripper -- Place the mango in the light basket with left gripper -- Pick up the mangosteen with right gripper -- Place the pink marker pen in the dark basket with left gripper -- Pick up the red marker with left gripper -- Place the pear in the light basket with left gripper -- Pick up blackboard eraser with right gripper -- Place the blackboard eraser in the dark basket with right gripper -- Place the shampoo in the dark basket with left gripper -- Place the egg yolk pastry in the light basket with left gripper -- Pick up the pomegranate with left gripper -- Place the toothpaste in the dark basket with left gripper -- Pick up blackboard eraser with left gripper -- Place the Incense box in the dark basket with right gripper -- Pick up the hard facial cleanser with right gripper -- Place the apple in the light basket with right gripper -- Place the avocado in the light basket with left gripper -- Pick up pink marker pen with left gripper -- Pick up the avocado with left gripper -- Pick up the laundry detergent with right gripper -- Place the egg yolk pastry in the light basket with right gripper -- Pick up the apple with right gripper -- Place the lime in the light basket with left gripper -- 'null' +- subtask: Place the pear in the light basket with right gripper + subtask_index: 0 +- subtask: Pick up the shampoo with left gripper + subtask_index: 1 +- subtask: Place the pink marker pen in the dark basket with right gripper + subtask_index: 2 +- subtask: Pick up the pear with left gripper + subtask_index: 3 +- subtask: Place the lime in the light basket with right gripper + 
subtask_index: 4 +- subtask: Place the laundry detergent in the dark basket with right gripper + subtask_index: 5 +- subtask: Place the mango in the light basket with right gripper + subtask_index: 6 +- subtask: Pick up the light brown cup with left gripper + subtask_index: 7 +- subtask: Place the orange in the light basket with right gripper + subtask_index: 8 +- subtask: Pick up the dark brown cup with left gripper + subtask_index: 9 +- subtask: Place the dark brown cup in the dark basket with left gripper + subtask_index: 10 +- subtask: Pick up the light brown cup with right gripper + subtask_index: 11 +- subtask: Pick up pink marker pen with right gripper + subtask_index: 12 +- subtask: Pick up the lime with left gripper + subtask_index: 13 +- subtask: Pick up kiwi with left gripper + subtask_index: 14 +- subtask: Pick up mango with left gripper + subtask_index: 15 +- subtask: Place the shampoo in the dark basket with right gripper + subtask_index: 16 +- subtask: Place the dark brown cup in the dark basket with right gripper + subtask_index: 17 +- subtask: Pick up the pomegranate with right gripper + subtask_index: 18 +- subtask: Pick up the pear with right gripper + subtask_index: 19 +- subtask: Pick up the dark brown cup with right gripper + subtask_index: 20 +- subtask: Pick up the red cup with right gripper + subtask_index: 21 +- subtask: Place the pomegranate in the light basket with left gripper + subtask_index: 22 +- subtask: Pick up the avocado with right gripper + subtask_index: 23 +- subtask: Pick up the hard facial cleanser with left gripper + subtask_index: 24 +- subtask: Place the Incense box in the dark basket with left gripper + subtask_index: 25 +- subtask: Pick up the shampoo with right gripper + subtask_index: 26 +- subtask: Place the banana in the light basket with right gripper + subtask_index: 27 +- subtask: Place the laundry detergent in the dark basket with left gripper + subtask_index: 28 +- subtask: Pick up the orange with right gripper 
+ subtask_index: 29 +- subtask: Place the mangosteen in the light basket with right gripper + subtask_index: 30 +- subtask: Place the hard facial cleanser in the dark basket with right gripper + subtask_index: 31 +- subtask: Pick up Incense box with right gripper + subtask_index: 32 +- subtask: Place the red marker in the dark basket with right gripper + subtask_index: 33 +- subtask: Pick up the lemon with left gripper + subtask_index: 34 +- subtask: Place the banana in the light basket with left gripper + subtask_index: 35 +- subtask: Pick up Incense box with left gripper + subtask_index: 36 +- subtask: Pick up kiwi with right gripper + subtask_index: 37 +- subtask: Pick up the mango with right gripper + subtask_index: 38 +- subtask: Place the lemon in the light basket with left gripper + subtask_index: 39 +- subtask: Pick up the laundry detergent with left gripper + subtask_index: 40 +- subtask: Pick up toothpaste with right gripper + subtask_index: 41 +- subtask: Pick up the bread with right gripper + subtask_index: 42 +- subtask: Pick up toothpaste with left gripper + subtask_index: 43 +- subtask: Place the red cup in the dark basket with left gripper + subtask_index: 44 +- subtask: Place the red cup in the dark basket with right gripper + subtask_index: 45 +- subtask: Place the lemon in the light basket with right gripper + subtask_index: 46 +- subtask: Pick up banana with left gripper + subtask_index: 47 +- subtask: Place the avocado in the light basket with right gripper + subtask_index: 48 +- subtask: Pick up egg yolk pastry with left gripper + subtask_index: 49 +- subtask: Place the light brown cup in the dark basket with left gripper + subtask_index: 50 +- subtask: Pick up egg yolk pastry with right gripper + subtask_index: 51 +- subtask: Place the orange in the light basket with left gripper + subtask_index: 52 +- subtask: Place the hard facial cleanser in the dark basket with left gripper + subtask_index: 53 +- subtask: Pick up the lemon with right 
gripper + subtask_index: 54 +- subtask: Place the kiwi in the light basket with right gripper + subtask_index: 55 +- subtask: End + subtask_index: 56 +- subtask: Pick up gray cup with left gripper + subtask_index: 57 +- subtask: Place the pomegranate in the light basket with right gripper + subtask_index: 58 +- subtask: Pick up banana with right gripper + subtask_index: 59 +- subtask: Place the blackboard eraser in the dark basket with left gripper + subtask_index: 60 +- subtask: Pick up the red marker with right gripper + subtask_index: 61 +- subtask: Place the red marker in the dark basket with left gripper + subtask_index: 62 +- subtask: Place the gray cup in the dark basket with left gripper + subtask_index: 63 +- subtask: Pick up the red cup with left gripper + subtask_index: 64 +- subtask: Place the kiwi in the light basket with left gripper + subtask_index: 65 +- subtask: Pick up the mangosteen with left gripper + subtask_index: 66 +- subtask: Pick up the orange with left gripper + subtask_index: 67 +- subtask: Place the mangosteen in the light basket with left gripper + subtask_index: 68 +- subtask: Place the bread in the light basket with right gripper + subtask_index: 69 +- subtask: Place the toothpaste in the dark basket with right gripper + subtask_index: 70 +- subtask: Pick up the lime with right gripper + subtask_index: 71 +- subtask: Place the mango in the light basket with left gripper + subtask_index: 72 +- subtask: Pick up the mangosteen with right gripper + subtask_index: 73 +- subtask: Place the pink marker pen in the dark basket with left gripper + subtask_index: 74 +- subtask: Pick up the red marker with left gripper + subtask_index: 75 +- subtask: Place the pear in the light basket with left gripper + subtask_index: 76 +- subtask: Pick up blackboard eraser with right gripper + subtask_index: 77 +- subtask: Place the blackboard eraser in the dark basket with right gripper + subtask_index: 78 +- subtask: Place the shampoo in the dark basket 
with left gripper + subtask_index: 79 +- subtask: Place the egg yolk pastry in the light basket with left gripper + subtask_index: 80 +- subtask: Pick up the pomegranate with left gripper + subtask_index: 81 +- subtask: Place the toothpaste in the dark basket with left gripper + subtask_index: 82 +- subtask: Pick up blackboard eraser with left gripper + subtask_index: 83 +- subtask: Place the Incense box in the dark basket with right gripper + subtask_index: 84 +- subtask: Pick up the hard facial cleanser with right gripper + subtask_index: 85 +- subtask: Place the apple in the light basket with right gripper + subtask_index: 86 +- subtask: Place the avocado in the light basket with left gripper + subtask_index: 87 +- subtask: Pick up pink marker pen with left gripper + subtask_index: 88 +- subtask: Pick up the avocado with left gripper + subtask_index: 89 +- subtask: Pick up the laundry detergent with right gripper + subtask_index: 90 +- subtask: Place the egg yolk pastry in the light basket with right gripper + subtask_index: 91 +- subtask: Pick up the apple with right gripper + subtask_index: 92 +- subtask: Place the lime in the light basket with left gripper + subtask_index: 93 +- subtask: 'null' + subtask_index: 94 atomic_actions: - grasp - lift - lower -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -265,13 +361,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -279,8 +372,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 197 total_frames: 337837 fps: 30 @@ -365,11 +457,9 @@ data_structure: 'Agilex_Cobot_Magic_classify_objects_eight_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:196 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -642,7 +732,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -650,7 +740,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. 
support_info: For technical support, please open an issue on our GitHub repository. @@ -677,367 +766,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_classify_objects_eight - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - Place multiple objects separately in different baskets. 
- sub_tasks: - - subtask: Place the pear in the light basket with right gripper - subtask_index: 0 - - subtask: Pick up the shampoo with left gripper - subtask_index: 1 - - subtask: Place the pink marker pen in the dark basket with right gripper - subtask_index: 2 - - subtask: Pick up the pear with left gripper - subtask_index: 3 - - subtask: Place the lime in the light basket with right gripper - subtask_index: 4 - - subtask: Place the laundry detergent in the dark basket with right gripper - subtask_index: 5 - - subtask: Place the mango in the light basket with right gripper - subtask_index: 6 - - subtask: Pick up the light brown cup with left gripper - subtask_index: 7 - - subtask: Place the orange in the light basket with right gripper - subtask_index: 8 - - subtask: Pick up the dark brown cup with left gripper - subtask_index: 9 - - subtask: Place the dark brown cup in the dark basket with left gripper - subtask_index: 10 - - subtask: Pick up the light brown cup with right gripper - subtask_index: 11 - - subtask: Pick up pink marker pen with right gripper - subtask_index: 12 - - subtask: Pick up the lime with left gripper - subtask_index: 13 - - subtask: Pick up kiwi with left gripper - subtask_index: 14 - - subtask: Pick up mango with left gripper - subtask_index: 15 - - subtask: Place the shampoo in the dark basket with right gripper - subtask_index: 16 - - subtask: Place the dark brown cup in the dark basket with right gripper - subtask_index: 17 - - subtask: Pick up the pomegranate with right gripper - subtask_index: 18 - - subtask: Pick up the pear with right gripper - subtask_index: 19 - - subtask: Pick up the dark brown cup with right gripper - subtask_index: 20 - - subtask: Pick up the red cup with right gripper - subtask_index: 21 - - subtask: Place the pomegranate in the light basket with left gripper - subtask_index: 22 - - subtask: Pick up the avocado with right gripper - subtask_index: 23 - - subtask: Pick up the hard facial cleanser with left 
gripper - subtask_index: 24 - - subtask: Place the Incense box in the dark basket with left gripper - subtask_index: 25 - - subtask: Pick up the shampoo with right gripper - subtask_index: 26 - - subtask: Place the banana in the light basket with right gripper - subtask_index: 27 - - subtask: Place the laundry detergent in the dark basket with left gripper - subtask_index: 28 - - subtask: Pick up the orange with right gripper - subtask_index: 29 - - subtask: Place the mangosteen in the light basket with right gripper - subtask_index: 30 - - subtask: Place the hard facial cleanser in the dark basket with right gripper - subtask_index: 31 - - subtask: Pick up Incense box with right gripper - subtask_index: 32 - - subtask: Place the red marker in the dark basket with right gripper - subtask_index: 33 - - subtask: Pick up the lemon with left gripper - subtask_index: 34 - - subtask: Place the banana in the light basket with left gripper - subtask_index: 35 - - subtask: Pick up Incense box with left gripper - subtask_index: 36 - - subtask: Pick up kiwi with right gripper - subtask_index: 37 - - subtask: Pick up the mango with right gripper - subtask_index: 38 - - subtask: Place the lemon in the light basket with left gripper - subtask_index: 39 - - subtask: Pick up the laundry detergent with left gripper - subtask_index: 40 - - subtask: Pick up toothpaste with right gripper - subtask_index: 41 - - subtask: Pick up the bread with right gripper - subtask_index: 42 - - subtask: Pick up toothpaste with left gripper - subtask_index: 43 - - subtask: Place the red cup in the dark basket with left gripper - subtask_index: 44 - - subtask: Place the red cup in the dark basket with right gripper - subtask_index: 45 - - subtask: Place the lemon in the light basket with right gripper - subtask_index: 46 - - subtask: Pick up banana with left gripper - subtask_index: 47 - - subtask: Place the avocado in the light basket with right gripper - subtask_index: 48 - - subtask: Pick up egg 
yolk pastry with left gripper - subtask_index: 49 - - subtask: Place the light brown cup in the dark basket with left gripper - subtask_index: 50 - - subtask: Pick up egg yolk pastry with right gripper - subtask_index: 51 - - subtask: Place the orange in the light basket with left gripper - subtask_index: 52 - - subtask: Place the hard facial cleanser in the dark basket with left gripper - subtask_index: 53 - - subtask: Pick up the lemon with right gripper - subtask_index: 54 - - subtask: Place the kiwi in the light basket with right gripper - subtask_index: 55 - - subtask: End - subtask_index: 56 - - subtask: Pick up gray cup with left gripper - subtask_index: 57 - - subtask: Place the pomegranate in the light basket with right gripper - subtask_index: 58 - - subtask: Pick up banana with right gripper - subtask_index: 59 - - subtask: Place the blackboard eraser in the dark basket with left gripper - subtask_index: 60 - - subtask: Pick up the red marker with right gripper - subtask_index: 61 - - subtask: Place the red marker in the dark basket with left gripper - subtask_index: 62 - - subtask: Place the gray cup in the dark basket with left gripper - subtask_index: 63 - - subtask: Pick up the red cup with left gripper - subtask_index: 64 - - subtask: Place the kiwi in the light basket with left gripper - subtask_index: 65 - - subtask: Pick up the mangosteen with left gripper - subtask_index: 66 - - subtask: Pick up the orange with left gripper - subtask_index: 67 - - subtask: Place the mangosteen in the light basket with left gripper - subtask_index: 68 - - subtask: Place the bread in the light basket with right gripper - subtask_index: 69 - - subtask: Place the toothpaste in the dark basket with right gripper - subtask_index: 70 - - subtask: Pick up the lime with right gripper - subtask_index: 71 - - subtask: Place the mango in the light basket with left gripper - subtask_index: 72 - - subtask: Pick up the mangosteen with right gripper - subtask_index: 73 - - 
subtask: Place the pink marker pen in the dark basket with left gripper - subtask_index: 74 - - subtask: Pick up the red marker with left gripper - subtask_index: 75 - - subtask: Place the pear in the light basket with left gripper - subtask_index: 76 - - subtask: Pick up blackboard eraser with right gripper - subtask_index: 77 - - subtask: Place the blackboard eraser in the dark basket with right gripper - subtask_index: 78 - - subtask: Place the shampoo in the dark basket with left gripper - subtask_index: 79 - - subtask: Place the egg yolk pastry in the light basket with left gripper - subtask_index: 80 - - subtask: Pick up the pomegranate with left gripper - subtask_index: 81 - - subtask: Place the toothpaste in the dark basket with left gripper - subtask_index: 82 - - subtask: Pick up blackboard eraser with left gripper - subtask_index: 83 - - subtask: Place the Incense box in the dark basket with right gripper - subtask_index: 84 - - subtask: Pick up the hard facial cleanser with right gripper - subtask_index: 85 - - subtask: Place the apple in the light basket with right gripper - subtask_index: 86 - - subtask: Place the avocado in the light basket with left gripper - subtask_index: 87 - - subtask: Pick up pink marker pen with left gripper - subtask_index: 88 - - subtask: Pick up the avocado with left gripper - subtask_index: 89 - - subtask: Pick up the laundry detergent with right gripper - subtask_index: 90 - - subtask: Place the egg yolk pastry in the light basket with right gripper - subtask_index: 91 - - subtask: Pick up the apple with right gripper - subtask_index: 92 - - subtask: Place the lime in the light basket with left gripper - subtask_index: 93 - - subtask: 'null' - subtask_index: 94 - atomic_actions: - - grasp - - lift - - lower - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 337837 - dataset_size: 4.51 GB - data_structure: 'Agilex_Cobot_Magic_classify_objects_eight_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (185 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 100K-1M diff --git a/dataset_info/Agilex_Cobot_Magic_classify_objects_six.yaml b/dataset_info/Agilex_Cobot_Magic_classify_objects_six.yaml index c007dea8f0a819eabd4f4009b661d82be3abe41b..459f8368eb605db552c649d712de54adecaf2a54 100644 --- a/dataset_info/Agilex_Cobot_Magic_classify_objects_six.yaml +++ b/dataset_info/Agilex_Cobot_Magic_classify_objects_six.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -87,50 +87,79 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: place multiple objects separately in different baskets. +task_instruction: +- place multiple objects separately in different baskets. 
sub_tasks: -- Place the orange in the light basket with left gripper -- Grasp the xx with the right gripper -- Pick up the facial cleanser with left gripper -- End -- Place the facial cleanser in the dark basket with left gripper -- Place the XX into the basket on the left with the right gripper -- Place the lime in the light basket with right gripper -- Place the laundry detergent in the dark basket with right gripper -- Place the orange in the light basket with right gripper -- Pick up the lime with left gripper -- Place the XX into the basket on the right with the left gripper -- Place the XX into the basket on the left with the left gripper -- Place the XX into the basket on the right with the right gripper -- Grasp the xx with the left gripper -- Pick up the orange with left gripper -- Place the bread in the light basket with right gripper -- Pick up the laundry detergent with right gripper -- Abnormal -- Pick up the laundry detergent with left gripper -- Pick up the facial cleanser with right gripper -- Pick up the lime with right gripper -- Pick up the bread with right gripper -- Place the brown cup in the dark basket with left gripper -- Place the laundry detergent in the dark basket with left gripper -- Pick up the orange with right gripper -- Place the lime in the light basket with left gripper -- Pick up the brown cup with left gripper -- 'null' +- subtask: Place the orange in the light basket with left gripper + subtask_index: 0 +- subtask: Grasp the xx with the right gripper + subtask_index: 1 +- subtask: Pick up the facial cleanser with left gripper + subtask_index: 2 +- subtask: End + subtask_index: 3 +- subtask: Place the facial cleanser in the dark basket with left gripper + subtask_index: 4 +- subtask: Place the XX into the basket on the left with the right gripper + subtask_index: 5 +- subtask: Place the lime in the light basket with right gripper + subtask_index: 6 +- subtask: Place the laundry detergent in the dark basket with right gripper + 
subtask_index: 7 +- subtask: Place the orange in the light basket with right gripper + subtask_index: 8 +- subtask: Pick up the lime with left gripper + subtask_index: 9 +- subtask: Place the XX into the basket on the right with the left gripper + subtask_index: 10 +- subtask: Place the XX into the basket on the left with the left gripper + subtask_index: 11 +- subtask: Place the XX into the basket on the right with the right gripper + subtask_index: 12 +- subtask: Grasp the xx with the left gripper + subtask_index: 13 +- subtask: Pick up the orange with left gripper + subtask_index: 14 +- subtask: Place the bread in the light basket with right gripper + subtask_index: 15 +- subtask: Pick up the laundry detergent with right gripper + subtask_index: 16 +- subtask: Abnormal + subtask_index: 17 +- subtask: Pick up the laundry detergent with left gripper + subtask_index: 18 +- subtask: Pick up the facial cleanser with right gripper + subtask_index: 19 +- subtask: Pick up the lime with right gripper + subtask_index: 20 +- subtask: Pick up the bread with right gripper + subtask_index: 21 +- subtask: Place the brown cup in the dark basket with left gripper + subtask_index: 22 +- subtask: Place the laundry detergent in the dark basket with left gripper + subtask_index: 23 +- subtask: Pick up the orange with right gripper + subtask_index: 24 +- subtask: Place the lime in the light basket with left gripper + subtask_index: 25 +- subtask: Pick up the brown cup with left gripper + subtask_index: 26 +- subtask: 'null' + subtask_index: 27 atomic_actions: - grasp - lift - lower -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -138,13 +167,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -152,8 +178,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 199 total_frames: 302506 fps: 30 @@ -238,11 +263,9 @@ data_structure: 'Agilex_Cobot_Magic_classify_objects_six_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:198 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -515,7 +538,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -523,7 +546,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. 
support_info: For technical support, please open an issue on our GitHub repository. @@ -550,233 +572,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_classify_objects_six - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - place multiple objects separately in different baskets. 
- sub_tasks: - - subtask: Place the orange in the light basket with left gripper - subtask_index: 0 - - subtask: Grasp the xx with the right gripper - subtask_index: 1 - - subtask: Pick up the facial cleanser with left gripper - subtask_index: 2 - - subtask: End - subtask_index: 3 - - subtask: Place the facial cleanser in the dark basket with left gripper - subtask_index: 4 - - subtask: Place the XX into the basket on the left with the right gripper - subtask_index: 5 - - subtask: Place the lime in the light basket with right gripper - subtask_index: 6 - - subtask: Place the laundry detergent in the dark basket with right gripper - subtask_index: 7 - - subtask: Place the orange in the light basket with right gripper - subtask_index: 8 - - subtask: Pick up the lime with left gripper - subtask_index: 9 - - subtask: Place the XX into the basket on the right with the left gripper - subtask_index: 10 - - subtask: Place the XX into the basket on the left with the left gripper - subtask_index: 11 - - subtask: Place the XX into the basket on the right with the right gripper - subtask_index: 12 - - subtask: Grasp the xx with the left gripper - subtask_index: 13 - - subtask: Pick up the orange with left gripper - subtask_index: 14 - - subtask: Place the bread in the light basket with right gripper - subtask_index: 15 - - subtask: Pick up the laundry detergent with right gripper - subtask_index: 16 - - subtask: Abnormal - subtask_index: 17 - - subtask: Pick up the laundry detergent with left gripper - subtask_index: 18 - - subtask: Pick up the facial cleanser with right gripper - subtask_index: 19 - - subtask: Pick up the lime with right gripper - subtask_index: 20 - - subtask: Pick up the bread with right gripper - subtask_index: 21 - - subtask: Place the brown cup in the dark basket with left gripper - subtask_index: 22 - - subtask: Place the laundry detergent in the dark basket with left gripper - subtask_index: 23 - - subtask: Pick up the orange with right gripper - 
subtask_index: 24 - - subtask: Place the lime in the light basket with left gripper - subtask_index: 25 - - subtask: Pick up the brown cup with left gripper - subtask_index: 26 - - subtask: 'null' - subtask_index: 27 - atomic_actions: - - grasp - - lift - - lower - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 302506 - dataset_size: 3.88 GB - data_structure: 'Agilex_Cobot_Magic_classify_objects_six_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(187 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, 
Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 100K-1M diff --git a/dataset_info/Agilex_Cobot_Magic_close_drawer_bottom.yaml b/dataset_info/Agilex_Cobot_Magic_close_drawer_bottom.yaml index f02f67d2d4ae6ac5f5817d07999a33dadf9d27ee..2e31314eedbbfd2633bac62bc260ff00de27ec61 100644 --- a/dataset_info/Agilex_Cobot_Magic_close_drawer_bottom.yaml +++ b/dataset_info/Agilex_Cobot_Magic_close_drawer_bottom.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -45,25 +45,30 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: close the bottom drawer. +task_instruction: +- close the bottom drawer. sub_tasks: -- End -- Use the right gripper to contact the bottom layer of the storage cabinet -- Push the bottom drawer closed -- 'null' +- subtask: End + subtask_index: 0 +- subtask: Use the right gripper to contact the bottom layer of the storage cabinet + subtask_index: 1 +- subtask: Push the bottom drawer closed + subtask_index: 2 +- subtask: 'null' + subtask_index: 3 atomic_actions: - grasp - push -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -71,13 +76,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -85,8 +87,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 16270 fps: 30 @@ -187,11 +188,9 @@ data_structure: 'Agilex_Cobot_Magic_close_drawer_bottom_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -464,7 +463,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -472,7 +471,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. 
support_info: For technical support, please open an issue on our GitHub repository. @@ -499,200 +497,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_close_drawer_bottom - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: office_workspace - level2: office - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - close the bottom drawer. - sub_tasks: - - subtask: End - subtask_index: 0 - - subtask: Use the right gripper to contact the bottom layer of the storage cabinet - subtask_index: 1 - - subtask: Push the bottom drawer closed - subtask_index: 2 - - subtask: 'null' - subtask_index: 3 - atomic_actions: - - grasp - - push - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 16270 - dataset_size: 157.62 MB - data_structure: 'Agilex_Cobot_Magic_close_drawer_bottom_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- backup - - | |-- data - - | | `-- chunk-000 - - | `-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (38 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. 
- homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_close_drawer_top.yaml b/dataset_info/Agilex_Cobot_Magic_close_drawer_top.yaml index 83e07169e10b95bf8c6532585b2884060dde38fb..2aa9b35199024d9caef73ccb2e29b339eac5ab8c 100644 --- a/dataset_info/Agilex_Cobot_Magic_close_drawer_top.yaml +++ b/dataset_info/Agilex_Cobot_Magic_close_drawer_top.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -45,25 +45,30 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: close the top drawer. 
+task_instruction: +- close the top drawer. sub_tasks: -- End -- Push the top drawer closed -- Use the right gripper to touch the topmost layer of the storage cabinet -- 'null' +- subtask: End + subtask_index: 0 +- subtask: Push the top drawer closed + subtask_index: 1 +- subtask: Use the right gripper to touch the topmost layer of the storage cabinet + subtask_index: 2 +- subtask: 'null' + subtask_index: 3 atomic_actions: - grasp - push -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -71,13 +76,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -85,8 +87,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 9800 fps: 30 @@ -187,11 +188,9 @@ data_structure: 'Agilex_Cobot_Magic_close_drawer_top_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: 
video shape: @@ -464,7 +463,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -472,7 +471,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -499,200 +497,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_close_drawer_top - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: office_workspace - level2: office - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - close the top drawer. 
- sub_tasks: - - subtask: End - subtask_index: 0 - - subtask: Push the top drawer closed - subtask_index: 1 - - subtask: Use the right gripper to touch the topmost layer of the storage cabinet - subtask_index: 2 - - subtask: 'null' - subtask_index: 3 - atomic_actions: - - grasp - - push - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 9800 - dataset_size: 96.54 MB - data_structure: 'Agilex_Cobot_Magic_close_drawer_top_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- backup - - | |-- data - - | | `-- chunk-000 - - | `-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(38 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang 
Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Agilex_Cobot_Magic_close_drawer_upper.yaml b/dataset_info/Agilex_Cobot_Magic_close_drawer_upper.yaml index eb39d5a5f59f9091f2fbad94bda289f664e8e4f2..824eb88260fffd42a221091ad76a3ba481c271b5 100644 --- a/dataset_info/Agilex_Cobot_Magic_close_drawer_upper.yaml +++ b/dataset_info/Agilex_Cobot_Magic_close_drawer_upper.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -45,27 +45,34 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: close the upper drawer. +task_instruction: +- close the upper drawer. sub_tasks: -- Use the right gripper to touch the topmost layer of the storage cabinet -- Push the top drawer closed -- Use the right gripper to contact the middle shelf of the cabinet -- End -- Push the middle drawer closed -- 'null' +- subtask: Use the right gripper to touch the topmost layer of the storage cabinet + subtask_index: 0 +- subtask: Push the top drawer closed + subtask_index: 1 +- subtask: Use the right gripper to contact the middle shelf of the cabinet + subtask_index: 2 +- subtask: End + subtask_index: 3 +- subtask: Push the middle drawer closed + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - push -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -73,13 +80,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -87,8 +91,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 173 total_frames: 49994 fps: 30 @@ -189,11 +192,9 @@ data_structure: 'Agilex_Cobot_Magic_close_drawer_upper_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:172 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -466,7 +467,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -474,7 +475,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. 
support_info: For technical support, please open an issue on our GitHub repository. @@ -501,204 +501,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_close_drawer_upper - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: office_workspace - level2: office - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - close the upper drawer. - sub_tasks: - - subtask: Use the right gripper to touch the topmost layer of the storage cabinet - subtask_index: 0 - - subtask: Push the top drawer closed - subtask_index: 1 - - subtask: Use the right gripper to contact the middle shelf of the cabinet - subtask_index: 2 - - subtask: End - subtask_index: 3 - - subtask: Push the middle drawer closed - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - push - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 49994 - dataset_size: 522.40 MB - data_structure: 'Agilex_Cobot_Magic_close_drawer_upper_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- backup - - | |-- data - - | | `-- chunk-000 - - | `-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (161 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. 
- homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_connect_block.yaml b/dataset_info/Agilex_Cobot_Magic_connect_block.yaml index abeab3fd54f060c52cb27db61b99a58e0866f2d4..a5a687d1f3813589ee5c9f45436d8bfb6d1656ad 100644 --- a/dataset_info/Agilex_Cobot_Magic_connect_block.yaml +++ b/dataset_info/Agilex_Cobot_Magic_connect_block.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -45,321 +45,1183 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. 
-task_result: 'null' -task_instruction: According to the building block template in front, assemble the - scattered blocks together. +task_instruction: +- According to the building block template in front, assemble the scattered blocks + together. sub_tasks: -- Put the yellow build block on the right of the green build block -- Put the blue build block on the behind of the green build block -- Put the yellow build block in the center of the table -- Put the blue build block on the right of the yellow build block -- Put the green build block on the red build block -- Put the yellow build block on the behind of the green build block -- Put the orange build block on the right of the yellow build block -- Put the orange build block on the right of the red build block -- Put the orange build block on the right of the blue build block -- Put the green build block on the left of the blue build block -- Put the blue build block on the left of the yellow build block -- Put the yellow build block on the right of the green build block -- Put the orange build block on the front of the blue build block -- Put the orange build block on the left of the green build block -- Put the blue build block on the right of the green build block -- Put the green build block on the behind of the blue build block -- Put the orange build block on the right of the build block -- Put the orange build block on the right of the green build block -- Put the red build block on the behind of the orange build block -- Put the red build block on the front of the yellow build block -- Put the orange build block on the front of the yellow build block -- Put the blue build block on the behind of the green build block -- Put the orange build block in the center of the table -- Put the yellow build block in the center of the table -- Put the green build block on the behind of the yellow build block -- Put the orange build block on the left of the blue build block -- Put the red build block on the right of 
the blue build block -- Put the yellow build block in the center of the table -- Put the red build block on the behind of the blue build block -- Put the blue build block on the behind of the orange build block -- Put the orange build block on the behind of the green build block -- Put the red build block on the right of the yellow build block -- Put the yellow build block on the front of the orange build block -- Put the blue build block on the right of the green build block -- Put the blue build block on the right of the red build block -- Put the red build block on the behind of the orange build block -- Put the yellow build block on the behind of the green build block -- Put the red build block on the right of the blue build block -- Put the green build block on the behind of the yellow build block -- Put the yellow build block on the behind of the green build block -- Put the orange build block on the behind of the red build block -- Put theyellow build block on the behind of the blue build block -- Put the green build block on the behind of the yellow build block -- Put the red build block on the right of the orange build block -- Put the yellow build block on the right of the red build block -- Put the blue build block on the right of the orange build block -- Put the green build block on the right of the yellow build block -- Put the orange build block in the center of the table -- Put the green build block on the orange build block -- Put the yellow build block on the right of the blue build block -- Put the green build block on the right of the red build block -- Put the green build block on the blue build block -- Put the green build block on the right of the orange build block -- Put the blue build block in the center of the table -- Put the orange build block on the behind of the green build block -- Put the red build block on the right of the yellow build block -- Put the blue build block on the behind of the green build block -- Put the orange build 
block on the right of the blue build block -- Put the yellow build block on the behind of the orange build block -- Put the orange build block in the center of the table -- Put the blue build block on the right of the yellow build block -- Put the orange build block on the behind of the yellow build block -- Put the yellow build block on the behind of the green build block -- Put the blue build block on the front of the orange build block -- Put the green build block on the behind of the red build block -- Put the green build block in the center of the table -- Put the green build block on the right of the yellow build block -- Put the red build block on the right of the blue build block -- Put the red build block on the behind of the green build block -- Put the blue build block on the behind of the orange build block -- Put the yellow build block on the right of the red build block -- Put the blue build block in the center of the table -- Put the red build block on the behind of the yellow build block -- Put the yellow build block on the right of the red build block -- Put the green build block on the right of the orange build block -- Put the red build block on the blue build block -- Put the orange build block on the left of the yellow build block -- Put the green build block on the right of the red build block -- Put the red build block on the left of the orange build block -- Put the orange build block on the right of the blue build block -- Put the red build block on the right of the orange build block -- Put the orange build block on the behind of the yellow build block -- Put the yellow build block in the center of the table -- Put the yellow build block on the behind of the red build block -- Put the yellow build block on the behind of the blue build block -- Put the orange build block on the behind of the red build block -- Put the yellow build block in the center of the table -- Put the green build block on the right of the blue build block -- Put the 
orange build block on the right of the yellow build block -- Put the blue build block on the right of the yellow build block -- Put the red build block on the right of the green build block -- Put the orange build block on the behind of the yellow build block -- Put the yellow build block on the blue build block -- Put the red build block on the right of the orange build block -- Put the bluebuild block on the right of the yellow build block -- Put the red build block in the center of the table -- Put the red build block on the blue build block -- Put the orange build block on the right of the blue build block -- Put the blue build block on the behind of the orange build block -- Put the yellow build block on the right of the red build block -- Put the yellow build block on the behind of the green build block -- Put the yellow build block on the right of the orange build block -- Put the orange build block on the behind of the blue build block -- Put the orange build block on the right of the red build block -- Put the green build block on the behind of the orange build block -- Put the blue build block on the behind of the yellow build block -- Put the yellow build block on the right of the orange build block -- Put the green build block on the behind of the yellow build block -- Put the green build block on the right of the red build block -- Put the red build block on the front of the blue build block -- Put the red build block on the behind of the green' build block -- Put the red build block on the behind of the orange build block -- Put the red build block on the left of the green build block -- Put the green build block on the behind of the yellow build block -- Put blue the build block on the right of the red build block -- Put the green build block on the orange build block -- Put the orange build block on the right of the red build block -- Put the blue build block on the right of the green build block -- Put the yellow build block on the right of the 
blue build block -- Put the orange build block on the behind of the blue build block -- Put the yellow build block in the center of the table -- Put the yellow build block on the right of the green build block -- Put the green build block on the behind of the yellow build block -- Put the blue build block in the center of the table -- Put the orange build block in the center of the table -- Put the yellow build block on the right of the green build block -- Put the red build block on the right of the yellow build block -- Put the yellow build block on the behind of the red build block -- Put the red build block on the behind of the green build block -- Put the green build block on the behind of the blue build block -- Put the green build block on the behind of the orange build block -- Put the blue build block on the right of the green build block -- Put the green build block in the center of the table -- Put the green build block on the behind of the blue build block -- Put the green build block on the left of the orange build block -- Put the red build block on the behind of the yellow build block -- Put the yellow build block on the right of the green build block -- Put the blue build block on the behind of the red build block -- Put the orange build block on the behind of the red build block -- Put the blue build block on the behind of the green build block -- Put the yellow build block on the blue build block -- Put the red build block on the front of the green build block -- Put the red build block on the left of the ornage build block -- Put the green build block in the center of the table -- Put the red build block on the right of the green build block -- Put the yellow build block on the right of the orange build block -- Put the yellow build block on the right of the blue build block -- Put the XX build block on the XX build block -- Put the yellow build block in the center of the table -- Put the orange build block on the green build block -- End -- Put 
the blue build block on the right of the red build block -- Put the green build block on the right of the orange build block -- Put the greem build block on the behind of the blue build block -- Put the left build block on the left of the green build block -- Put the green build block on the right of the orange build block -- Put the red build block on the left of the blue build block -- Put the blue build block on the right of the orange build block -- Put the yellow build block on the behind of the orange build block -- Put the green build block on the behind of the blue build block -- Put the yellow build block on the right of the orange build block -- Put the blue build block on the behind of the orange build block -- Put the red build block on the front of the green build block -- Put the red build block on the front of the yellow build block -- Put the yellow build block on the behind of the blue build block -- Put the blue build block on the orange build block -- Put the green build block in the center of the table -- Put the orange build block on the front of the blue build block -- Put the blue build block on the right of the yellow build block -- Put the blue build block on the left of the yellow build block -- Put the orange build block on the right of the yellow build block -- Put the orange build block on the left of the red build block -- Put the green build block in the center of the table -- Put the red build block on the right of the blue build block -- Put the blue build block on the right of the yellow build block -- Put the green build block on the right of the orange build block -- Put the blue build block in the center of the table -- Put the green build block on the right of the red build block -- Put the green build block on the right of the blue build block -- Put the yellow build block in the center of the table -- Abnormal -- Put the red build block on the behind of the orange build block -- Put the red build block on the yellow build 
block -- Put the orange build block on the right of the yellow build block -- Put the red build block on the right of the yellow build block -- Put the orange build block on the right of the blue build block -- Put the orange build block on the orange build block -- Put the blue build block on the behind of the red build block -- Put the yellow build block on the right of the blue build block -- Put the yellow build block on the behind of the green build block -- Put the yellow build block on the left of the blue build block -- Put the yellow build block on the right of the orange build block -- Put the yellow build block on the blue build block -- Put the yellow build block on the behind of the blue build block -- Put the green build block on the right of the blue build block -- Put the orange build block on the right of the yellow build block -- Put the red build block on the behind of the green build block -- Put the red build block in the center of the table -- Put the yellow build block on the behind of the blue build block -- Put the green build block on the behind of the orange build block -- Put the yellow build block on the behind of the red build block -- Put the green build block on the right of the yellow build block -- Put the green build block on the red build block -- Put the blue build block on the right of the orange build block -- Put the red build block on the behind of the orange build block -- Put the red build block in the center of the table -- Put the red build block in the center of the table -- Put the green build block on the right of the orange build block -- Put the green build block on the front of the blue build block -- Put the yellow build block on the behind of the orange build block -- Put the yellow build block on the behind of the blue build block -- Put the green build block on the blue build block -- Put the blue build block on the right of the yellow build block -- Put the blue build block on the right of the orange build 
block -- Put the orange build block on the left of the red build block -- Put the orange build block on the right of the blue build block -- Put the blue build block on the front of the green build block -- Put the yellow build block on the orange build block -- Put the yellow build block on the front of the red build block -- Put the orange build block on the behind of the red build block -- Put the yellow build block on the behind of the green build block -- Put the blue build block on the right of orange the build block -- Put the red build block on the right of the blue build block -- Put the green build block on the behind of the orange build block -- Put the green build block on the behind of the yellow build block -- Put the yellow build block on the right of the green build block -- Put the orange build block on the right of the green build block -- Put the orange build block on the right of the yellow build block -- Put the red build block on the green build block -- Put the red build block on the right of the blue build block -- Put the blue build block on the behind of the green build block -- Put the orange build block on the right of the red build block -- Put the blue build block on the red build block -- Put the orange build block on the right of the blue build block -- Put the orange build block on the left of the yellow build block -- Put the right build block on the right of the yellow build block -- Put the green build block on the right of the red build block -- Put the orange build block on the right of the green build block -- Put the blue build block in the center of the table -- Put the orange build block on the behind of the yellow build block -- Put the orange build block on the right of the blue build block -- Put the orange build block on the behind of the blue build block -- Put the red build block on the behind of the yellow build block -- Put the ornage build block on the behind of the green build block -- Put the blue build block on 
the left of the yellow build block -- Put the green build block on the front of the blue build block -- Put the green build block on the behind of the red build block -- Put the green build block on the right of the orange build block -- Put the green build block on the right of the orange build block -- Put the yellow build block on the behind of the red build block -- Put the blue build block on the green build block -- Put the blue build block on the right of the orange build block -- Put the orange build block on the right of the green build block -- Put the orange build block on the behind of the yellow build block -- Put the green build block in the center of the table -- Put the blue build block on the behind of the orange build block -- Put the green build block on the left of the orange build block -- Put the green build block on the right of the yellow build block -- Put the green build block on the left of the blue build block -- Put the orange build block on the right of the green build block -- Put the green build block on the orange build block -- Put the blue build block on the behind of the green build block -- Put the yellow build block on the behind of the red build block -- Put the orange build block on the green build block -- Put the red build block on the behind of the blue build block -- Put the blue build block on the right of the orange build block -- move the build block to the center of the table -- Put the green build block on the right of the red build block -- Put the red build block on the behind of the green build block -- Put the red build block on the behind of the blue build block -- Put the yellow build block on the behind of the orange build block -- Put the red build block on the right of the orange build block -- Put the green build block on the right of the yellow build block -- Put the red build block on the right of the yellow build block -- Put the red build block in the center of the table -- Put the red build block on 
the right of the blue build block -- Put the yellow build block on the behind of the green build block -- Put the red build block on the right of the orange build block -- Put the blue build block on the right of the orange build block -- Put the orange build block on the behind of the green build block -- Put the blue build block on the front of the green build block -- Put the ornage build block on the behind of the blue build block -- Put the yellow build block on the behind of the orange build block -- Put the green build block on the behind of the blue build block -- Put the green build block on the left of the blue build block -- Put the blue build block in the center of the table -- Put the green build block on the right of the yellow build block -- Put the yellow build block on the behind of the blue build block -- Put the green build block on the right of the orange build block -- Put the green build block on the blue build block -- Put the orange build block on the behind of the blue build block -- Put the red build block on the behind of the yellow build block -- Put the orange build block on the blue build block -- Put the green build block on the right of the orange build block -- Put the orange build block on the left of the red build block -- Put the green build block in the center of the table -- 'null' +- subtask: 'Put the yellow build block on the right of the green build block + + ' + subtask_index: 0 +- subtask: 'Put the blue build block on the behind of the green build block + + ' + subtask_index: 1 +- subtask: Put the yellow build block in the center of the table + subtask_index: 2 +- subtask: 'Put the blue build block on the right of the yellow build block + + ' + subtask_index: 3 +- subtask: 'Put the green build block on the red build block + + ' + subtask_index: 4 +- subtask: 'Put the yellow build block on the behind of the green build block + + ' + subtask_index: 5 +- subtask: 'Put the orange build block on the right of the yellow build 
block + + ' + subtask_index: 6 +- subtask: 'Put the orange build block on the right of the red build block + + ' + subtask_index: 7 +- subtask: 'Put the orange build block on the right of the blue build block + + ' + subtask_index: 8 +- subtask: 'Put the green build block on the left of the blue build block + + ' + subtask_index: 9 +- subtask: 'Put the blue build block on the left of the yellow build block + + ' + subtask_index: 10 +- subtask: 'Put the yellow build block on the right of the green build block + + ' + subtask_index: 11 +- subtask: 'Put the orange build block on the front of the blue build block + + ' + subtask_index: 12 +- subtask: 'Put the orange build block on the left of the green build block + + ' + subtask_index: 13 +- subtask: 'Put the blue build block on the right of the green build block + + ' + subtask_index: 14 +- subtask: 'Put the green build block on the behind of the blue build block + + ' + subtask_index: 15 +- subtask: 'Put the orange build block on the right of the build block + + ' + subtask_index: 16 +- subtask: 'Put the orange build block on the right of the green build block + + ' + subtask_index: 17 +- subtask: 'Put the red build block on the behind of the orange build block + + ' + subtask_index: 18 +- subtask: 'Put the red build block on the front of the yellow build block + + ' + subtask_index: 19 +- subtask: 'Put the orange build block on the front of the yellow build block + + ' + subtask_index: 20 +- subtask: 'Put the blue build block on the behind of the green build block + + ' + subtask_index: 21 +- subtask: 'Put the orange build block in the center of the table + + ' + subtask_index: 22 +- subtask: 'Put the yellow build block in the center of the table + + ' + subtask_index: 23 +- subtask: 'Put the green build block on the behind of the yellow build block + + ' + subtask_index: 24 +- subtask: 'Put the orange build block on the left of the blue build block + + ' + subtask_index: 25 +- subtask: 'Put the red build block on 
the right of the blue build block + + ' + subtask_index: 26 +- subtask: 'Put the yellow build block in the center of the table + + ' + subtask_index: 27 +- subtask: 'Put the red build block on the behind of the blue build block + + ' + subtask_index: 28 +- subtask: 'Put the blue build block on the behind of the orange build block + + ' + subtask_index: 29 +- subtask: 'Put the orange build block on the behind of the green build block + + ' + subtask_index: 30 +- subtask: 'Put the red build block on the right of the yellow build block + + ' + subtask_index: 31 +- subtask: 'Put the yellow build block on the front of the orange build block + + ' + subtask_index: 32 +- subtask: 'Put the blue build block on the right of the green build block + + ' + subtask_index: 33 +- subtask: 'Put the blue build block on the right of the red build block + + ' + subtask_index: 34 +- subtask: 'Put the red build block on the behind of the orange build block + + ' + subtask_index: 35 +- subtask: 'Put the yellow build block on the behind of the green build block + + ' + subtask_index: 36 +- subtask: 'Put the red build block on the right of the blue build block + + ' + subtask_index: 37 +- subtask: 'Put the green build block on the behind of the yellow build block + + ' + subtask_index: 38 +- subtask: 'Put the yellow build block on the behind of the green build block + + ' + subtask_index: 39 +- subtask: 'Put the orange build block on the behind of the red build block + + ' + subtask_index: 40 +- subtask: 'Put theyellow build block on the behind of the blue build block + + ' + subtask_index: 41 +- subtask: 'Put the green build block on the behind of the yellow build block + + ' + subtask_index: 42 +- subtask: 'Put the red build block on the right of the orange build block + + ' + subtask_index: 43 +- subtask: 'Put the yellow build block on the right of the red build block + + ' + subtask_index: 44 +- subtask: 'Put the blue build block on the right of the orange build block + + ' + 
subtask_index: 45 +- subtask: 'Put the green build block on the right of the yellow build block + + ' + subtask_index: 46 +- subtask: 'Put the orange build block in the center of the table + + ' + subtask_index: 47 +- subtask: 'Put the green build block on the orange build block + + ' + subtask_index: 48 +- subtask: 'Put the yellow build block on the right of the blue build block + + ' + subtask_index: 49 +- subtask: 'Put the green build block on the right of the red build block + + ' + subtask_index: 50 +- subtask: 'Put the green build block on the blue build block + + ' + subtask_index: 51 +- subtask: 'Put the green build block on the right of the orange build block + + ' + subtask_index: 52 +- subtask: 'Put the blue build block in the center of the table + + ' + subtask_index: 53 +- subtask: 'Put the orange build block on the behind of the green build block + + ' + subtask_index: 54 +- subtask: 'Put the red build block on the right of the yellow build block + + ' + subtask_index: 55 +- subtask: 'Put the blue build block on the behind of the green build block + + ' + subtask_index: 56 +- subtask: 'Put the orange build block on the right of the blue build block + + ' + subtask_index: 57 +- subtask: 'Put the yellow build block on the behind of the orange build block + + ' + subtask_index: 58 +- subtask: 'Put the orange build block in the center of the table + + ' + subtask_index: 59 +- subtask: 'Put the blue build block on the right of the yellow build block + + ' + subtask_index: 60 +- subtask: 'Put the orange build block on the behind of the yellow build block + + ' + subtask_index: 61 +- subtask: 'Put the yellow build block on the behind of the green build block + + ' + subtask_index: 62 +- subtask: 'Put the blue build block on the front of the orange build block + + ' + subtask_index: 63 +- subtask: 'Put the green build block on the behind of the red build block + + ' + subtask_index: 64 +- subtask: 'Put the green build block in the center of the table + + ' + 
subtask_index: 65 +- subtask: 'Put the green build block on the right of the yellow build block + + ' + subtask_index: 66 +- subtask: 'Put the red build block on the right of the blue build block + + ' + subtask_index: 67 +- subtask: 'Put the red build block on the behind of the green build block + + ' + subtask_index: 68 +- subtask: 'Put the blue build block on the behind of the orange build block + + ' + subtask_index: 69 +- subtask: 'Put the yellow build block on the right of the red build block + + ' + subtask_index: 70 +- subtask: 'Put the blue build block in the center of the table + + ' + subtask_index: 71 +- subtask: 'Put the red build block on the behind of the yellow build block + + ' + subtask_index: 72 +- subtask: 'Put the yellow build block on the right of the red build block + + ' + subtask_index: 73 +- subtask: 'Put the green build block on the right of the orange build block + + ' + subtask_index: 74 +- subtask: 'Put the red build block on the blue build block + + ' + subtask_index: 75 +- subtask: 'Put the orange build block on the left of the yellow build block + + ' + subtask_index: 76 +- subtask: 'Put the green build block on the right of the red build block + + ' + subtask_index: 77 +- subtask: 'Put the red build block on the left of the orange build block + + ' + subtask_index: 78 +- subtask: 'Put the orange build block on the right of the blue build block + + ' + subtask_index: 79 +- subtask: 'Put the red build block on the right of the orange build block + + ' + subtask_index: 80 +- subtask: 'Put the orange build block on the behind of the yellow build block + + ' + subtask_index: 81 +- subtask: 'Put the yellow build block in the center of the table + + ' + subtask_index: 82 +- subtask: 'Put the yellow build block on the behind of the red build block + + ' + subtask_index: 83 +- subtask: 'Put the yellow build block on the behind of the blue build block + + ' + subtask_index: 84 +- subtask: 'Put the orange build block on the behind of the red 
build block + + ' + subtask_index: 85 +- subtask: 'Put the yellow build block in the center of the table + + ' + subtask_index: 86 +- subtask: 'Put the green build block on the right of the blue build block + + ' + subtask_index: 87 +- subtask: 'Put the orange build block on the right of the yellow build block + + ' + subtask_index: 88 +- subtask: 'Put the blue build block on the right of the yellow build block + + ' + subtask_index: 89 +- subtask: 'Put the red build block on the right of the green build block + + ' + subtask_index: 90 +- subtask: 'Put the orange build block on the behind of the yellow build block + + ' + subtask_index: 91 +- subtask: 'Put the yellow build block on the blue build block + + ' + subtask_index: 92 +- subtask: 'Put the red build block on the right of the orange build block + + ' + subtask_index: 93 +- subtask: 'Put the bluebuild block on the right of the yellow build block + + ' + subtask_index: 94 +- subtask: 'Put the red build block in the center of the table + + ' + subtask_index: 95 +- subtask: 'Put the red build block on the blue build block + + ' + subtask_index: 96 +- subtask: 'Put the orange build block on the right of the blue build block + + ' + subtask_index: 97 +- subtask: 'Put the blue build block on the behind of the orange build block + + ' + subtask_index: 98 +- subtask: 'Put the yellow build block on the right of the red build block + + ' + subtask_index: 99 +- subtask: ' + + Put the yellow build block on the behind of the green build block + + ' + subtask_index: 100 +- subtask: 'Put the yellow build block on the right of the orange build block + + ' + subtask_index: 101 +- subtask: 'Put the orange build block on the behind of the blue build block + + ' + subtask_index: 102 +- subtask: 'Put the orange build block on the right of the red build block + + ' + subtask_index: 103 +- subtask: 'Put the green build block on the behind of the orange build block + + ' + subtask_index: 104 +- subtask: 'Put the blue build block on 
the behind of the yellow build block + + ' + subtask_index: 105 +- subtask: 'Put the yellow build block on the right of the orange build block + + ' + subtask_index: 106 +- subtask: 'Put the green build block on the behind of the yellow build block + + ' + subtask_index: 107 +- subtask: 'Put the green build block on the right of the red build block + + ' + subtask_index: 108 +- subtask: 'Put the red build block on the front of the blue build block + + ' + subtask_index: 109 +- subtask: 'Put the red build block on the behind of the green'' build block + + ' + subtask_index: 110 +- subtask: 'Put the red build block on the behind of the orange build block + + ' + subtask_index: 111 +- subtask: 'Put the red build block on the left of the green build block + + ' + subtask_index: 112 +- subtask: 'Put the green build block on the behind of the yellow build block + + ' + subtask_index: 113 +- subtask: 'Put blue the build block on the right of the red build block + + ' + subtask_index: 114 +- subtask: 'Put the green build block on the orange build block + + ' + subtask_index: 115 +- subtask: 'Put the orange build block on the right of the red build block + + ' + subtask_index: 116 +- subtask: 'Put the blue build block on the right of the green build block + + ' + subtask_index: 117 +- subtask: 'Put the yellow build block on the right of the blue build block + + ' + subtask_index: 118 +- subtask: 'Put the orange build block on the behind of the blue build block + + ' + subtask_index: 119 +- subtask: ' + + Put the yellow build block in the center of the table' + subtask_index: 120 +- subtask: 'Put the yellow build block on the right of the green build block + + ' + subtask_index: 121 +- subtask: 'Put the green build block on the behind of the yellow build block + + ' + subtask_index: 122 +- subtask: 'Put the blue build block in the center of the table + + ' + subtask_index: 123 +- subtask: 'Put the orange build block in the center of the table + + ' + subtask_index: 124 +- 
subtask: 'Put the yellow build block on the right of the green build block + + ' + subtask_index: 125 +- subtask: 'Put the red build block on the right of the yellow build block + + ' + subtask_index: 126 +- subtask: 'Put the yellow build block on the behind of the red build block + + ' + subtask_index: 127 +- subtask: 'Put the red build block on the behind of the green build block + + ' + subtask_index: 128 +- subtask: Put the green build block on the behind of the blue build block + subtask_index: 129 +- subtask: 'Put the green build block on the behind of the orange build block + + ' + subtask_index: 130 +- subtask: 'Put the blue build block on the right of the green build block + + ' + subtask_index: 131 +- subtask: 'Put the green build block in the center of the table + + ' + subtask_index: 132 +- subtask: Put the green build block on the behind of the blue build block + subtask_index: 133 +- subtask: 'Put the green build block on the left of the orange build block + + ' + subtask_index: 134 +- subtask: 'Put the red build block on the behind of the yellow build block + + ' + subtask_index: 135 +- subtask: 'Put the yellow build block on the right of the green build block + + ' + subtask_index: 136 +- subtask: 'Put the blue build block on the behind of the red build block + + ' + subtask_index: 137 +- subtask: 'Put the orange build block on the behind of the red build block + + ' + subtask_index: 138 +- subtask: 'Put the blue build block on the behind of the green build block + + ' + subtask_index: 139 +- subtask: 'Put the yellow build block on the blue build block + + ' + subtask_index: 140 +- subtask: 'Put the red build block on the front of the green build block + + ' + subtask_index: 141 +- subtask: 'Put the red build block on the left of the ornage build block + + ' + subtask_index: 142 +- subtask: 'Put the green build block in the center of the table + + ' + subtask_index: 143 +- subtask: 'Put the red build block on the right of the green build block + + ' 
+ subtask_index: 144 +- subtask: 'Put the yellow build block on the right of the orange build block + + ' + subtask_index: 145 +- subtask: 'Put the yellow build block on the right of the blue build block + + ' + subtask_index: 146 +- subtask: Put the XX build block on the XX build block + subtask_index: 147 +- subtask: 'Put the yellow build block in the center of the table + + ' + subtask_index: 148 +- subtask: 'Put the orange build block on the green build block + + ' + subtask_index: 149 +- subtask: End + subtask_index: 150 +- subtask: 'Put the blue build block on the right of the red build block + + ' + subtask_index: 151 +- subtask: 'Put the green build block on the right of the orange build block + + ' + subtask_index: 152 +- subtask: 'Put the greem build block on the behind of the blue build block + + ' + subtask_index: 153 +- subtask: 'Put the left build block on the left of the green build block + + ' + subtask_index: 154 +- subtask: 'Put the green build block on the right of the orange build block + + ' + subtask_index: 155 +- subtask: 'Put the red build block on the left of the blue build block + + ' + subtask_index: 156 +- subtask: 'Put the blue build block on the right of the orange build block + + ' + subtask_index: 157 +- subtask: 'Put the yellow build block on the behind of the orange build block + + ' + subtask_index: 158 +- subtask: 'Put the green build block on the behind of the blue build block + + ' + subtask_index: 159 +- subtask: 'Put the yellow build block on the right of the orange build block + + ' + subtask_index: 160 +- subtask: 'Put the blue build block on the behind of the orange build block + + ' + subtask_index: 161 +- subtask: 'Put the red build block on the front of the green build block + + ' + subtask_index: 162 +- subtask: 'Put the red build block on the front of the yellow build block + + ' + subtask_index: 163 +- subtask: 'Put the yellow build block on the behind of the blue build block + + ' + subtask_index: 164 +- subtask: 
'Put the blue build block on the orange build block + + ' + subtask_index: 165 +- subtask: 'Put the green build block in the center of the table + + ' + subtask_index: 166 +- subtask: 'Put the orange build block on the front of the blue build block + + ' + subtask_index: 167 +- subtask: 'Put the blue build block on the right of the yellow build block + + ' + subtask_index: 168 +- subtask: 'Put the blue build block on the left of the yellow build block + + ' + subtask_index: 169 +- subtask: 'Put the orange build block on the right of the yellow build block + + ' + subtask_index: 170 +- subtask: 'Put the orange build block on the left of the red build block + + ' + subtask_index: 171 +- subtask: 'Put the green build block in the center of the table + + ' + subtask_index: 172 +- subtask: 'Put the red build block on the right of the blue build block + + ' + subtask_index: 173 +- subtask: Put the blue build block on the right of the yellow build block + subtask_index: 174 +- subtask: 'Put the green build block on the right of the orange build block + + ' + subtask_index: 175 +- subtask: 'Put the blue build block in the center of the table + + ' + subtask_index: 176 +- subtask: 'Put the green build block on the right of the red build block + + ' + subtask_index: 177 +- subtask: 'Put the green build block on the right of the blue build block + + ' + subtask_index: 178 +- subtask: 'Put the yellow build block in the center of the table + + ' + subtask_index: 179 +- subtask: Abnormal + subtask_index: 180 +- subtask: 'Put the red build block on the behind of the orange build block + + ' + subtask_index: 181 +- subtask: 'Put the red build block on the yellow build block + + ' + subtask_index: 182 +- subtask: 'Put the orange build block on the right of the yellow build block + + ' + subtask_index: 183 +- subtask: 'Put the red build block on the right of the yellow build block + + ' + subtask_index: 184 +- subtask: 'Put the orange build block on the right of the blue build block 
+ + ' + subtask_index: 185 +- subtask: 'Put the orange build block on the orange build block + + ' + subtask_index: 186 +- subtask: 'Put the blue build block on the behind of the red build block + + ' + subtask_index: 187 +- subtask: 'Put the yellow build block on the right of the blue build block + + ' + subtask_index: 188 +- subtask: 'Put the yellow build block on the behind of the green build block + + ' + subtask_index: 189 +- subtask: 'Put the yellow build block on the left of the blue build block + + ' + subtask_index: 190 +- subtask: 'Put the yellow build block on the right of the orange build block + + ' + subtask_index: 191 +- subtask: 'Put the yellow build block on the blue build block + + ' + subtask_index: 192 +- subtask: 'Put the yellow build block on the behind of the blue build block + + ' + subtask_index: 193 +- subtask: 'Put the green build block on the right of the blue build block + + ' + subtask_index: 194 +- subtask: 'Put the orange build block on the right of the yellow build block + + ' + subtask_index: 195 +- subtask: 'Put the red build block on the behind of the green build block + + ' + subtask_index: 196 +- subtask: 'Put the red build block in the center of the table + + ' + subtask_index: 197 +- subtask: 'Put the yellow build block on the behind of the blue build block + + ' + subtask_index: 198 +- subtask: 'Put the green build block on the behind of the orange build block + + ' + subtask_index: 199 +- subtask: 'Put the yellow build block on the behind of the red build block + + ' + subtask_index: 200 +- subtask: 'Put the green build block on the right of the yellow build block + + ' + subtask_index: 201 +- subtask: 'Put the green build block on the red build block + + ' + subtask_index: 202 +- subtask: 'Put the blue build block on the right of the orange build block + + ' + subtask_index: 203 +- subtask: 'Put the red build block on the behind of the orange build block + + ' + subtask_index: 204 +- subtask: 'Put the red build block in 
the center of the table + + ' + subtask_index: 205 +- subtask: 'Put the red build block in the center of the table + + ' + subtask_index: 206 +- subtask: 'Put the green build block on the right of the orange build block + + ' + subtask_index: 207 +- subtask: 'Put the green build block on the front of the blue build block + + ' + subtask_index: 208 +- subtask: 'Put the yellow build block on the behind of the orange build block + + ' + subtask_index: 209 +- subtask: 'Put the yellow build block on the behind of the blue build block + + ' + subtask_index: 210 +- subtask: 'Put the green build block on the blue build block + + ' + subtask_index: 211 +- subtask: Put the blue build block on the right of the yellow build block + subtask_index: 212 +- subtask: 'Put the blue build block on the right of the orange build block + + ' + subtask_index: 213 +- subtask: 'Put the orange build block on the left of the red build block + + ' + subtask_index: 214 +- subtask: 'Put the orange build block on the right of the blue build block + + ' + subtask_index: 215 +- subtask: 'Put the blue build block on the front of the green build block + + ' + subtask_index: 216 +- subtask: 'Put the yellow build block on the orange build block + + ' + subtask_index: 217 +- subtask: 'Put the yellow build block on the front of the red build block + + ' + subtask_index: 218 +- subtask: 'Put the orange build block on the behind of the red build block + + ' + subtask_index: 219 +- subtask: 'Put the yellow build block on the behind of the green build block + + ' + subtask_index: 220 +- subtask: 'Put the blue build block on the right of orange the build block + + ' + subtask_index: 221 +- subtask: 'Put the red build block on the right of the blue build block + + ' + subtask_index: 222 +- subtask: 'Put the green build block on the behind of the orange build block + + ' + subtask_index: 223 +- subtask: 'Put the green build block on the behind of the yellow build block + + ' + subtask_index: 224 +- subtask: 
'Put the yellow build block on the right of the green build block + + ' + subtask_index: 225 +- subtask: 'Put the orange build block on the right of the green build block + + ' + subtask_index: 226 +- subtask: 'Put the orange build block on the right of the yellow build block + + ' + subtask_index: 227 +- subtask: 'Put the red build block on the green build block + + ' + subtask_index: 228 +- subtask: 'Put the red build block on the right of the blue build block + + ' + subtask_index: 229 +- subtask: 'Put the blue build block on the behind of the green build block + + ' + subtask_index: 230 +- subtask: 'Put the orange build block on the right of the red build block + + ' + subtask_index: 231 +- subtask: 'Put the blue build block on the red build block + + ' + subtask_index: 232 +- subtask: 'Put the orange build block on the right of the blue build block + + ' + subtask_index: 233 +- subtask: 'Put the orange build block on the left of the yellow build block + + ' + subtask_index: 234 +- subtask: 'Put the right build block on the right of the yellow build block + + ' + subtask_index: 235 +- subtask: 'Put the green build block on the right of the red build block + + ' + subtask_index: 236 +- subtask: 'Put the orange build block on the right of the green build block + + ' + subtask_index: 237 +- subtask: 'Put the blue build block in the center of the table + + ' + subtask_index: 238 +- subtask: 'Put the orange build block on the behind of the yellow build block + + ' + subtask_index: 239 +- subtask: 'Put the orange build block on the right of the blue build block + + ' + subtask_index: 240 +- subtask: 'Put the orange build block on the behind of the blue build block + + ' + subtask_index: 241 +- subtask: 'Put the red build block on the behind of the yellow build block + + ' + subtask_index: 242 +- subtask: 'Put the ornage build block on the behind of the green build block + + ' + subtask_index: 243 +- subtask: 'Put the blue build block on the left of the yellow build 
block + + ' + subtask_index: 244 +- subtask: 'Put the green build block on the front of the blue build block + + ' + subtask_index: 245 +- subtask: 'Put the green build block on the behind of the red build block + + ' + subtask_index: 246 +- subtask: 'Put the green build block on the right of the orange build block + + ' + subtask_index: 247 +- subtask: 'Put the green build block on the right of the orange build block + + ' + subtask_index: 248 +- subtask: 'Put the yellow build block on the behind of the red build block + + ' + subtask_index: 249 +- subtask: 'Put the blue build block on the green build block + + ' + subtask_index: 250 +- subtask: Put the blue build block on the right of the orange build block + subtask_index: 251 +- subtask: 'Put the orange build block on the right of the green build block + + ' + subtask_index: 252 +- subtask: 'Put the orange build block on the behind of the yellow build block + + ' + subtask_index: 253 +- subtask: 'Put the green build block in the center of the table + + ' + subtask_index: 254 +- subtask: 'Put the blue build block on the behind of the orange build block + + ' + subtask_index: 255 +- subtask: Put the green build block on the left of the orange build block + subtask_index: 256 +- subtask: 'Put the green build block on the right of the yellow build block + + ' + subtask_index: 257 +- subtask: 'Put the green build block on the left of the blue build block + + ' + subtask_index: 258 +- subtask: 'Put the orange build block on the right of the green build block + + ' + subtask_index: 259 +- subtask: 'Put the green build block on the orange build block + + ' + subtask_index: 260 +- subtask: 'Put the blue build block on the behind of the green build block + + ' + subtask_index: 261 +- subtask: 'Put the yellow build block on the behind of the red build block + + ' + subtask_index: 262 +- subtask: 'Put the orange build block on the green build block + + ' + subtask_index: 263 +- subtask: 'Put the red build block on the 
behind of the blue build block + + ' + subtask_index: 264 +- subtask: 'Put the blue build block on the right of the orange build block + + ' + subtask_index: 265 +- subtask: move the build block to the center of the table + subtask_index: 266 +- subtask: 'Put the green build block on the right of the red build block + + ' + subtask_index: 267 +- subtask: 'Put the red build block on the behind of the green build block + + ' + subtask_index: 268 +- subtask: 'Put the red build block on the behind of the blue build block + + ' + subtask_index: 269 +- subtask: 'Put the yellow build block on the behind of the orange build block + + ' + subtask_index: 270 +- subtask: 'Put the red build block on the right of the orange build block + + ' + subtask_index: 271 +- subtask: 'Put the green build block on the right of the yellow build block + + ' + subtask_index: 272 +- subtask: 'Put the red build block on the right of the yellow build block + + ' + subtask_index: 273 +- subtask: 'Put the red build block in the center of the table + + ' + subtask_index: 274 +- subtask: Put the red build block on the right of the blue build block + subtask_index: 275 +- subtask: Put the yellow build block on the behind of the green build block + subtask_index: 276 +- subtask: 'Put the red build block on the right of the orange build block + + ' + subtask_index: 277 +- subtask: 'Put the blue build block on the right of the orange build block + + ' + subtask_index: 278 +- subtask: 'Put the orange build block on the behind of the green build block + + ' + subtask_index: 279 +- subtask: 'Put the blue build block on the front of the green build block + + ' + subtask_index: 280 +- subtask: 'Put the ornage build block on the behind of the blue build block + + ' + subtask_index: 281 +- subtask: 'Put the yellow build block on the behind of the orange build block + + ' + subtask_index: 282 +- subtask: 'Put the green build block on the behind of the blue build block + + ' + subtask_index: 283 +- subtask: 
'Put the green build block on the left of the blue build block + + ' + subtask_index: 284 +- subtask: 'Put the blue build block in the center of the table + + ' + subtask_index: 285 +- subtask: 'Put the green build block on the right of the yellow build block + + ' + subtask_index: 286 +- subtask: Put the yellow build block on the behind of the blue build block + subtask_index: 287 +- subtask: 'Put the green build block on the right of the orange build block + + ' + subtask_index: 288 +- subtask: 'Put the green build block on the blue build block + + ' + subtask_index: 289 +- subtask: 'Put the orange build block on the behind of the blue build block + + ' + subtask_index: 290 +- subtask: 'Put the red build block on the behind of the yellow build block + + ' + subtask_index: 291 +- subtask: 'Put the orange build block on the blue build block + + ' + subtask_index: 292 +- subtask: 'Put the green build block on the right of the orange build block + + ' + subtask_index: 293 +- subtask: 'Put the orange build block on the left of the red build block + + ' + subtask_index: 294 +- subtask: Put the green build block in the center of the table + subtask_index: 295 +- subtask: 'null' + subtask_index: 296 atomic_actions: - grasp - lift - lower -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_chest_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -369,13 +1231,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -383,8 +1242,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 238 total_frames: 232528 fps: 30 @@ -471,11 +1329,9 @@ data_structure: 'Agilex_Cobot_Magic_connect_block_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:237 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -767,7 +1623,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -775,7 +1631,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, 
please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -802,1338 +1657,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_chest_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_connect_block - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - According to the building block template in front, assemble the scattered blocks - together. 
- sub_tasks: - - subtask: 'Put the yellow build block on the right of the green build block - - ' - subtask_index: 0 - - subtask: 'Put the blue build block on the behind of the green build block - - ' - subtask_index: 1 - - subtask: Put the yellow build block in the center of the table - subtask_index: 2 - - subtask: 'Put the blue build block on the right of the yellow build block - - ' - subtask_index: 3 - - subtask: 'Put the green build block on the red build block - - ' - subtask_index: 4 - - subtask: 'Put the yellow build block on the behind of the green build block - - ' - subtask_index: 5 - - subtask: 'Put the orange build block on the right of the yellow build block - - ' - subtask_index: 6 - - subtask: 'Put the orange build block on the right of the red build block - - ' - subtask_index: 7 - - subtask: 'Put the orange build block on the right of the blue build block - - ' - subtask_index: 8 - - subtask: 'Put the green build block on the left of the blue build block - - ' - subtask_index: 9 - - subtask: 'Put the blue build block on the left of the yellow build block - - ' - subtask_index: 10 - - subtask: 'Put the yellow build block on the right of the green build block - - ' - subtask_index: 11 - - subtask: 'Put the orange build block on the front of the blue build block - - ' - subtask_index: 12 - - subtask: 'Put the orange build block on the left of the green build block - - ' - subtask_index: 13 - - subtask: 'Put the blue build block on the right of the green build block - - ' - subtask_index: 14 - - subtask: 'Put the green build block on the behind of the blue build block - - ' - subtask_index: 15 - - subtask: 'Put the orange build block on the right of the build block - - ' - subtask_index: 16 - - subtask: 'Put the orange build block on the right of the green build block - - ' - subtask_index: 17 - - subtask: 'Put the red build block on the behind of the orange build block - - ' - subtask_index: 18 - - subtask: 'Put the red build block on the front of 
the yellow build block - - ' - subtask_index: 19 - - subtask: 'Put the orange build block on the front of the yellow build block - - ' - subtask_index: 20 - - subtask: 'Put the blue build block on the behind of the green build block - - ' - subtask_index: 21 - - subtask: 'Put the orange build block in the center of the table - - ' - subtask_index: 22 - - subtask: 'Put the yellow build block in the center of the table - - ' - subtask_index: 23 - - subtask: 'Put the green build block on the behind of the yellow build block - - ' - subtask_index: 24 - - subtask: 'Put the orange build block on the left of the blue build block - - ' - subtask_index: 25 - - subtask: 'Put the red build block on the right of the blue build block - - ' - subtask_index: 26 - - subtask: 'Put the yellow build block in the center of the table - - ' - subtask_index: 27 - - subtask: 'Put the red build block on the behind of the blue build block - - ' - subtask_index: 28 - - subtask: 'Put the blue build block on the behind of the orange build block - - ' - subtask_index: 29 - - subtask: 'Put the orange build block on the behind of the green build block - - ' - subtask_index: 30 - - subtask: 'Put the red build block on the right of the yellow build block - - ' - subtask_index: 31 - - subtask: 'Put the yellow build block on the front of the orange build block - - ' - subtask_index: 32 - - subtask: 'Put the blue build block on the right of the green build block - - ' - subtask_index: 33 - - subtask: 'Put the blue build block on the right of the red build block - - ' - subtask_index: 34 - - subtask: 'Put the red build block on the behind of the orange build block - - ' - subtask_index: 35 - - subtask: 'Put the yellow build block on the behind of the green build block - - ' - subtask_index: 36 - - subtask: 'Put the red build block on the right of the blue build block - - ' - subtask_index: 37 - - subtask: 'Put the green build block on the behind of the yellow build block - - ' - subtask_index: 38 - - 
subtask: 'Put the yellow build block on the behind of the green build block - - ' - subtask_index: 39 - - subtask: 'Put the orange build block on the behind of the red build block - - ' - subtask_index: 40 - - subtask: 'Put theyellow build block on the behind of the blue build block - - ' - subtask_index: 41 - - subtask: 'Put the green build block on the behind of the yellow build block - - ' - subtask_index: 42 - - subtask: 'Put the red build block on the right of the orange build block - - ' - subtask_index: 43 - - subtask: 'Put the yellow build block on the right of the red build block - - ' - subtask_index: 44 - - subtask: 'Put the blue build block on the right of the orange build block - - ' - subtask_index: 45 - - subtask: 'Put the green build block on the right of the yellow build block - - ' - subtask_index: 46 - - subtask: 'Put the orange build block in the center of the table - - ' - subtask_index: 47 - - subtask: 'Put the green build block on the orange build block - - ' - subtask_index: 48 - - subtask: 'Put the yellow build block on the right of the blue build block - - ' - subtask_index: 49 - - subtask: 'Put the green build block on the right of the red build block - - ' - subtask_index: 50 - - subtask: 'Put the green build block on the blue build block - - ' - subtask_index: 51 - - subtask: 'Put the green build block on the right of the orange build block - - ' - subtask_index: 52 - - subtask: 'Put the blue build block in the center of the table - - ' - subtask_index: 53 - - subtask: 'Put the orange build block on the behind of the green build block - - ' - subtask_index: 54 - - subtask: 'Put the red build block on the right of the yellow build block - - ' - subtask_index: 55 - - subtask: 'Put the blue build block on the behind of the green build block - - ' - subtask_index: 56 - - subtask: 'Put the orange build block on the right of the blue build block - - ' - subtask_index: 57 - - subtask: 'Put the yellow build block on the behind of the orange 
build block - - ' - subtask_index: 58 - - subtask: 'Put the orange build block in the center of the table - - ' - subtask_index: 59 - - subtask: 'Put the blue build block on the right of the yellow build block - - ' - subtask_index: 60 - - subtask: 'Put the orange build block on the behind of the yellow build block - - ' - subtask_index: 61 - - subtask: 'Put the yellow build block on the behind of the green build block - - ' - subtask_index: 62 - - subtask: 'Put the blue build block on the front of the orange build block - - ' - subtask_index: 63 - - subtask: 'Put the green build block on the behind of the red build block - - ' - subtask_index: 64 - - subtask: 'Put the green build block in the center of the table - - ' - subtask_index: 65 - - subtask: 'Put the green build block on the right of the yellow build block - - ' - subtask_index: 66 - - subtask: 'Put the red build block on the right of the blue build block - - ' - subtask_index: 67 - - subtask: 'Put the red build block on the behind of the green build block - - ' - subtask_index: 68 - - subtask: 'Put the blue build block on the behind of the orange build block - - ' - subtask_index: 69 - - subtask: 'Put the yellow build block on the right of the red build block - - ' - subtask_index: 70 - - subtask: 'Put the blue build block in the center of the table - - ' - subtask_index: 71 - - subtask: 'Put the red build block on the behind of the yellow build block - - ' - subtask_index: 72 - - subtask: 'Put the yellow build block on the right of the red build block - - ' - subtask_index: 73 - - subtask: 'Put the green build block on the right of the orange build block - - ' - subtask_index: 74 - - subtask: 'Put the red build block on the blue build block - - ' - subtask_index: 75 - - subtask: 'Put the orange build block on the left of the yellow build block - - ' - subtask_index: 76 - - subtask: 'Put the green build block on the right of the red build block - - ' - subtask_index: 77 - - subtask: 'Put the red build 
block on the left of the orange build block - - ' - subtask_index: 78 - - subtask: 'Put the orange build block on the right of the blue build block - - ' - subtask_index: 79 - - subtask: 'Put the red build block on the right of the orange build block - - ' - subtask_index: 80 - - subtask: 'Put the orange build block on the behind of the yellow build block - - ' - subtask_index: 81 - - subtask: 'Put the yellow build block in the center of the table - - ' - subtask_index: 82 - - subtask: 'Put the yellow build block on the behind of the red build block - - ' - subtask_index: 83 - - subtask: 'Put the yellow build block on the behind of the blue build block - - ' - subtask_index: 84 - - subtask: 'Put the orange build block on the behind of the red build block - - ' - subtask_index: 85 - - subtask: 'Put the yellow build block in the center of the table - - ' - subtask_index: 86 - - subtask: 'Put the green build block on the right of the blue build block - - ' - subtask_index: 87 - - subtask: 'Put the orange build block on the right of the yellow build block - - ' - subtask_index: 88 - - subtask: 'Put the blue build block on the right of the yellow build block - - ' - subtask_index: 89 - - subtask: 'Put the red build block on the right of the green build block - - ' - subtask_index: 90 - - subtask: 'Put the orange build block on the behind of the yellow build block - - ' - subtask_index: 91 - - subtask: 'Put the yellow build block on the blue build block - - ' - subtask_index: 92 - - subtask: 'Put the red build block on the right of the orange build block - - ' - subtask_index: 93 - - subtask: 'Put the bluebuild block on the right of the yellow build block - - ' - subtask_index: 94 - - subtask: 'Put the red build block in the center of the table - - ' - subtask_index: 95 - - subtask: 'Put the red build block on the blue build block - - ' - subtask_index: 96 - - subtask: 'Put the orange build block on the right of the blue build block - - ' - subtask_index: 97 - - subtask: 
'Put the blue build block on the behind of the orange build block - - ' - subtask_index: 98 - - subtask: 'Put the yellow build block on the right of the red build block - - ' - subtask_index: 99 - - subtask: ' - - Put the yellow build block on the behind of the green build block - - ' - subtask_index: 100 - - subtask: 'Put the yellow build block on the right of the orange build block - - ' - subtask_index: 101 - - subtask: 'Put the orange build block on the behind of the blue build block - - ' - subtask_index: 102 - - subtask: 'Put the orange build block on the right of the red build block - - ' - subtask_index: 103 - - subtask: 'Put the green build block on the behind of the orange build block - - ' - subtask_index: 104 - - subtask: 'Put the blue build block on the behind of the yellow build block - - ' - subtask_index: 105 - - subtask: 'Put the yellow build block on the right of the orange build block - - ' - subtask_index: 106 - - subtask: 'Put the green build block on the behind of the yellow build block - - ' - subtask_index: 107 - - subtask: 'Put the green build block on the right of the red build block - - ' - subtask_index: 108 - - subtask: 'Put the red build block on the front of the blue build block - - ' - subtask_index: 109 - - subtask: 'Put the red build block on the behind of the green'' build block - - ' - subtask_index: 110 - - subtask: 'Put the red build block on the behind of the orange build block - - ' - subtask_index: 111 - - subtask: 'Put the red build block on the left of the green build block - - ' - subtask_index: 112 - - subtask: 'Put the green build block on the behind of the yellow build block - - ' - subtask_index: 113 - - subtask: 'Put blue the build block on the right of the red build block - - ' - subtask_index: 114 - - subtask: 'Put the green build block on the orange build block - - ' - subtask_index: 115 - - subtask: 'Put the orange build block on the right of the red build block - - ' - subtask_index: 116 - - subtask: 'Put the 
blue build block on the right of the green build block - - ' - subtask_index: 117 - - subtask: 'Put the yellow build block on the right of the blue build block - - ' - subtask_index: 118 - - subtask: 'Put the orange build block on the behind of the blue build block - - ' - subtask_index: 119 - - subtask: ' - - Put the yellow build block in the center of the table' - subtask_index: 120 - - subtask: 'Put the yellow build block on the right of the green build block - - ' - subtask_index: 121 - - subtask: 'Put the green build block on the behind of the yellow build block - - ' - subtask_index: 122 - - subtask: 'Put the blue build block in the center of the table - - ' - subtask_index: 123 - - subtask: 'Put the orange build block in the center of the table - - ' - subtask_index: 124 - - subtask: 'Put the yellow build block on the right of the green build block - - ' - subtask_index: 125 - - subtask: 'Put the red build block on the right of the yellow build block - - ' - subtask_index: 126 - - subtask: 'Put the yellow build block on the behind of the red build block - - ' - subtask_index: 127 - - subtask: 'Put the red build block on the behind of the green build block - - ' - subtask_index: 128 - - subtask: Put the green build block on the behind of the blue build block - subtask_index: 129 - - subtask: 'Put the green build block on the behind of the orange build block - - ' - subtask_index: 130 - - subtask: 'Put the blue build block on the right of the green build block - - ' - subtask_index: 131 - - subtask: 'Put the green build block in the center of the table - - ' - subtask_index: 132 - - subtask: Put the green build block on the behind of the blue build block - subtask_index: 133 - - subtask: 'Put the green build block on the left of the orange build block - - ' - subtask_index: 134 - - subtask: 'Put the red build block on the behind of the yellow build block - - ' - subtask_index: 135 - - subtask: 'Put the yellow build block on the right of the green build block - 
- ' - subtask_index: 136 - - subtask: 'Put the blue build block on the behind of the red build block - - ' - subtask_index: 137 - - subtask: 'Put the orange build block on the behind of the red build block - - ' - subtask_index: 138 - - subtask: 'Put the blue build block on the behind of the green build block - - ' - subtask_index: 139 - - subtask: 'Put the yellow build block on the blue build block - - ' - subtask_index: 140 - - subtask: 'Put the red build block on the front of the green build block - - ' - subtask_index: 141 - - subtask: 'Put the red build block on the left of the ornage build block - - ' - subtask_index: 142 - - subtask: 'Put the green build block in the center of the table - - ' - subtask_index: 143 - - subtask: 'Put the red build block on the right of the green build block - - ' - subtask_index: 144 - - subtask: 'Put the yellow build block on the right of the orange build block - - ' - subtask_index: 145 - - subtask: 'Put the yellow build block on the right of the blue build block - - ' - subtask_index: 146 - - subtask: Put the XX build block on the XX build block - subtask_index: 147 - - subtask: 'Put the yellow build block in the center of the table - - ' - subtask_index: 148 - - subtask: 'Put the orange build block on the green build block - - ' - subtask_index: 149 - - subtask: End - subtask_index: 150 - - subtask: 'Put the blue build block on the right of the red build block - - ' - subtask_index: 151 - - subtask: 'Put the green build block on the right of the orange build block - - ' - subtask_index: 152 - - subtask: 'Put the greem build block on the behind of the blue build block - - ' - subtask_index: 153 - - subtask: 'Put the left build block on the left of the green build block - - ' - subtask_index: 154 - - subtask: 'Put the green build block on the right of the orange build block - - ' - subtask_index: 155 - - subtask: 'Put the red build block on the left of the blue build block - - ' - subtask_index: 156 - - subtask: 'Put the blue 
build block on the right of the orange build block - - ' - subtask_index: 157 - - subtask: 'Put the yellow build block on the behind of the orange build block - - ' - subtask_index: 158 - - subtask: 'Put the green build block on the behind of the blue build block - - ' - subtask_index: 159 - - subtask: 'Put the yellow build block on the right of the orange build block - - ' - subtask_index: 160 - - subtask: 'Put the blue build block on the behind of the orange build block - - ' - subtask_index: 161 - - subtask: 'Put the red build block on the front of the green build block - - ' - subtask_index: 162 - - subtask: 'Put the red build block on the front of the yellow build block - - ' - subtask_index: 163 - - subtask: 'Put the yellow build block on the behind of the blue build block - - ' - subtask_index: 164 - - subtask: 'Put the blue build block on the orange build block - - ' - subtask_index: 165 - - subtask: 'Put the green build block in the center of the table - - ' - subtask_index: 166 - - subtask: 'Put the orange build block on the front of the blue build block - - ' - subtask_index: 167 - - subtask: 'Put the blue build block on the right of the yellow build block - - ' - subtask_index: 168 - - subtask: 'Put the blue build block on the left of the yellow build block - - ' - subtask_index: 169 - - subtask: 'Put the orange build block on the right of the yellow build block - - ' - subtask_index: 170 - - subtask: 'Put the orange build block on the left of the red build block - - ' - subtask_index: 171 - - subtask: 'Put the green build block in the center of the table - - ' - subtask_index: 172 - - subtask: 'Put the red build block on the right of the blue build block - - ' - subtask_index: 173 - - subtask: Put the blue build block on the right of the yellow build block - subtask_index: 174 - - subtask: 'Put the green build block on the right of the orange build block - - ' - subtask_index: 175 - - subtask: 'Put the blue build block in the center of the table - - ' 
- subtask_index: 176 - - subtask: 'Put the green build block on the right of the red build block - - ' - subtask_index: 177 - - subtask: 'Put the green build block on the right of the blue build block - - ' - subtask_index: 178 - - subtask: 'Put the yellow build block in the center of the table - - ' - subtask_index: 179 - - subtask: Abnormal - subtask_index: 180 - - subtask: 'Put the red build block on the behind of the orange build block - - ' - subtask_index: 181 - - subtask: 'Put the red build block on the yellow build block - - ' - subtask_index: 182 - - subtask: 'Put the orange build block on the right of the yellow build block - - ' - subtask_index: 183 - - subtask: 'Put the red build block on the right of the yellow build block - - ' - subtask_index: 184 - - subtask: 'Put the orange build block on the right of the blue build block - - ' - subtask_index: 185 - - subtask: 'Put the orange build block on the orange build block - - ' - subtask_index: 186 - - subtask: 'Put the blue build block on the behind of the red build block - - ' - subtask_index: 187 - - subtask: 'Put the yellow build block on the right of the blue build block - - ' - subtask_index: 188 - - subtask: 'Put the yellow build block on the behind of the green build block - - ' - subtask_index: 189 - - subtask: 'Put the yellow build block on the left of the blue build block - - ' - subtask_index: 190 - - subtask: 'Put the yellow build block on the right of the orange build block - - ' - subtask_index: 191 - - subtask: 'Put the yellow build block on the blue build block - - ' - subtask_index: 192 - - subtask: 'Put the yellow build block on the behind of the blue build block - - ' - subtask_index: 193 - - subtask: 'Put the green build block on the right of the blue build block - - ' - subtask_index: 194 - - subtask: 'Put the orange build block on the right of the yellow build block - - ' - subtask_index: 195 - - subtask: 'Put the red build block on the behind of the green build block - - ' - 
subtask_index: 196 - - subtask: 'Put the red build block in the center of the table - - ' - subtask_index: 197 - - subtask: 'Put the yellow build block on the behind of the blue build block - - ' - subtask_index: 198 - - subtask: 'Put the green build block on the behind of the orange build block - - ' - subtask_index: 199 - - subtask: 'Put the yellow build block on the behind of the red build block - - ' - subtask_index: 200 - - subtask: 'Put the green build block on the right of the yellow build block - - ' - subtask_index: 201 - - subtask: 'Put the green build block on the red build block - - ' - subtask_index: 202 - - subtask: 'Put the blue build block on the right of the orange build block - - ' - subtask_index: 203 - - subtask: 'Put the red build block on the behind of the orange build block - - ' - subtask_index: 204 - - subtask: 'Put the red build block in the center of the table - - ' - subtask_index: 205 - - subtask: 'Put the red build block in the center of the table - - ' - subtask_index: 206 - - subtask: 'Put the green build block on the right of the orange build block - - ' - subtask_index: 207 - - subtask: 'Put the green build block on the front of the blue build block - - ' - subtask_index: 208 - - subtask: 'Put the yellow build block on the behind of the orange build block - - ' - subtask_index: 209 - - subtask: 'Put the yellow build block on the behind of the blue build block - - ' - subtask_index: 210 - - subtask: 'Put the green build block on the blue build block - - ' - subtask_index: 211 - - subtask: Put the blue build block on the right of the yellow build block - subtask_index: 212 - - subtask: 'Put the blue build block on the right of the orange build block - - ' - subtask_index: 213 - - subtask: 'Put the orange build block on the left of the red build block - - ' - subtask_index: 214 - - subtask: 'Put the orange build block on the right of the blue build block - - ' - subtask_index: 215 - - subtask: 'Put the blue build block on the front of 
the green build block - - ' - subtask_index: 216 - - subtask: 'Put the yellow build block on the orange build block - - ' - subtask_index: 217 - - subtask: 'Put the yellow build block on the front of the red build block - - ' - subtask_index: 218 - - subtask: 'Put the orange build block on the behind of the red build block - - ' - subtask_index: 219 - - subtask: 'Put the yellow build block on the behind of the green build block - - ' - subtask_index: 220 - - subtask: 'Put the blue build block on the right of orange the build block - - ' - subtask_index: 221 - - subtask: 'Put the red build block on the right of the blue build block - - ' - subtask_index: 222 - - subtask: 'Put the green build block on the behind of the orange build block - - ' - subtask_index: 223 - - subtask: 'Put the green build block on the behind of the yellow build block - - ' - subtask_index: 224 - - subtask: 'Put the yellow build block on the right of the green build block - - ' - subtask_index: 225 - - subtask: 'Put the orange build block on the right of the green build block - - ' - subtask_index: 226 - - subtask: 'Put the orange build block on the right of the yellow build block - - ' - subtask_index: 227 - - subtask: 'Put the red build block on the green build block - - ' - subtask_index: 228 - - subtask: 'Put the red build block on the right of the blue build block - - ' - subtask_index: 229 - - subtask: 'Put the blue build block on the behind of the green build block - - ' - subtask_index: 230 - - subtask: 'Put the orange build block on the right of the red build block - - ' - subtask_index: 231 - - subtask: 'Put the blue build block on the red build block - - ' - subtask_index: 232 - - subtask: 'Put the orange build block on the right of the blue build block - - ' - subtask_index: 233 - - subtask: 'Put the orange build block on the left of the yellow build block - - ' - subtask_index: 234 - - subtask: 'Put the right build block on the right of the yellow build block - - ' - 
subtask_index: 235 - - subtask: 'Put the green build block on the right of the red build block - - ' - subtask_index: 236 - - subtask: 'Put the orange build block on the right of the green build block - - ' - subtask_index: 237 - - subtask: 'Put the blue build block in the center of the table - - ' - subtask_index: 238 - - subtask: 'Put the orange build block on the behind of the yellow build block - - ' - subtask_index: 239 - - subtask: 'Put the orange build block on the right of the blue build block - - ' - subtask_index: 240 - - subtask: 'Put the orange build block on the behind of the blue build block - - ' - subtask_index: 241 - - subtask: 'Put the red build block on the behind of the yellow build block - - ' - subtask_index: 242 - - subtask: 'Put the ornage build block on the behind of the green build block - - ' - subtask_index: 243 - - subtask: 'Put the blue build block on the left of the yellow build block - - ' - subtask_index: 244 - - subtask: 'Put the green build block on the front of the blue build block - - ' - subtask_index: 245 - - subtask: 'Put the green build block on the behind of the red build block - - ' - subtask_index: 246 - - subtask: 'Put the green build block on the right of the orange build block - - ' - subtask_index: 247 - - subtask: 'Put the green build block on the right of the orange build block - - ' - subtask_index: 248 - - subtask: 'Put the yellow build block on the behind of the red build block - - ' - subtask_index: 249 - - subtask: 'Put the blue build block on the green build block - - ' - subtask_index: 250 - - subtask: Put the blue build block on the right of the orange build block - subtask_index: 251 - - subtask: 'Put the orange build block on the right of the green build block - - ' - subtask_index: 252 - - subtask: 'Put the orange build block on the behind of the yellow build block - - ' - subtask_index: 253 - - subtask: 'Put the green build block in the center of the table - - ' - subtask_index: 254 - - subtask: 'Put the 
blue build block on the behind of the orange build block - - ' - subtask_index: 255 - - subtask: Put the green build block on the left of the orange build block - subtask_index: 256 - - subtask: 'Put the green build block on the right of the yellow build block - - ' - subtask_index: 257 - - subtask: 'Put the green build block on the left of the blue build block - - ' - subtask_index: 258 - - subtask: 'Put the orange build block on the right of the green build block - - ' - subtask_index: 259 - - subtask: 'Put the green build block on the orange build block - - ' - subtask_index: 260 - - subtask: 'Put the blue build block on the behind of the green build block - - ' - subtask_index: 261 - - subtask: 'Put the yellow build block on the behind of the red build block - - ' - subtask_index: 262 - - subtask: 'Put the orange build block on the green build block - - ' - subtask_index: 263 - - subtask: 'Put the red build block on the behind of the blue build block - - ' - subtask_index: 264 - - subtask: 'Put the blue build block on the right of the orange build block - - ' - subtask_index: 265 - - subtask: move the build block to the center of the table - subtask_index: 266 - - subtask: 'Put the green build block on the right of the red build block - - ' - subtask_index: 267 - - subtask: 'Put the red build block on the behind of the green build block - - ' - subtask_index: 268 - - subtask: 'Put the red build block on the behind of the blue build block - - ' - subtask_index: 269 - - subtask: 'Put the yellow build block on the behind of the orange build block - - ' - subtask_index: 270 - - subtask: 'Put the red build block on the right of the orange build block - - ' - subtask_index: 271 - - subtask: 'Put the green build block on the right of the yellow build block - - ' - subtask_index: 272 - - subtask: 'Put the red build block on the right of the yellow build block - - ' - subtask_index: 273 - - subtask: 'Put the red build block in the center of the table - - ' - 
subtask_index: 274 - - subtask: Put the red build block on the right of the blue build block - subtask_index: 275 - - subtask: Put the yellow build block on the behind of the green build block - subtask_index: 276 - - subtask: 'Put the red build block on the right of the orange build block - - ' - subtask_index: 277 - - subtask: 'Put the blue build block on the right of the orange build block - - ' - subtask_index: 278 - - subtask: 'Put the orange build block on the behind of the green build block - - ' - subtask_index: 279 - - subtask: 'Put the blue build block on the front of the green build block - - ' - subtask_index: 280 - - subtask: 'Put the ornage build block on the behind of the blue build block - - ' - subtask_index: 281 - - subtask: 'Put the yellow build block on the behind of the orange build block - - ' - subtask_index: 282 - - subtask: 'Put the green build block on the behind of the blue build block - - ' - subtask_index: 283 - - subtask: 'Put the green build block on the left of the blue build block - - ' - subtask_index: 284 - - subtask: 'Put the blue build block in the center of the table - - ' - subtask_index: 285 - - subtask: 'Put the green build block on the right of the yellow build block - - ' - subtask_index: 286 - - subtask: Put the yellow build block on the behind of the blue build block - subtask_index: 287 - - subtask: 'Put the green build block on the right of the orange build block - - ' - subtask_index: 288 - - subtask: 'Put the green build block on the blue build block - - ' - subtask_index: 289 - - subtask: 'Put the orange build block on the behind of the blue build block - - ' - subtask_index: 290 - - subtask: 'Put the red build block on the behind of the yellow build block - - ' - subtask_index: 291 - - subtask: 'Put the orange build block on the blue build block - - ' - subtask_index: 292 - - subtask: 'Put the green build block on the right of the orange build block - - ' - subtask_index: 293 - - subtask: 'Put the orange build 
block on the left of the red build block - - ' - subtask_index: 294 - - subtask: Put the green build block in the center of the table - subtask_index: 295 - - subtask: 'null' - subtask_index: 296 - atomic_actions: - - grasp - - lift - - lower - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 232528 - dataset_size: 2.20 GB - data_structure: 'Agilex_Cobot_Magic_connect_block_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(226 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_front_chest_rgb - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, 
Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_chest_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 100K-1M diff --git a/dataset_info/Agilex_Cobot_Magic_erase_board.yaml b/dataset_info/Agilex_Cobot_Magic_erase_board.yaml index 0ec78d799fecd0e54c93e33cef1e6f3af18ce6de..6a2a7e544ba23eea2a4b3ffc46bb9b797d8587b1 100644 --- a/dataset_info/Agilex_Cobot_Magic_erase_board.yaml +++ b/dataset_info/Agilex_Cobot_Magic_erase_board.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -51,32 +51,42 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: wipe off the handwriting on the whiteboard. +task_instruction: +- wipe off the handwriting on the whiteboard. sub_tasks: -- Place the eraser with the left gripper -- end -- Grasp the eraser with the left gripper -- Grasp the eraser and wipe the blackboard with the right gripper -- Abnormal -- Grasp the eraser and wipe the blackboard with the left gripper -- Place the eraser with the right gripper -- Grasp the eraser with the right gripper -- 'null' +- subtask: Place the eraser with the left gripper + subtask_index: 0 +- subtask: end + subtask_index: 1 +- subtask: Grasp the eraser with the left gripper + subtask_index: 2 +- subtask: Grasp the eraser and wipe the blackboard with the right gripper + subtask_index: 3 +- subtask: Abnormal + subtask_index: 4 +- subtask: Grasp the eraser and wipe the blackboard with the left gripper + subtask_index: 5 +- subtask: Place the eraser with the right gripper + subtask_index: 6 +- subtask: Grasp the eraser with the right gripper + subtask_index: 7 +- subtask: 'null' + subtask_index: 8 atomic_actions: - grasp - lift - lower - wipe -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -84,13 +94,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -98,8 +105,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 24688 fps: 30 @@ -200,11 +206,9 @@ data_structure: 'Agilex_Cobot_Magic_erase_board_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -477,7 +481,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -485,7 +489,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. 
support_info: For technical support, please open an issue on our GitHub repository. @@ -512,212 +515,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_erase_board - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: education - level2: school - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - wipe off the handwriting on the whiteboard. 
- sub_tasks: - - subtask: Place the eraser with the left gripper - subtask_index: 0 - - subtask: end - subtask_index: 1 - - subtask: Grasp the eraser with the left gripper - subtask_index: 2 - - subtask: Grasp the eraser and wipe the blackboard with the right gripper - subtask_index: 3 - - subtask: Abnormal - subtask_index: 4 - - subtask: Grasp the eraser and wipe the blackboard with the left gripper - subtask_index: 5 - - subtask: Place the eraser with the right gripper - subtask_index: 6 - - subtask: Grasp the eraser with the right gripper - subtask_index: 7 - - subtask: 'null' - subtask_index: 8 - atomic_actions: - - grasp - - lift - - lower - - wipe - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 24688 - dataset_size: 230.75 MB - data_structure: 'Agilex_Cobot_Magic_erase_board_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- backup - - | |-- data - - | | `-- chunk-000 - - | `-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- 
episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (38 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi 
Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_erase_board_left.yaml b/dataset_info/Agilex_Cobot_Magic_erase_board_left.yaml index 31c07db9ce435b242a3961ebb17233d6bdaf94ab..e71a65fff30b2f61d33e4a1557516e150084ad2d 100644 --- a/dataset_info/Agilex_Cobot_Magic_erase_board_left.yaml +++ b/dataset_info/Agilex_Cobot_Magic_erase_board_left.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -51,30 +51,37 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use the left claw to pick up the whiteboard eraser, wipe the notes - on the whiteboard clean, and then put them back in place. +task_instruction: +- use the left claw to pick up the whiteboard eraser, wipe the notes on the whiteboard + clean, and then put them back in place. sub_tasks: -- Place the eraser with the left gripper -- Grasp the eraser with the left gripper -- Abnormal -- End -- Grasp the eraser and wipe the blackboard with the left gripper -- 'null' +- subtask: Place the eraser with the left gripper + subtask_index: 0 +- subtask: Grasp the eraser with the left gripper + subtask_index: 1 +- subtask: Abnormal + subtask_index: 2 +- subtask: End + subtask_index: 3 +- subtask: Grasp the eraser and wipe the blackboard with the left gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - lift - wipe - handover -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -82,13 +89,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -96,8 +100,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 49 total_frames: 23847 fps: 30 @@ -198,11 +201,9 @@ data_structure: 'Agilex_Cobot_Magic_erase_board_left_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:48 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -475,7 +476,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -483,7 +484,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. 
support_info: For technical support, please open an issue on our GitHub repository. @@ -510,207 +510,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_erase_board_left - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: education - level2: school - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use the left claw to pick up the whiteboard eraser, wipe the notes on the whiteboard - clean, and then put them back in place. 
- sub_tasks: - - subtask: Place the eraser with the left gripper - subtask_index: 0 - - subtask: Grasp the eraser with the left gripper - subtask_index: 1 - - subtask: Abnormal - subtask_index: 2 - - subtask: End - subtask_index: 3 - - subtask: Grasp the eraser and wipe the blackboard with the left gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - lift - - wipe - - handover - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 23847 - dataset_size: 217.68 MB - data_structure: 'Agilex_Cobot_Magic_erase_board_left_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- backup - - | |-- data - - | | `-- chunk-000 - - | `-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(37 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang 
Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_erase_board_left_side.yaml b/dataset_info/Agilex_Cobot_Magic_erase_board_left_side.yaml index 5c0fb4d8330d03578f4e8b4c061fb206154de942..aa8137401e836bee4a5f9aa8eb0467821ef1eee3 100644 --- a/dataset_info/Agilex_Cobot_Magic_erase_board_left_side.yaml +++ b/dataset_info/Agilex_Cobot_Magic_erase_board_left_side.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -51,31 +51,40 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: wipe off the handwriting on the whiteboard. +task_instruction: +- wipe off the handwriting on the whiteboard. sub_tasks: -- Place the eraser with the left gripper -- Grasp the eraser with the left gripper -- End -- Grasp the eraser and wipe the blackboard with the right gripper -- Place the eraser with the right gripper -- Grasp the eraser with the right gripper -- Grasp the eraser and wipe the blackboard with the left gripper -- 'null' +- subtask: Place the eraser with the left gripper + subtask_index: 0 +- subtask: Grasp the eraser with the left gripper + subtask_index: 1 +- subtask: End + subtask_index: 2 +- subtask: Grasp the eraser and wipe the blackboard with the right gripper + subtask_index: 3 +- subtask: Place the eraser with the right gripper + subtask_index: 4 +- subtask: Grasp the eraser with the right gripper + subtask_index: 5 +- subtask: Grasp the eraser and wipe the blackboard with the left gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - lift - lower - wipe -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -83,13 +92,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -97,8 +103,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 101 total_frames: 51478 fps: 30 @@ -199,11 +204,9 @@ data_structure: 'Agilex_Cobot_Magic_erase_board_left_side_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:100 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -476,7 +479,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -484,7 +487,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. 
support_info: For technical support, please open an issue on our GitHub repository. @@ -511,210 +513,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_erase_board_left_side - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: education - level2: school - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - wipe off the handwriting on the whiteboard. 
- sub_tasks: - - subtask: Place the eraser with the left gripper - subtask_index: 0 - - subtask: Grasp the eraser with the left gripper - subtask_index: 1 - - subtask: End - subtask_index: 2 - - subtask: Grasp the eraser and wipe the blackboard with the right gripper - subtask_index: 3 - - subtask: Place the eraser with the right gripper - subtask_index: 4 - - subtask: Grasp the eraser with the right gripper - subtask_index: 5 - - subtask: Grasp the eraser and wipe the blackboard with the left gripper - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - lift - - lower - - wipe - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 51478 - dataset_size: 490.77 MB - data_structure: 'Agilex_Cobot_Magic_erase_board_left_side_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- backup - - | |-- data - - | | `-- chunk-000 - - | `-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- 
episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (89 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, 
Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_erase_board_passing_left_to_right.yaml b/dataset_info/Agilex_Cobot_Magic_erase_board_passing_left_to_right.yaml index ed1d6f3230239e3aeecc87e23605e689df8cdd22..e68b575fddb2572afdf27d6c7942de8ea85fc2eb 100644 --- a/dataset_info/Agilex_Cobot_Magic_erase_board_passing_left_to_right.yaml +++ b/dataset_info/Agilex_Cobot_Magic_erase_board_passing_left_to_right.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -51,35 +51,45 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use the left gripper to place the eraser in the right, use the right - gripper to pick up the eraser, wipe the notes on the whiteboard clean, and then - put it down. +task_instruction: +- use the left gripper to place the eraser in the right, use the right gripper to + pick up the eraser, wipe the notes on the whiteboard clean, and then put it down. sub_tasks: -- Right gripper -- Move the board eraser to the center of the whiteboard with left gripper -- Wipe off the writing on the board with right gripper -- Place the board eraser on the right side of board with right gripper -- Move the board eraser to the right of the whiteboard with right gripper -- Pick up the board eraser with right gripper -- Move the board eraser to the right of the whiteboard with left gripper -- End -- Left gripper -- 'null' +- subtask: Right gripper + subtask_index: 0 +- subtask: Move the board eraser to the center of the whiteboard with left gripper + subtask_index: 1 +- subtask: Wipe off the writing on the board with right gripper + subtask_index: 2 +- subtask: Place the board eraser on the right side of board with right gripper + subtask_index: 3 +- subtask: Move the board eraser to the right of the whiteboard with right gripper + subtask_index: 4 +- subtask: Pick up the board eraser with right gripper + subtask_index: 5 +- subtask: Move the board eraser to the right of the whiteboard with left gripper + 
subtask_index: 6 +- subtask: End + subtask_index: 7 +- subtask: Left gripper + subtask_index: 8 +- subtask: 'null' + subtask_index: 9 atomic_actions: - grasp - lift - wipe - handover -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -87,13 +97,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -101,8 +108,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 38393 fps: 30 @@ -187,11 +193,9 @@ data_structure: 'Agilex_Cobot_Magic_erase_board_passing_left_to_right_qced_hardl |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -464,7 +468,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended 
format based on LeRobot and is @@ -472,7 +476,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -499,199 +502,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_erase_board_passing_left_to_right - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: education - level2: school - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use the left gripper to place the eraser in the right, use the right gripper to - pick up the eraser, wipe the notes on the whiteboard clean, and then put it down. 
- sub_tasks: - - subtask: Right gripper - subtask_index: 0 - - subtask: Move the board eraser to the center of the whiteboard with left gripper - subtask_index: 1 - - subtask: Wipe off the writing on the board with right gripper - subtask_index: 2 - - subtask: Place the board eraser on the right side of board with right gripper - subtask_index: 3 - - subtask: Move the board eraser to the right of the whiteboard with right gripper - subtask_index: 4 - - subtask: Pick up the board eraser with right gripper - subtask_index: 5 - - subtask: Move the board eraser to the right of the whiteboard with left gripper - subtask_index: 6 - - subtask: End - subtask_index: 7 - - subtask: Left gripper - subtask_index: 8 - - subtask: 'null' - subtask_index: 9 - atomic_actions: - - grasp - - lift - - wipe - - handover - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 38393 - dataset_size: 335.24 MB - data_structure: 'Agilex_Cobot_Magic_erase_board_passing_left_to_right_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | 
|-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (38 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi 
Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_erase_board_passing_right_to_left.yaml b/dataset_info/Agilex_Cobot_Magic_erase_board_passing_right_to_left.yaml index b4852177d94573020834ad42ba5cbc6737437420..bfc54fe6cda8e0f195a35008d5ded269f01f38dc 100644 --- a/dataset_info/Agilex_Cobot_Magic_erase_board_passing_right_to_left.yaml +++ b/dataset_info/Agilex_Cobot_Magic_erase_board_passing_right_to_left.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -51,32 +51,39 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use the right gripper to place the eraser in the left, use the left - gripper to pick up the eraser, wipe the notes on the whiteboard clean, and then - put it down. +task_instruction: +- use the right gripper to place the eraser in the left, use the left gripper to pick + up the eraser, wipe the notes on the whiteboard clean, and then put it down. 
sub_tasks: -- Place the eraser with the left gripper -- Grasp the eraser with the left gripper -- Abnormal -- End -- Move the eraser to the left side of the blackboard with the right gripper -- Grasp the eraser and wipe the blackboard with the left gripper -- 'null' +- subtask: Place the eraser with the left gripper + subtask_index: 0 +- subtask: Grasp the eraser with the left gripper + subtask_index: 1 +- subtask: Abnormal + subtask_index: 2 +- subtask: End + subtask_index: 3 +- subtask: Move the eraser to the left side of the blackboard with the right gripper + subtask_index: 4 +- subtask: Grasp the eraser and wipe the blackboard with the left gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - lift - wipe - handover -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -84,13 +91,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -98,8 +102,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 47 total_frames: 38184 fps: 30 @@ -184,11 +187,9 @@ data_structure: 
'Agilex_Cobot_Magic_erase_board_passing_right_to_left_qced_hardl |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:46 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -461,7 +462,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -469,7 +470,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -496,193 +496,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. 
- extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_erase_board_passing_right_to_left - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: education - level2: school - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use the right gripper to place the eraser in the left, use the left gripper to - pick up the eraser, wipe the notes on the whiteboard clean, and then put it down. - sub_tasks: - - subtask: Place the eraser with the left gripper - subtask_index: 0 - - subtask: Grasp the eraser with the left gripper - subtask_index: 1 - - subtask: Abnormal - subtask_index: 2 - - subtask: End - subtask_index: 3 - - subtask: Move the eraser to the left side of the blackboard with the right gripper - subtask_index: 4 - - subtask: Grasp the eraser and wipe the blackboard with the left gripper - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - lift - - wipe - - handover - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 38184 - dataset_size: 334.71 MB - data_structure: 'Agilex_Cobot_Magic_erase_board_passing_right_to_left_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (35 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_erase_board_right.yaml b/dataset_info/Agilex_Cobot_Magic_erase_board_right.yaml index a476c395bb9e17bf0be033230979bcaea1aa7ca2..5b954e3578e5d9de17268d29ff3c79e566c595fe 100644 --- a/dataset_info/Agilex_Cobot_Magic_erase_board_right.yaml +++ b/dataset_info/Agilex_Cobot_Magic_erase_board_right.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -51,31 +51,39 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use the right gripper to pick up the board eraser, wipe the notes - on the whiteboard clean, and then put them back in place. +task_instruction: +- use the right gripper to pick up the board eraser, wipe the notes on the whiteboard + clean, and then put them back in place. 
sub_tasks: -- Right gripper -- Wipe off the writing on the board with right gripper -- Place the board eraser on the right side of board with right gripper -- Move the board eraser to the right of the whiteboard with right gripper -- Pick up the board eraser with right gripper -- End -- 'null' +- subtask: Right gripper + subtask_index: 0 +- subtask: Wipe off the writing on the board with right gripper + subtask_index: 1 +- subtask: Place the board eraser on the right side of board with right gripper + subtask_index: 2 +- subtask: Move the board eraser to the right of the whiteboard with right gripper + subtask_index: 3 +- subtask: Pick up the board eraser with right gripper + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - lift - wipe - handover -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -83,13 +91,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -97,8 +102,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 28998 fps: 30 @@ -183,11 +187,9 @@ data_structure: 'Agilex_Cobot_Magic_erase_board_right_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -460,7 +462,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -468,7 +470,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. 
support_info: For technical support, please open an issue on our GitHub repository. @@ -495,193 +496,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_erase_board_right - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: education - level2: school - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use the right gripper to pick up the board eraser, wipe the notes on the whiteboard - clean, and then put them back in place. 
- sub_tasks: - - subtask: Right gripper - subtask_index: 0 - - subtask: Wipe off the writing on the board with right gripper - subtask_index: 1 - - subtask: Place the board eraser on the right side of board with right gripper - subtask_index: 2 - - subtask: Move the board eraser to the right of the whiteboard with right gripper - subtask_index: 3 - - subtask: Pick up the board eraser with right gripper - subtask_index: 4 - - subtask: End - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - lift - - wipe - - handover - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 28998 - dataset_size: 249.99 MB - data_structure: 'Agilex_Cobot_Magic_erase_board_right_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(38 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang 
Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_fold_T-shirts.yaml b/dataset_info/Agilex_Cobot_Magic_fold_T-shirts.yaml new file mode 100644 index 0000000000000000000000000000000000000000..c2230d0a7cd0aa1249f7d3aa65883e7c9ad288e9 --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_fold_T-shirts.yaml @@ -0,0 +1,444 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. 
+extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_fold_T-shirts +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: office_workspace + level2: office + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: black_T-shirt + level1: clothing + level2: black_T-shirt + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- fold the clothes on the table. +sub_tasks: +- subtask: Lift the black T-shirt with the left gripper + subtask_index: 0 +- subtask: Abnormal + subtask_index: 1 +- subtask: Lift the black T-shirt with the right gripper + subtask_index: 2 +- subtask: Grasp the black T-shirt with the left gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: Fold the black T-shirt downward with the right gripper + subtask_index: 5 +- subtask: Grasp the black T-shirt with the right gripper + subtask_index: 6 +- subtask: Fold the black T-shirt downward with the left gripper + subtask_index: 7 +- subtask: Fold the black T-shirt from right to left with right gripper + subtask_index: 8 +- subtask: Use the left gripper to tidy up the clothes + subtask_index: 9 +- subtask: 'null' + subtask_index: 10 +atomic_actions: +- grasp +- fold +- lift +- lower +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. 
+sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 100 + total_frames: 78640 + fps: 30 + total_tasks: 11 + total_videos: 300 + total_chunks: 1 + chunks_size: 1000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 913.84 MB +frame_num: 78640 +dataset_size: 913.84 MB +data_structure: "Agilex_Cobot_Magic_fold_T-shirts_qced_hardlink/\n|-- annotations\n\ + | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n|\ + \ |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ + | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ + \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ + | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ + \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n\ + | |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| \ + \ |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n\ + | `-- episode_000011.parquet\n| `-- ... 
(88 more entries)\n|-- meta\n\ + | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ + \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_head_rgb\n\ + \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:99 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 26 + names: + 
- left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + 
eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_fold_jeans_shorts_children's.yaml b/dataset_info/Agilex_Cobot_Magic_fold_jeans_shorts_children's.yaml index 
a96ef79dfe6a2a3530bfdb09a7f85f99b6dc15b3..137da5beefbc2fde9178f44bcb8ce6fe4948ef19 100644 --- a/dataset_info/Agilex_Cobot_Magic_fold_jeans_shorts_children's.yaml +++ b/dataset_info/Agilex_Cobot_Magic_fold_jeans_shorts_children's.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -51,34 +51,44 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: Fold the children's denim shorts in half with two grippers, then - fold them in half again, and use the left claw to place the folded children's denim - shorts on the tray. +task_instruction: +- Fold the children's denim shorts in half with two grippers, then fold them in half + again, and use the left claw to place the folded children's denim shorts on the + tray. 
sub_tasks: -- Grasp the blue trousers with the right gripper -- Fold the blue trousers upwards with the right gripper -- Fold the blue trousers from right to left with the right gripper -- Place the folded blue trousers on the green tray with the left gripper -- Grasp the blue trousers with the left gripper -- Fold the blue trousers upwards with the left gripper -- Fold the blue trousers from left to right with the left gripper -- end -- 'null' +- subtask: Grasp the blue trousers with the right gripper + subtask_index: 0 +- subtask: Fold the blue trousers upwards with the right gripper + subtask_index: 1 +- subtask: Fold the blue trousers from right to left with the right gripper + subtask_index: 2 +- subtask: Place the folded blue trousers on the green tray with the left gripper + subtask_index: 3 +- subtask: Grasp the blue trousers with the left gripper + subtask_index: 4 +- subtask: Fold the blue trousers upwards with the left gripper + subtask_index: 5 +- subtask: Fold the blue trousers from left to right with the left gripper + subtask_index: 6 +- subtask: end + subtask_index: 7 +- subtask: 'null' + subtask_index: 8 atomic_actions: - grasp - lift - lower - fold -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -86,13 +96,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -100,8 +107,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 60562 fps: 30 @@ -202,11 +208,9 @@ data_structure: 'Agilex_Cobot_Magic_fold_jeans_shorts_children_s_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -479,7 +483,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -487,7 +491,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. 
support_info: For technical support, please open an issue on our GitHub repository. @@ -514,214 +517,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_fold_jeans_shorts_children's - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: bedroom - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - Fold the children's denim shorts in half with two grippers, then fold them in - half again, and use the left claw to place the folded children's denim shorts - on the tray. 
- sub_tasks: - - subtask: Grasp the blue trousers with the right gripper - subtask_index: 0 - - subtask: Fold the blue trousers upwards with the right gripper - subtask_index: 1 - - subtask: Fold the blue trousers from right to left with the right gripper - subtask_index: 2 - - subtask: Place the folded blue trousers on the green tray with the left gripper - subtask_index: 3 - - subtask: Grasp the blue trousers with the left gripper - subtask_index: 4 - - subtask: Fold the blue trousers upwards with the left gripper - subtask_index: 5 - - subtask: Fold the blue trousers from left to right with the left gripper - subtask_index: 6 - - subtask: end - subtask_index: 7 - - subtask: 'null' - subtask_index: 8 - atomic_actions: - - grasp - - lift - - lower - - fold - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 60562 - dataset_size: 912.51 MB - data_structure: 'Agilex_Cobot_Magic_fold_jeans_shorts_children_s_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- backup - - | |-- data - - | | `-- chunk-000 - - | `-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- 
episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (38 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_fold_short_sleeve_black.yaml b/dataset_info/Agilex_Cobot_Magic_fold_short_sleeve_black.yaml index 0162169e52e22c6fc19d94c6639af6764763e702..b635ffd6ffb8e2080a8ca874bf214c2fdf8b3d5e 100644 --- a/dataset_info/Agilex_Cobot_Magic_fold_short_sleeve_black.yaml +++ b/dataset_info/Agilex_Cobot_Magic_fold_short_sleeve_black.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -51,36 +51,49 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use two grippers to fold the black short sleeve, and use the left - claw to place the folded black short sleeve on the tray. +task_instruction: +- use two grippers to fold the black short sleeve, and use the left claw to place + the folded black short sleeve on the tray. 
sub_tasks: -- Lift the black T-shirt with the left gripper -- Lift the black T-shirt with the right gripper -- Grasp the black T-shirt with the left gripper -- Fold the black T-shirt downward with the right gripper -- Grasp the black T-shirt with the right gripper -- Fold the black T-shirt downward with the left gripper -- Fold the black T-shirt from right to left with right gripper -- abnormal -- end -- Fold the black T-shirt from left to right with left gripper -- Place the folded black T-shirt on the green tray with the left gripper -- 'null' +- subtask: Lift the black T-shirt with the left gripper + subtask_index: 0 +- subtask: Lift the black T-shirt with the right gripper + subtask_index: 1 +- subtask: Grasp the black T-shirt with the left gripper + subtask_index: 2 +- subtask: Fold the black T-shirt downward with the right gripper + subtask_index: 3 +- subtask: Grasp the black T-shirt with the right gripper + subtask_index: 4 +- subtask: Fold the black T-shirt downward with the left gripper + subtask_index: 5 +- subtask: Fold the black T-shirt from right to left with right gripper + subtask_index: 6 +- subtask: abnormal + subtask_index: 7 +- subtask: end + subtask_index: 8 +- subtask: Fold the black T-shirt from left to right with left gripper + subtask_index: 9 +- subtask: Place the folded black T-shirt on the green tray with the left gripper + subtask_index: 10 +- subtask: 'null' + subtask_index: 11 atomic_actions: - grasp - lift - lower - fold -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -88,13 +101,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -102,8 +112,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 76223 fps: 30 @@ -188,11 +197,9 @@ data_structure: 'Agilex_Cobot_Magic_fold_short_sleeve_black_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -465,7 +472,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -473,7 +480,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. 
support_info: For technical support, please open an issue on our GitHub repository. @@ -500,203 +506,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_fold_short_sleeve_black - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: bedroom - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use two grippers to fold the black short sleeve, and use the left claw to place - the folded black short sleeve on the tray. 
- sub_tasks: - - subtask: Lift the black T-shirt with the left gripper - subtask_index: 0 - - subtask: Lift the black T-shirt with the right gripper - subtask_index: 1 - - subtask: Grasp the black T-shirt with the left gripper - subtask_index: 2 - - subtask: Fold the black T-shirt downward with the right gripper - subtask_index: 3 - - subtask: Grasp the black T-shirt with the right gripper - subtask_index: 4 - - subtask: Fold the black T-shirt downward with the left gripper - subtask_index: 5 - - subtask: Fold the black T-shirt from right to left with right gripper - subtask_index: 6 - - subtask: abnormal - subtask_index: 7 - - subtask: end - subtask_index: 8 - - subtask: Fold the black T-shirt from left to right with left gripper - subtask_index: 9 - - subtask: Place the folded black T-shirt on the green tray with the left gripper - subtask_index: 10 - - subtask: 'null' - subtask_index: 11 - atomic_actions: - - grasp - - lift - - lower - - fold - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 76223 - dataset_size: 999.69 MB - data_structure: 'Agilex_Cobot_Magic_fold_short_sleeve_black_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (38 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_fold_short_sleeve_white.yaml b/dataset_info/Agilex_Cobot_Magic_fold_short_sleeve_white.yaml new file mode 100644 index 0000000000000000000000000000000000000000..c23dd3f4bc2530b24da7f6f764cf5b1be9335cef --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_fold_short_sleeve_white.yaml @@ -0,0 +1,451 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_fold_short_sleeve_white +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: household + level2: bedroom + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: white_T-shirt + level1: clothing + level2: white_T-shirt + level3: null + level4: null + level5: null +- object_name: green_tray + level1: kitchen_supplies + level2: green_tray + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. 
+task_instruction: +- use two grippers to fold the white short sleeve, and use the left claw to place + the folded white short sleeve on the tray. +sub_tasks: +- subtask: Lift the white T-shirt with the left gripper + subtask_index: 0 +- subtask: Place the folded white T-shirt on the green tray with the left gripper + subtask_index: 1 +- subtask: Fold the white T-shirt from left to right with left gripper + subtask_index: 2 +- subtask: Fold the white T-shirt from right to left with right gripper + subtask_index: 3 +- subtask: Grasp the white T-shirt with the right gripper + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: Fold the white T-shirt downward with the left gripper + subtask_index: 6 +- subtask: Fold the white T-shirt downward with the right gripper + subtask_index: 7 +- subtask: Lift the white T-shirt with the right gripper + subtask_index: 8 +- subtask: Grasp the white T-shirt with the left gripper + subtask_index: 9 +- subtask: 'null' + subtask_index: 10 +atomic_actions: +- grasp +- lift +- lower +- fold +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. 
+sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 50 + total_frames: 70709 + fps: 30 + total_tasks: 11 + total_videos: 150 + total_chunks: 1 + chunks_size: 1000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 701.06 MB +frame_num: 70709 +dataset_size: 701.06 MB +data_structure: "Agilex_Cobot_Magic_fold_short_sleeve_white_qced_hardlink/\n|-- annotations\n\ + | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n|\ + \ |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ + | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ + \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ + | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ + \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n\ + | |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| \ + \ |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n\ + | `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n\ + | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ + \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_head_rgb\n\ + \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:49 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 26 + names: + 
- left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + 
eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_fold_shorts_khaki.yaml b/dataset_info/Agilex_Cobot_Magic_fold_shorts_khaki.yaml index 
28638b2dfd7e8a5bbecc76ed3c2351af273930ef..3c4f3ee7c763a0d1581bfce04b293cc210664203 100644 --- a/dataset_info/Agilex_Cobot_Magic_fold_shorts_khaki.yaml +++ b/dataset_info/Agilex_Cobot_Magic_fold_shorts_khaki.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -51,33 +51,43 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: fold the khaki jeans in half with two grippers, then fold them in - half again, and use the left claw to place the folded fold khaki jeans on the tray. +task_instruction: +- fold the khaki jeans in half with two grippers, then fold them in half again, and + use the left claw to place the folded fold khaki jeans on the tray. 
sub_tasks: -- Fold the khaki trousers from right to left with the right gripper -- Fold the khaki trousers upwards with the right gripper -- Fold the khaki trousers from left to right with the left gripper -- Fold the khaki trousers upwards with the left gripper -- Place the folded khaki trousers on the green tray with the left gripper -- Grasp the khaki trousers with the left gripper -- Grasp the khaki trousers with the right gripper -- end -- 'null' +- subtask: Fold the khaki trousers from right to left with the right gripper + subtask_index: 0 +- subtask: Fold the khaki trousers upwards with the right gripper + subtask_index: 1 +- subtask: Fold the khaki trousers from left to right with the left gripper + subtask_index: 2 +- subtask: Fold the khaki trousers upwards with the left gripper + subtask_index: 3 +- subtask: Place the folded khaki trousers on the green tray with the left gripper + subtask_index: 4 +- subtask: Grasp the khaki trousers with the left gripper + subtask_index: 5 +- subtask: Grasp the khaki trousers with the right gripper + subtask_index: 6 +- subtask: end + subtask_index: 7 +- subtask: 'null' + subtask_index: 8 atomic_actions: - grasp - lift - lower - fold -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -85,13 +95,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -99,8 +106,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 55392 fps: 30 @@ -185,11 +191,9 @@ data_structure: 'Agilex_Cobot_Magic_fold_shorts_khaki_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -462,7 +466,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -470,7 +474,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. 
support_info: For technical support, please open an issue on our GitHub repository. @@ -497,197 +500,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_fold_shorts_khaki - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: bedroom - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - fold the khaki jeans in half with two grippers, then fold them in half again, - and use the left claw to place the folded fold khaki jeans on the tray. 
- sub_tasks: - - subtask: Fold the khaki trousers from right to left with the right gripper - subtask_index: 0 - - subtask: Fold the khaki trousers upwards with the right gripper - subtask_index: 1 - - subtask: Fold the khaki trousers from left to right with the left gripper - subtask_index: 2 - - subtask: Fold the khaki trousers upwards with the left gripper - subtask_index: 3 - - subtask: Place the folded khaki trousers on the green tray with the left gripper - subtask_index: 4 - - subtask: Grasp the khaki trousers with the left gripper - subtask_index: 5 - - subtask: Grasp the khaki trousers with the right gripper - subtask_index: 6 - - subtask: end - subtask_index: 7 - - subtask: 'null' - subtask_index: 8 - atomic_actions: - - grasp - - lift - - lower - - fold - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 55392 - dataset_size: 637.64 MB - data_structure: 'Agilex_Cobot_Magic_fold_shorts_khaki_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- 
episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (38 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, 
Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_fold_towel.yaml b/dataset_info/Agilex_Cobot_Magic_fold_towel.yaml index aa578650c8a573978c854cf1658bb9fb8069f939..7cf3be0f430da9a9153fd35707fe08a9cdba8a9d 100644 --- a/dataset_info/Agilex_Cobot_Magic_fold_towel.yaml +++ b/dataset_info/Agilex_Cobot_Magic_fold_towel.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -51,38 +51,54 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: fold the towels on the table. +task_instruction: +- fold the towels on the table. sub_tasks: -- Fold the white towel upwards with the right gripper -- Grasp the orange towel with the left gripper -- Grasp the orange towel with the right gripper -- Fold the orange towel upwards with the right gripper -- Fold the white towel upwards with the left gripper -- Fold the orange towel from right to left with right gripper -- Press on the orange towel with the left gripper -- Fold the white towel from right to left with right gripper -- End -- Arrange the orange towel with the right hand -- Grasp the white towel with the right gripper -- Fold the orange towel upwards with the left gripper -- Grasp the white towel with the left gripper -- Press on the white towel with the left gripper -- 'null' +- subtask: Fold the white towel upwards with the right gripper + subtask_index: 0 +- subtask: Grasp the orange towel with the left gripper + subtask_index: 1 +- subtask: Grasp the orange towel with the right gripper + subtask_index: 2 +- subtask: Fold the orange towel upwards with the right gripper + subtask_index: 3 +- subtask: Fold the white towel upwards with the left gripper + subtask_index: 4 +- subtask: Fold the orange towel from right to left with right gripper + subtask_index: 5 +- subtask: Press on the orange towel with the left gripper + subtask_index: 6 +- subtask: Fold the white towel from 
right to left with right gripper + subtask_index: 7 +- subtask: End + subtask_index: 8 +- subtask: Arrange the orange towel with the right hand + subtask_index: 9 +- subtask: Grasp the white towel with the right gripper + subtask_index: 10 +- subtask: Fold the orange towel upwards with the left gripper + subtask_index: 11 +- subtask: Grasp the white towel with the left gripper + subtask_index: 12 +- subtask: Press on the white towel with the left gripper + subtask_index: 13 +- subtask: 'null' + subtask_index: 14 atomic_actions: - grasp - fold - lift - lower -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -90,13 +106,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -104,8 +117,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 100 total_frames: 76161 fps: 30 @@ -190,11 +202,9 @@ data_structure: 'Agilex_Cobot_Magic_fold_towel_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:99 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -467,7 +477,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -475,7 +485,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -502,208 +511,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_fold_towel - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: office_workspace - level2: office - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. 
- objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - fold the towels on the table. - sub_tasks: - - subtask: Fold the white towel upwards with the right gripper - subtask_index: 0 - - subtask: Grasp the orange towel with the left gripper - subtask_index: 1 - - subtask: Grasp the orange towel with the right gripper - subtask_index: 2 - - subtask: Fold the orange towel upwards with the right gripper - subtask_index: 3 - - subtask: Fold the white towel upwards with the left gripper - subtask_index: 4 - - subtask: Fold the orange towel from right to left with right gripper - subtask_index: 5 - - subtask: Press on the orange towel with the left gripper - subtask_index: 6 - - subtask: Fold the white towel from right to left with right gripper - subtask_index: 7 - - subtask: End - subtask_index: 8 - - subtask: Arrange the orange towel with the right hand - subtask_index: 9 - - subtask: Grasp the white towel with the right gripper - subtask_index: 10 - - subtask: Fold the orange towel upwards with the left gripper - subtask_index: 11 - - subtask: Grasp the white towel with the left gripper - subtask_index: 12 - - subtask: Press on the white towel with the left gripper - subtask_index: 13 - - subtask: 'null' - subtask_index: 14 - atomic_actions: - - grasp - - fold - - lift - - lower - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 76161 - dataset_size: 834.61 MB - data_structure: 'Agilex_Cobot_Magic_fold_towel_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (88 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_fold_towel_blue.yaml b/dataset_info/Agilex_Cobot_Magic_fold_towel_blue.yaml new file mode 100644 index 0000000000000000000000000000000000000000..284db413b33ebf0c85084bb80dce7869ca393fc8 --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_fold_towel_blue.yaml @@ -0,0 +1,450 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_fold_towel_blue +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: office_workspace + level2: office + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: basket + level1: home_storage + level2: basket + level3: null + level4: null + level5: null +- object_name: blue_towel + level1: daily_necessities + level2: blue_towel + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- fold the towels on the table. 
+sub_tasks: +- subtask: 'Left hand: adjust the brown towel' + subtask_index: 0 +- subtask: 'Left hand: adjust the blue towel' + subtask_index: 1 +- subtask: 'Right hand: adjust the blue towel' + subtask_index: 2 +- subtask: 'Right hand: grab the bottom right corner of blue towel' + subtask_index: 3 +- subtask: 'Left hand: fold the blue towel from left to right' + subtask_index: 4 +- subtask: 'Left hand: fold the blue towel up' + subtask_index: 5 +- subtask: End + subtask_index: 6 +- subtask: 'Right hand: fold the blue towel up' + subtask_index: 7 +- subtask: 'Right hand: spread the blue towel flat on the table' + subtask_index: 8 +- subtask: 'Left hand: grab the bottom left corner of blue towel' + subtask_index: 9 +- subtask: 'null' + subtask_index: 10 +atomic_actions: +- grasp +- fold +- lift +- lower +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_right_wrist_rgb +- cam_left_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=h264, pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=h264, + pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=h264, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 185 + total_frames: 146917 + fps: 30 + total_tasks: 11 + total_videos: 555 + total_chunks: 1 + chunks_size: 10000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 10.04 GB 
+frame_num: 146917 +dataset_size: 10.04 GB +data_structure: "Agilex_Cobot_Magic_fold_towel_blue_qced_hardlink/\n|-- annotations\n\ + | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n|\ + \ |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ + | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ + \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ + | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ + \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n\ + | |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| \ + \ |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n\ + | `-- episode_000011.parquet\n| `-- ... (173 more entries)\n|-- meta\n\ + | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ + \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_head_rgb\n\ + \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:184 +features: + action: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - left_gripper_open + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + - right_gripper_open + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - 
left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - left_gripper_open + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + - right_gripper_open + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.fps: 30.0 + video.height: 480 + video.width: 640 + video.channels: 3 + video.codec: h264 + video.pix_fmt: yuv420p + video.is_depth_map: false + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.fps: 30.0 + video.height: 480 + video.width: 640 + video.channels: 3 + video.codec: h264 + video.pix_fmt: yuv420p + video.is_depth_map: false + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.fps: 30.0 + video.height: 480 + video.width: 640 + video.channels: 3 + video.codec: h264 + video.pix_fmt: yuv420p + video.is_depth_map: false + has_audio: false + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - 
left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This 
dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: 
data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_right_wrist_rgb/episode_{id}.mp{id} +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_fold_towel_blue_tray.yaml b/dataset_info/Agilex_Cobot_Magic_fold_towel_blue_tray.yaml index 06322f56b216b626262347a096be16fd893bcf41..cb920075d58c3a59cd4f4508ade09b8fc787e36c 100644 --- a/dataset_info/Agilex_Cobot_Magic_fold_towel_blue_tray.yaml +++ b/dataset_info/Agilex_Cobot_Magic_fold_towel_blue_tray.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -51,33 +51,42 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use both grippers to hold the edge of the towel and fold it forward.,use - the right gripper to hold the right edge and fold it again,use a gripper to place - the folded towel on the tray. 
+task_instruction: +- use both grippers to hold the edge of the towel and fold it forward.,use the right + gripper to hold the right edge and fold it again,use a gripper to place the folded + towel on the tray. sub_tasks: -- Fold the blue white towel upwards with the right gripper -- Fold the blue white towel upwards with the left gripper -- Grasp the blue white towel with the right gripper -- Fold the blue white towel from right to left with right gripper -- Grasp the blue white towel with the left gripper -- end -- Place the folded gray towel on the green tray with the right gripper -- 'null' +- subtask: Fold the blue white towel upwards with the right gripper + subtask_index: 0 +- subtask: Fold the blue white towel upwards with the left gripper + subtask_index: 1 +- subtask: Grasp the blue white towel with the right gripper + subtask_index: 2 +- subtask: Fold the blue white towel from right to left with right gripper + subtask_index: 3 +- subtask: Grasp the blue white towel with the left gripper + subtask_index: 4 +- subtask: end + subtask_index: 5 +- subtask: Place the folded gray towel on the green tray with the right gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - lift - lower - fold -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -85,13 +94,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -99,8 +105,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 63491 fps: 30 @@ -185,11 +190,9 @@ data_structure: 'Agilex_Cobot_Magic_fold_towel_blue_tray_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -462,7 +465,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -470,7 +473,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. 
support_info: For technical support, please open an issue on our GitHub repository. @@ -497,196 +499,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_fold_towel_blue_tray - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use both grippers to hold the edge of the towel and fold it forward.,use the right - gripper to hold the right edge and fold it again,use a gripper to place the folded - towel on the tray. 
- sub_tasks: - - subtask: Fold the blue white towel upwards with the right gripper - subtask_index: 0 - - subtask: Fold the blue white towel upwards with the left gripper - subtask_index: 1 - - subtask: Grasp the blue white towel with the right gripper - subtask_index: 2 - - subtask: Fold the blue white towel from right to left with right gripper - subtask_index: 3 - - subtask: Grasp the blue white towel with the left gripper - subtask_index: 4 - - subtask: end - subtask_index: 5 - - subtask: Place the folded gray towel on the green tray with the right gripper - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - lift - - lower - - fold - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 63491 - dataset_size: 935.31 MB - data_structure: 'Agilex_Cobot_Magic_fold_towel_blue_tray_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(38 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang 
Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_fold_towel_brown.yaml b/dataset_info/Agilex_Cobot_Magic_fold_towel_brown.yaml new file mode 100644 index 0000000000000000000000000000000000000000..a03311a8a83c67c925b3e1b00061c6a63b6271ee --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_fold_towel_brown.yaml @@ -0,0 +1,453 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. 
+extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_fold_towel_brown +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: household + level2: bathroom + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: table + level1: furniture + level2: table + level3: null + level4: null + level5: null +- object_name: basket + level1: home_storage + level2: basket + level3: null + level4: null + level5: null +- object_name: towel + level1: daily_necessities + level2: towel + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- fold the towel on the table three times with two grippers. 
+sub_tasks: +- subtask: 'Left hand: adjust the brown towel' + subtask_index: 0 +- subtask: 'Left hand: spread the brown towel flat on the table' + subtask_index: 1 +- subtask: 'Right hand: spread the brown towel flat on the table' + subtask_index: 2 +- subtask: End + subtask_index: 3 +- subtask: 'Left hand: fold the brown towel from left to right' + subtask_index: 4 +- subtask: 'Right hand: grab the bottom right corner of brown towel' + subtask_index: 5 +- subtask: 'Right hand: adjust the brown towel' + subtask_index: 6 +- subtask: 'Left hand: fold the brown towel up' + subtask_index: 7 +- subtask: 'Right hand: fold the brown towel up' + subtask_index: 8 +- subtask: 'Left hand: grab the bottom left corner of brown towel' + subtask_index: 9 +- subtask: 'null' + subtask_index: 10 +atomic_actions: +- grasp +- unfold +- fold +- pick +- place +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. 
+sensor_list: +- cam_head_rgb +- cam_right_wrist_rgb +- cam_left_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=h264, pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=h264, + pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=h264, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 387 + total_frames: 283464 + fps: 30 + total_tasks: 11 + total_videos: 1161 + total_chunks: 1 + chunks_size: 10000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 18.63 GB +frame_num: 283464 +dataset_size: 18.63 GB +data_structure: "Agilex_Cobot_Magic_fold_towel_brown_qced_hardlink/\n|-- annotations\n\ + | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n|\ + \ |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ + | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ + \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ + | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ + \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n\ + | |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| \ + \ |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n\ + | `-- episode_000011.parquet\n| `-- ... 
(375 more entries)\n|-- meta\n\ + | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ + \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_head_rgb\n\ + \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:386 + val: 334:376 + test: 376:417 +features: + action: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - left_gripper_open + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + - right_gripper_open + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - left_gripper_open + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + - right_gripper_open + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.fps: 30.0 + video.height: 480 + video.width: 640 + video.channels: 3 + 
video.codec: h264 + video.pix_fmt: yuv420p + video.is_depth_map: false + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.fps: 30.0 + video.height: 480 + video.width: 640 + video.channels: 3 + video.codec: h264 + video.pix_fmt: yuv420p + video.is_depth_map: false + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.fps: 30.0 + video.height: 480 + video.width: 640 + video.channels: 3 + video.codec: h264 + video.pix_fmt: yuv420p + video.is_depth_map: false + has_audio: false + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + names: + - left_eef_velocity + - 
right_eef_velocity + dtype: int32 + shape: + - 2 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_head_rgb/episode_{id}.mp{id} +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_fold_towel_grey_tray.yaml b/dataset_info/Agilex_Cobot_Magic_fold_towel_grey_tray.yaml index 
ebde96dc45e70ea324836714a681f17322309a92..6cdab5e056a6667503f191f7fd91dc04cc548eae 100644 --- a/dataset_info/Agilex_Cobot_Magic_fold_towel_grey_tray.yaml +++ b/dataset_info/Agilex_Cobot_Magic_fold_towel_grey_tray.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -51,33 +51,42 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use both grippers to hold the edge of the towel and fold it forward.,use - the right gripper to hold the right edge and fold it again,use a gripper to place - the folded towel on the tray. +task_instruction: +- use both grippers to hold the edge of the towel and fold it forward.,use the right + gripper to hold the right edge and fold it again,use a gripper to place the folded + towel on the tray. 
sub_tasks: -- Fold the grey towel from left to right with left gripper -- Place the folded grey towel on the tray with the right gripper -- Abnormal -- Fold the grey towel from right to left with right gripper -- Fold the grey towel upwards -- End -- Place the folded grey towel on the tray with the left gripper -- 'null' +- subtask: Fold the grey towel from left to right with left gripper + subtask_index: 0 +- subtask: Place the folded grey towel on the tray with the right gripper + subtask_index: 1 +- subtask: Abnormal + subtask_index: 2 +- subtask: Fold the grey towel from right to left with right gripper + subtask_index: 3 +- subtask: Fold the grey towel upwards + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: Place the folded grey towel on the tray with the left gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - lift - lower - fold -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -85,13 +94,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -99,8 +105,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 53308 fps: 30 @@ -185,11 +190,9 @@ data_structure: 'Agilex_Cobot_Magic_fold_towel_grey_tray_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -462,7 +465,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -470,7 +473,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. 
support_info: For technical support, please open an issue on our GitHub repository. @@ -497,196 +499,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_fold_towel_grey_tray - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use both grippers to hold the edge of the towel and fold it forward.,use the right - gripper to hold the right edge and fold it again,use a gripper to place the folded - towel on the tray. 
- sub_tasks: - - subtask: Fold the grey towel from left to right with left gripper - subtask_index: 0 - - subtask: Place the folded grey towel on the tray with the right gripper - subtask_index: 1 - - subtask: Abnormal - subtask_index: 2 - - subtask: Fold the grey towel from right to left with right gripper - subtask_index: 3 - - subtask: Fold the grey towel upwards - subtask_index: 4 - - subtask: End - subtask_index: 5 - - subtask: Place the folded grey towel on the tray with the left gripper - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - lift - - lower - - fold - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 53308 - dataset_size: 724.56 MB - data_structure: 'Agilex_Cobot_Magic_fold_towel_grey_tray_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(38 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang 
Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_fold_towel_pink_tray.yaml b/dataset_info/Agilex_Cobot_Magic_fold_towel_pink_tray.yaml index f12f1fba72ce5580ab33178caafe7ebf02766608..71b82f366b8dbaab2826f59e0e56ba87da44bc9c 100644 --- a/dataset_info/Agilex_Cobot_Magic_fold_towel_pink_tray.yaml +++ b/dataset_info/Agilex_Cobot_Magic_fold_towel_pink_tray.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -51,30 +51,36 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use both grippers to hold the edge of the towel and fold it forward.,use - the right gripper to hold the right edge and fold it again,use a gripper to place - the folded towel on the tray. +task_instruction: +- use both grippers to hold the edge of the towel and fold it forward.,use the right + gripper to hold the right edge and fold it again,use a gripper to place the folded + towel on the tray. sub_tasks: -- Fold the pink towel upwards -- End -- Place the folded pink towel on the tray with the left gripper -- Fold the pink towel from right to left with right gripper -- 'null' +- subtask: Fold the pink towel upwards + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Place the folded pink towel on the tray with the left gripper + subtask_index: 2 +- subtask: Fold the pink towel from right to left with right gripper + subtask_index: 3 +- subtask: 'null' + subtask_index: 4 atomic_actions: - grasp - lift - lower - fold -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -82,13 +88,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -96,8 +99,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 63678 fps: 30 @@ -182,11 +184,9 @@ data_structure: 'Agilex_Cobot_Magic_fold_towel_pink_tray_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -459,7 +459,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -467,7 +467,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. 
support_info: For technical support, please open an issue on our GitHub repository. @@ -494,190 +493,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_fold_towel_pink_tray - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use both grippers to hold the edge of the towel and fold it forward.,use the right - gripper to hold the right edge and fold it again,use a gripper to place the folded - towel on the tray. 
- sub_tasks: - - subtask: Fold the pink towel upwards - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Place the folded pink towel on the tray with the left gripper - subtask_index: 2 - - subtask: Fold the pink towel from right to left with right gripper - subtask_index: 3 - - subtask: 'null' - subtask_index: 4 - atomic_actions: - - grasp - - lift - - lower - - fold - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 63678 - dataset_size: 766.05 MB - data_structure: 'Agilex_Cobot_Magic_fold_towel_pink_tray_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(38 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang 
Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_fold_towel_purple.yaml b/dataset_info/Agilex_Cobot_Magic_fold_towel_purple.yaml new file mode 100644 index 0000000000000000000000000000000000000000..25e2d03742787117f497921ab08b9793d984de29 --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_fold_towel_purple.yaml @@ -0,0 +1,453 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. 
+extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_fold_towel_purple +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: household + level2: bathroom + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: table + level1: furniture + level2: table + level3: null + level4: null + level5: null +- object_name: basket + level1: home_storage + level2: basket + level3: null + level4: null + level5: null +- object_name: towel + level1: daily_necessities + level2: towel + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- fold the towel on the table three times with two grippers. 
+sub_tasks: +- subtask: 'Right hand: spread the purple towel flat on the table' + subtask_index: 0 +- subtask: 'Left hand: fold the purple towel up' + subtask_index: 1 +- subtask: 'Right hand: fold the purple towel up' + subtask_index: 2 +- subtask: 'Left hand: fold the purple towel from left to right' + subtask_index: 3 +- subtask: 'Left hand: adjust the purple towel' + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: 'Left hand: spread the purple towel flat on the table' + subtask_index: 6 +- subtask: 'Right hand: grab the bottom right corner of purple towel' + subtask_index: 7 +- subtask: 'Right hand: adjust the purple towel' + subtask_index: 8 +- subtask: 'Left hand: grab the bottom left corner of purple towel' + subtask_index: 9 +- subtask: 'null' + subtask_index: 10 +atomic_actions: +- grasp +- unfold +- fold +- pick +- place +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. 
+sensor_list: +- cam_head_rgb +- cam_right_wrist_rgb +- cam_left_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=h264, pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=h264, + pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=h264, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 175 + total_frames: 111222 + fps: 30 + total_tasks: 11 + total_videos: 525 + total_chunks: 1 + chunks_size: 10000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 7.28 GB +frame_num: 111222 +dataset_size: 7.28 GB +data_structure: "Agilex_Cobot_Magic_fold_towel_purple_qced_hardlink/\n|-- annotations\n\ + | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n|\ + \ |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ + | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ + \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ + | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ + \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n\ + | |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| \ + \ |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n\ + | `-- episode_000011.parquet\n| `-- ... 
(163 more entries)\n|-- meta\n\ + | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ + \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_head_rgb\n\ + \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:174 + val: 175:197 + test: 197:219 +features: + action: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - left_gripper_open + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + - right_gripper_open + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - left_gripper_open + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + - right_gripper_open + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.fps: 30.0 + video.height: 480 + video.width: 640 + video.channels: 3 + 
video.codec: h264 + video.pix_fmt: yuv420p + video.is_depth_map: false + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.fps: 30.0 + video.height: 480 + video.width: 640 + video.channels: 3 + video.codec: h264 + video.pix_fmt: yuv420p + video.is_depth_map: false + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.fps: 30.0 + video.height: 480 + video.width: 640 + video.channels: 3 + video.codec: h264 + video.pix_fmt: yuv420p + video.is_depth_map: false + has_audio: false + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + names: + - left_eef_velocity + - 
right_eef_velocity + dtype: int32 + shape: + - 2 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_head_rgb/episode_{id}.mp{id} +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_fold_towel_tray_twice.yaml b/dataset_info/Agilex_Cobot_Magic_fold_towel_tray_twice.yaml new file mode 100644 index 
0000000000000000000000000000000000000000..2d867d5b0f3b499ba2f30260f28c04ae05416379 --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_fold_towel_tray_twice.yaml @@ -0,0 +1,467 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_fold_towel_tray_twice +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: office_workspace + level2: office + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: white_tray + level1: kitchen_supplies + level2: white_tray + level3: null + level4: null + level5: null +- object_name: white_long_towel + level1: daily_necessities + level2: white_long_towel + level3: null + level4: null + level5: null +- object_name: pink_long_towel + level1: daily_necessities + level2: pink_long_towel + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- fold one of the towels by two grippers and place it on the tray,then fold another + towel by two grippers and place it on the folded towel. 
+sub_tasks: +- subtask: Grasp the pink towel with the right gripper + subtask_index: 0 +- subtask: Press the pink towel with the left gripper + subtask_index: 1 +- subtask: End + subtask_index: 2 +- subtask: Fold the pink towel from right to left with right gripper + subtask_index: 3 +- subtask: Place the pink towel on the white towel with the right gripper + subtask_index: 4 +- subtask: Grasp the white towel with the left gripper + subtask_index: 5 +- subtask: Move the pink towel to the plate with the right gripper + subtask_index: 6 +- subtask: Abnormal + subtask_index: 7 +- subtask: Press the white towel with the left gripper + subtask_index: 8 +- subtask: Move the white towel to the plate with the right gripper + subtask_index: 9 +- subtask: Grasp the white towel with the right gripper + subtask_index: 10 +- subtask: Fold the pink towel upwards with the left gripper + subtask_index: 11 +- subtask: Grasp the pink towel with the left gripper + subtask_index: 12 +- subtask: Place the white towel on the pink towel with the left gripper + subtask_index: 13 +- subtask: Fold the pink towel upwards with the right gripper + subtask_index: 14 +- subtask: Fold the white towel upwards with the right gripper + subtask_index: 15 +- subtask: Place the white towel on the pink towel with the right gripper + subtask_index: 16 +- subtask: Fold the white towel upwards with the left gripper + subtask_index: 17 +- subtask: Fold the white towel from right to left with right gripper + subtask_index: 18 +- subtask: 'null' + subtask_index: 19 +atomic_actions: +- grasp +- fold +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. 
+sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 195 + total_frames: 305054 + fps: 30 + total_tasks: 20 + total_videos: 585 + total_chunks: 1 + chunks_size: 1000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 3.54 GB +frame_num: 305054 +dataset_size: 3.54 GB +data_structure: "Agilex_Cobot_Magic_fold_towel_tray_twice_qced_hardlink/\n|-- annotations\n\ + | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n|\ + \ |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ + | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ + \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ + | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ + \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n\ + | |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| \ + \ |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n\ + | `-- episode_000011.parquet\n| `-- ... 
(183 more entries)\n|-- meta\n\ + | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ + \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_head_rgb\n\ + \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:194 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 26 + names: 
+ - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 
+ eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_fold_towel_yellow_tray.yaml b/dataset_info/Agilex_Cobot_Magic_fold_towel_yellow_tray.yaml index 
0c9160cb094c2c59f5ceb9aa097b685b29da6934..1e45819db888a7203a2f4b62063661de2917f5ac 100644 --- a/dataset_info/Agilex_Cobot_Magic_fold_towel_yellow_tray.yaml +++ b/dataset_info/Agilex_Cobot_Magic_fold_towel_yellow_tray.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -51,33 +51,42 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use both grippers to hold the edge of the towel and fold it forward.,use - the right gripper to hold the right edge and fold it again,use a gripper to place - the folded towel on the tray. +task_instruction: +- use both grippers to hold the edge of the towel and fold it forward.,use the right + gripper to hold the right edge and fold it again,use a gripper to place the folded + towel on the tray. 
sub_tasks: -- Abnormal -- Fold the yellow towel upwards -- End -- Fold the yellow towel from right to left with right gripper -- Fold the yellow towel from left to right with left gripper -- Place the folded yellow towel on the tray with the left gripper -- Place the folded yellow towel on the tray with the right gripper -- 'null' +- subtask: Abnormal + subtask_index: 0 +- subtask: Fold the yellow towel upwards + subtask_index: 1 +- subtask: End + subtask_index: 2 +- subtask: Fold the yellow towel from right to left with right gripper + subtask_index: 3 +- subtask: Fold the yellow towel from left to right with left gripper + subtask_index: 4 +- subtask: Place the folded yellow towel on the tray with the left gripper + subtask_index: 5 +- subtask: Place the folded yellow towel on the tray with the right gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - lift - lower - fold -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -85,13 +94,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -99,8 +105,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 49 total_frames: 48960 fps: 30 @@ -185,11 +190,9 @@ data_structure: 'Agilex_Cobot_Magic_fold_towel_yellow_tray_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:48 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -462,7 +465,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -470,7 +473,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. 
support_info: For technical support, please open an issue on our GitHub repository. @@ -497,196 +499,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_fold_towel_yellow_tray - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use both grippers to hold the edge of the towel and fold it forward.,use the right - gripper to hold the right edge and fold it again,use a gripper to place the folded - towel on the tray. 
- sub_tasks: - - subtask: Abnormal - subtask_index: 0 - - subtask: Fold the yellow towel upwards - subtask_index: 1 - - subtask: End - subtask_index: 2 - - subtask: Fold the yellow towel from right to left with right gripper - subtask_index: 3 - - subtask: Fold the yellow towel from left to right with left gripper - subtask_index: 4 - - subtask: Place the folded yellow towel on the tray with the left gripper - subtask_index: 5 - - subtask: Place the folded yellow towel on the tray with the right gripper - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - lift - - lower - - fold - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 48960 - dataset_size: 552.42 MB - data_structure: 'Agilex_Cobot_Magic_fold_towel_yellow_tray_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(37 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang 
Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_heat_burger.yaml b/dataset_info/Agilex_Cobot_Magic_heat_burger.yaml new file mode 100644 index 0000000000000000000000000000000000000000..01f6662171adde701e19df364596310e659f2eb0 --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_heat_burger.yaml @@ -0,0 +1,459 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. 
+extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_heat_burger +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: household + level2: kitchen + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: table + level1: furniture + level2: table + level3: null + level4: null + level5: null +- object_name: microwave_oven + level1: small_kitchen_appliances + level2: microwave_oven + level3: null + level4: null + level5: null +- object_name: plate + level1: kitchen_supplies + level2: plate + level3: null + level4: null + level5: null +- object_name: hamburger + level1: food + level2: hamburger + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- open the microwave oven with the right gripper, put the hamburger in, and then turn + it off with the left gripper. turn the button of the microwave oven with the right + gripper to heat up the hamburger. turn on the microwave oven, take out the hamburger + and place it on the table, then turn it off with the left gripper. 
+sub_tasks:
+- subtask: Abnormal
+  subtask_index: 0
+- subtask: 'Right hand: grab the plate with the hamburger'
+  subtask_index: 1
+- subtask: 'Right hand: place the plate on the table'
+  subtask_index: 2
+- subtask: 'Right hand: press the switch'
+  subtask_index: 3
+- subtask: 'Right hand: place the plate in the microwave'
+  subtask_index: 4
+- subtask: End
+  subtask_index: 5
+- subtask: 'Right hand: twist timer button'
+  subtask_index: 6
+- subtask: 'Left hand: close the microwave door'
+  subtask_index: 7
+- subtask: 'null'
+  subtask_index: 8
+atomic_actions:
+- grasp
+- pick
+- place
+- push
+- turn
+- press
+robot_name:
+- Agilex_Cobot_Magic
+end_effector_type: two_finger_gripper
+tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation
+  type information.
+sensor_list:
+- cam_head_rgb
+- cam_right_wrist_rgb
+- cam_left_wrist_rgb
+came_info:
+  cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=h264, pix_fmt=yuv420p
+  cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=h264,
+    pix_fmt=yuv420p
+  cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=h264,
+    pix_fmt=yuv420p
+depth_enabled: false
+coordinate_definition: right-hand-frame
+joint_rotation_dim: radian
+end_rotation_dim: radian
+end_translation_dim: meter
+annotations:
+- eef_acc_mag_annotation.jsonl
+- eef_direction_annotation.jsonl
+- eef_velocity_annotation.jsonl
+- gripper_activity_annotation.jsonl
+- gripper_mode_annotation.jsonl
+- scene_annotations.jsonl
+- subtask_annotations.jsonl
+statistics:
+  total_episodes: 819
+  total_frames: 2478415
+  fps: 30
+  total_tasks: 9
+  total_videos: 2457
+  total_chunks: 1
+  chunks_size: 10000
+  state_dim: 26
+  action_dim: 26
+  camera_views: 3
+  dataset_size: 131.36 GB
+frame_num: 2478415
+dataset_size: 131.36 GB
+data_structure: "Agilex_Cobot_Magic_heat_burger_qced_hardlink/\n|-- annotations\n\
+  | |-- 
eef_direction_annotation.jsonl\n|\ + \ |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ + | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ + \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ + | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ + \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n\ + | |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| \ + \ |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n\ + | `-- episode_000011.parquet\n| `-- ... (807 more entries)\n|-- meta\n\ + | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ + \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_head_rgb\n\ + \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:818 + val: 802:902 + test: 902:1003 +features: + action: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - left_gripper_open + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + - right_gripper_open + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - 
left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - left_gripper_open + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + - right_gripper_open + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.fps: 30.0 + video.height: 480 + video.width: 640 + video.channels: 3 + video.codec: h264 + video.pix_fmt: yuv420p + video.is_depth_map: false + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.fps: 30.0 + video.height: 480 + video.width: 640 + video.channels: 3 + video.codec: h264 + video.pix_fmt: yuv420p + video.is_depth_map: false + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.fps: 30.0 + video.height: 480 + video.width: 640 + video.channels: 3 + video.codec: h264 + video.pix_fmt: yuv420p + video.is_depth_map: false + has_audio: false + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - 
right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. 
+homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: 
videos/chunk-{id}/observation.images.cam_head_rgb/episode_{id}.mp4
+video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4
diff --git a/dataset_info/Agilex_Cobot_Magic_heat_sandwich.yaml b/dataset_info/Agilex_Cobot_Magic_heat_sandwich.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..adfa07f68cb05127c19baee473c5c85df14c2547
--- /dev/null
+++ b/dataset_info/Agilex_Cobot_Magic_heat_sandwich.yaml
@@ -0,0 +1,454 @@
+task_categories:
+- robotics
+language:
+- en
+tags:
+- RoboCOIN
+- LeRobot
+license: apache-2.0
+configs:
+- config_name: default
+  data_files: data/chunk-{id}/episode_{id}.parquet
+extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper
+  in your research/publications—see the "Citation" section for details. You agree
+  to not use the dataset to conduct experiments that cause harm to human subjects.
+extra_gated_fields:
+  Company/Organization:
+    type: text
+    description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher"
+  Country:
+    type: country
+    description: e.g., "Germany", "China", "United States"
+codebase_version: v2.1
+dataset_name: Agilex_Cobot_Magic_heat_sandwich
+dataset_uuid: 00000000-0000-0000-0000-000000000000
+scene_type:
+  level1: household
+  level2: null
+  level3: null
+  level4: null
+  level5: null
+env_type: Due to some reasons, this dataset temporarily cannot provide the environment
+  type information.
+objects: +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: microwave_oven + level1: kitchen_supplies + level2: microwave_oven + level3: null + level4: null + level5: null +- object_name: plates + level1: kitchen_supplies + level2: plates + level3: null + level4: null + level5: null +- object_name: sandwich + level1: food + level2: sandwich + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- use the right gripper to turn on the microwave oven, pick up the sandwich and put + it in. use the left gripper to turn off the microwave oven. use the right gripper + to rotate the microwave oven button to heat up the sandwich. then turn on the microwave + oven, take out the sandwich and place it on the table. use the left gripper to turn + off the microwave oven again. +sub_tasks: +- subtask: 'Right hand: place the plate on the table' + subtask_index: 0 +- subtask: 'Right hand: press the switch' + subtask_index: 1 +- subtask: 'Right hand: place the plate in the microwave' + subtask_index: 2 +- subtask: End + subtask_index: 3 +- subtask: 'Right hand: twist timer button' + subtask_index: 4 +- subtask: 'Right hand: grab the plate with the sandwich' + subtask_index: 5 +- subtask: 'Left hand: close the microwave door' + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 +atomic_actions: +- grasp +- lift +- lower +- pressbutton +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. 
+sensor_list: +- cam_head_rgb +- cam_right_wrist_rgb +- cam_left_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=h264, pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=h264, + pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=h264, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 301 + total_frames: 979323 + fps: 30 + total_tasks: 8 + total_videos: 903 + total_chunks: 1 + chunks_size: 10000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 47.95 GB +frame_num: 979323 +dataset_size: 47.95 GB +data_structure: "Agilex_Cobot_Magic_heat_sandwich_qced_hardlink/\n|-- annotations\n\ + | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n|\ + \ |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ + | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ + \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ + | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ + \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n\ + | |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| \ + \ |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n\ + | `-- episode_000011.parquet\n| `-- ... 
(289 more entries)\n|-- meta\n\ + | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ + \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_head_rgb\n\ + \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:300 +features: + action: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - left_gripper_open + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + - right_gripper_open + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - left_gripper_open + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + - right_gripper_open + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.fps: 30.0 + video.height: 480 + video.width: 640 + video.channels: 3 + video.codec: h264 + video.pix_fmt: 
yuv420p + video.is_depth_map: false + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.fps: 30.0 + video.height: 480 + video.width: 640 + video.channels: 3 + video.codec: h264 + video.pix_fmt: yuv420p + video.is_depth_map: false + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.fps: 30.0 + video.height: 480 + video.width: 640 + video.channels: 3 + video.codec: h264 + video.pix_fmt: yuv420p + video.is_depth_map: false + has_audio: false + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + 
shape: + - 2 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ +  \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ +  \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ +  \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ +  \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ +  \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ +  \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ +  \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ +  \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ +  \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ +  \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ +  \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ +  \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ +  \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ +  \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ +  \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_right_wrist_rgb/episode_{id}.mp4 +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_make_sandwiche.yaml b/dataset_info/Agilex_Cobot_Magic_make_sandwiche.yaml new file mode 100644 index 
0000000000000000000000000000000000000000..5b0da21d81349a974e83d80728b0f611839b85c7 --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_make_sandwiche.yaml @@ -0,0 +1,507 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_make_sandwiche +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: household + level2: kitchen + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. 
+objects: +- object_name: table + level1: furniture + level2: table + level3: null + level4: null + level5: null +- object_name: plate + level1: kitchen_supplies + level2: plate + level3: null + level4: null + level5: null +- object_name: storage_box + level1: storage_utensils + level2: storage_box + level3: null + level4: null + level5: null +- object_name: bread + level1: food + level2: bread + level3: null + level4: null + level5: null +- object_name: cheese + level1: food + level2: cheese + level3: null + level4: null + level5: null +- object_name: lettuce + level1: vegetables + level2: lettuce + level3: null + level4: null + level5: null +- object_name: bacon + level1: food + level2: bacon + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- make sandwiches in the middle plate with the food from the boxes on both sides. +sub_tasks: +- subtask: Left hand:place the bread slice on the cheese slice + subtask_index: 0 +- subtask: Right hand:grab the lettuce slice + subtask_index: 1 +- subtask: Right hand:grab the bacon + subtask_index: 2 +- subtask: Right hand:place the cheese slice on top of the bacon + subtask_index: 3 +- subtask: Left hand:place bacon on bacon + subtask_index: 4 +- subtask: Left hand:grab the bread slice + subtask_index: 5 +- subtask: Right hand:place bacon on lettuce slice + subtask_index: 6 +- subtask: Right hand:place the lettuce slice on the bread slice + subtask_index: 7 +- subtask: Left hand:Place the lettuce slice on the bread slice + subtask_index: 8 +- subtask: Left hand:place bacon on lettuce slice + subtask_index: 9 +- subtask: Right hand:place the bread slice on the cheese slice + subtask_index: 10 +- subtask: Right hand:place the bread slice on the plate + subtask_index: 11 +- subtask: Left hand:place the cheese slice on top of the bacon + subtask_index: 12 +- subtask: Left hand:grab the cheese slice + 
subtask_index: 13 +- subtask: Abnormal + subtask_index: 14 +- subtask: Left hand:Place the bread slices on the plate + subtask_index: 15 +- subtask: Left hand:place the lettuce slice on the bread slice + subtask_index: 16 +- subtask: Left hand:place the bread slice on the plate + subtask_index: 17 +- subtask: Right hand:grab the cheese slice + subtask_index: 18 +- subtask: Right hand:grab the bread slice + subtask_index: 19 +- subtask: Left hand:grab the bacon + subtask_index: 20 +- subtask: Left hand:Place bacon on lettuce slice + subtask_index: 21 +- subtask: End + subtask_index: 22 +- subtask: Right hand:place bacon on bacon + subtask_index: 23 +- subtask: Left hand:grab the lettuce slice + subtask_index: 24 +- subtask: Right hand:Place the cheese slice on top of the bacon + subtask_index: 25 +- subtask: 'null' + subtask_index: 26 +atomic_actions: +- grasp +- place +- pick +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. 
+sensor_list: +- cam_head_rgb +- cam_right_wrist_rgb +- cam_left_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=h264, pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=h264, + pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=h264, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 515 + total_frames: 813553 + fps: 30 + total_tasks: 27 + total_videos: 1545 + total_chunks: 1 + chunks_size: 10000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 53.82 GB +frame_num: 813553 +dataset_size: 53.82 GB +data_structure: "Agilex_Cobot_Magic_make_sandwiche_qced_hardlink/\n|-- annotations\n\ + | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n|\ + \ |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ + | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ + \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ + | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ + \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n\ + | |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| \ + \ |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n\ + | `-- episode_000011.parquet\n| `-- ... 
(503 more entries)\n|-- meta\n\ + | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ + \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_head_rgb\n\ + \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:514 + val: 425:478 + test: 478:532 +features: + action: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - left_gripper_open + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + - right_gripper_open + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - left_gripper_open + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + - right_gripper_open + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.fps: 30.0 + video.height: 480 + video.width: 640 + video.channels: 3 + 
video.codec: h264 + video.pix_fmt: yuv420p + video.is_depth_map: false + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.fps: 30.0 + video.height: 480 + video.width: 640 + video.channels: 3 + video.codec: h264 + video.pix_fmt: yuv420p + video.is_depth_map: false + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.fps: 30.0 + video.height: 480 + video.width: 640 + video.channels: 3 + video.codec: h264 + video.pix_fmt: yuv420p + video.is_depth_map: false + has_audio: false + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + names: + - left_eef_velocity + - 
right_eef_velocity + dtype: int32 + shape: + - 2 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ +  \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ +  \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ +  \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ +  \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ +  \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ +  \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ +  \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ +  \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ +  \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ +  \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ +  \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ +  \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ +  \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ +  \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ +  \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_head_rgb/episode_{id}.mp4 +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_move_mouse.yaml b/dataset_info/Agilex_Cobot_Magic_move_mouse.yaml index 7d66481e26b255efb4aaf6fed6702caf73310d50..a573e62356f2c48e8d393304aa10ffd593467b0e 100644 --- 
a/dataset_info/Agilex_Cobot_Magic_move_mouse.yaml +++ b/dataset_info/Agilex_Cobot_Magic_move_mouse.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: mouse level1: appliances level2: mouse @@ -51,29 +51,37 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: the right gripper organize the mouse on the mouse pad. +task_instruction: +- the right gripper organize the mouse on the mouse pad. 
sub_tasks: -- Grasp the mouse with the left gripper -- Abnormal -- Grasp the mouse with the right gripper -- Place the mouse on the mouse mat with the right gripper -- End -- Place the mouse on the mouse mat with the left gripper -- 'null' +- subtask: Grasp the mouse with the left gripper + subtask_index: 0 +- subtask: Abnormal + subtask_index: 1 +- subtask: Grasp the mouse with the right gripper + subtask_index: 2 +- subtask: Place the mouse on the mouse mat with the right gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: Place the mouse on the mouse mat with the left gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - lift - lower -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -81,13 +89,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -95,8 +100,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 112 total_frames: 49737 fps: 30 @@ -197,11 +201,9 @@ data_structure: 'Agilex_Cobot_Magic_move_mouse_qced_hardlink/ |-- info.yaml 
`-- README.md' -splits: &id011 +splits: train: 0:111 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -474,7 +476,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -482,7 +484,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -509,207 +510,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. 
- extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_move_mouse - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: office & workspace - level2: office - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - the right gripper organize the mouse on the mouse pad. - sub_tasks: - - subtask: Grasp the mouse with the left gripper - subtask_index: 0 - - subtask: Abnormal - subtask_index: 1 - - subtask: Grasp the mouse with the right gripper - subtask_index: 2 - - subtask: Place the mouse on the mouse mat with the right gripper - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: Place the mouse on the mouse mat with the left gripper - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - lift - - lower - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 49737 - dataset_size: 386.89 MB - data_structure: 'Agilex_Cobot_Magic_move_mouse_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- backup - - | |-- data - - | | `-- chunk-000 - - | `-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (100 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. 
- support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: 
null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_move_mouse_pen.yaml b/dataset_info/Agilex_Cobot_Magic_move_mouse_pen.yaml index 6ebeeb151f2f2d3edd42813a37cee5c326d15d45..7a6cd81ef1baad6d007a2b71664d31ef490e121c 100644 --- a/dataset_info/Agilex_Cobot_Magic_move_mouse_pen.yaml +++ b/dataset_info/Agilex_Cobot_Magic_move_mouse_pen.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -69,33 +69,44 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: the left gripper organize the pen on the notebook then the right - gripper organize the mouse on the mouse pad. +task_instruction: +- the left gripper organize the pen on the notebook then the right gripper organize + the mouse on the mouse pad. 
sub_tasks: -- Grasp the mouse with the left gripper -- Grasp the marker with the left gripper -- Grasp the mouse with the right gripper -- Place the mouse on the mouse pad with the right gripper -- Place the marker on the notebook with the right gripper -- Grasp the marker with the right gripper -- end -- Place the mouse on the mouse pad with the left gripper -- Place the marker on the notebook with the left gripper -- 'null' +- subtask: Grasp the mouse with the left gripper + subtask_index: 0 +- subtask: Grasp the marker with the left gripper + subtask_index: 1 +- subtask: Grasp the mouse with the right gripper + subtask_index: 2 +- subtask: Place the mouse on the mouse pad with the right gripper + subtask_index: 3 +- subtask: Place the marker on the notebook with the right gripper + subtask_index: 4 +- subtask: Grasp the marker with the right gripper + subtask_index: 5 +- subtask: end + subtask_index: 6 +- subtask: Place the mouse on the mouse pad with the left gripper + subtask_index: 7 +- subtask: Place the marker on the notebook with the left gripper + subtask_index: 8 +- subtask: 'null' + subtask_index: 9 atomic_actions: - grasp - lift - lower -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -103,13 +114,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -117,8 +125,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 30 total_frames: 22756 fps: 30 @@ -203,11 +210,9 @@ data_structure: 'Agilex_Cobot_Magic_move_mouse_pen_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:29 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -480,7 +485,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -488,7 +493,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. 
support_info: For technical support, please open an issue on our GitHub repository. @@ -515,198 +519,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_move_mouse_pen - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: office & workspace - level2: office - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - the left gripper organize the pen on the notebook then the right gripper organize - the mouse on the mouse pad. 
- sub_tasks: - - subtask: Grasp the mouse with the left gripper - subtask_index: 0 - - subtask: Grasp the marker with the left gripper - subtask_index: 1 - - subtask: Grasp the mouse with the right gripper - subtask_index: 2 - - subtask: Place the mouse on the mouse pad with the right gripper - subtask_index: 3 - - subtask: Place the marker on the notebook with the right gripper - subtask_index: 4 - - subtask: Grasp the marker with the right gripper - subtask_index: 5 - - subtask: end - subtask_index: 6 - - subtask: Place the mouse on the mouse pad with the left gripper - subtask_index: 7 - - subtask: Place the marker on the notebook with the left gripper - subtask_index: 8 - - subtask: 'null' - subtask_index: 9 - atomic_actions: - - grasp - - lift - - lower - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 22756 - dataset_size: 276.67 MB - data_structure: 'Agilex_Cobot_Magic_move_mouse_pen_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - 
| `-- episode_000011.parquet - - | `-- ... (18 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ 
Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_move_mouse_pen_black_tablecloth.yaml b/dataset_info/Agilex_Cobot_Magic_move_mouse_pen_black_tablecloth.yaml index b9d9e6ef740b4da30956265fd7427336aceedf07..1a88cf564ac93544a4575d364f68967e03c472c3 100644 --- a/dataset_info/Agilex_Cobot_Magic_move_mouse_pen_black_tablecloth.yaml +++ b/dataset_info/Agilex_Cobot_Magic_move_mouse_pen_black_tablecloth.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -69,35 +69,48 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: the left gripper organize the pen on the notebook then the right - gripper organize the mouse on the mouse pad. +task_instruction: +- the left gripper organize the pen on the notebook then the right gripper organize + the mouse on the mouse pad. sub_tasks: -- Right gripper -- Pick up the mouse with right gripper -- Pick up the marker pen with right gripper -- Pick up the mouse with left gripper -- End -- Place the marker pen on the notebook with right gripper -- Place the mouse on the mouse pad with left gripper -- Place the marker pen on the notebook with left gripper -- Left gripper -- Pick up the marker pen with left gripper -- Place the mouse on the mouse pad with right gripper -- 'null' +- subtask: Right gripper + subtask_index: 0 +- subtask: Pick up the mouse with right gripper + subtask_index: 1 +- subtask: Pick up the marker pen with right gripper + subtask_index: 2 +- subtask: Pick up the mouse with left gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: Place the marker pen on the notebook with right gripper + subtask_index: 5 +- subtask: Place the mouse on the mouse pad with left gripper + subtask_index: 6 +- subtask: Place the marker pen on the notebook with left gripper + subtask_index: 7 +- subtask: Left gripper + subtask_index: 8 +- subtask: Pick up the marker pen with left gripper + subtask_index: 9 +- subtask: Place the mouse on the mouse pad 
with right gripper + subtask_index: 10 +- subtask: 'null' + subtask_index: 11 atomic_actions: - grasp - lift - lower -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -105,13 +118,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -119,8 +129,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 30 total_frames: 20668 fps: 30 @@ -205,11 +214,9 @@ data_structure: 'Agilex_Cobot_Magic_move_mouse_pen_black_tablecloth_qced_hardlin |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:29 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -482,7 +489,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -490,7 +497,6 @@ 
dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -517,202 +523,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_move_mouse_pen_black_tablecloth - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: office & workspace - level2: office - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - the left gripper organize the pen on the notebook then the right gripper organize - the mouse on the mouse pad. 
- sub_tasks: - - subtask: Right gripper - subtask_index: 0 - - subtask: Pick up the mouse with right gripper - subtask_index: 1 - - subtask: Pick up the marker pen with right gripper - subtask_index: 2 - - subtask: Pick up the mouse with left gripper - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: Place the marker pen on the notebook with right gripper - subtask_index: 5 - - subtask: Place the mouse on the mouse pad with left gripper - subtask_index: 6 - - subtask: Place the marker pen on the notebook with left gripper - subtask_index: 7 - - subtask: Left gripper - subtask_index: 8 - - subtask: Pick up the marker pen with left gripper - subtask_index: 9 - - subtask: Place the mouse on the mouse pad with right gripper - subtask_index: 10 - - subtask: 'null' - subtask_index: 11 - atomic_actions: - - grasp - - lift - - lower - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 20668 - dataset_size: 513.70 MB - data_structure: 'Agilex_Cobot_Magic_move_mouse_pen_black_tablecloth_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (18 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_move_mouse_pen_green_tablecloth.yaml b/dataset_info/Agilex_Cobot_Magic_move_mouse_pen_green_tablecloth.yaml index 465f78c1de2f642e965ea4dc15ab6ae09f67d939..8bf79911b5b657e3070779c0f6f41c42c51dd76f 100644 --- a/dataset_info/Agilex_Cobot_Magic_move_mouse_pen_green_tablecloth.yaml +++ b/dataset_info/Agilex_Cobot_Magic_move_mouse_pen_green_tablecloth.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -69,33 +69,44 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: the left gripper organize the pen on the notebook then the right - gripper organize the mouse on the mouse pad. +task_instruction: +- the left gripper organize the pen on the notebook then the right gripper organize + the mouse on the mouse pad. 
sub_tasks: -- Place the mouse on the mouse pad with the left gripper -- Grasp the marker with the left gripper -- Grasp the mouse with the right gripper -- Place the mouse on the mouse pad with the right gripper -- Place the marker on the notebook with the right gripper -- Grasp the marker with the right gripper -- Grasp the mouse with the left gripper -- end -- Place the marker on the notebook with the left gripper -- 'null' +- subtask: Place the mouse on the mouse pad with the left gripper + subtask_index: 0 +- subtask: Grasp the marker with the left gripper + subtask_index: 1 +- subtask: Grasp the mouse with the right gripper + subtask_index: 2 +- subtask: Place the mouse on the mouse pad with the right gripper + subtask_index: 3 +- subtask: Place the marker on the notebook with the right gripper + subtask_index: 4 +- subtask: Grasp the marker with the right gripper + subtask_index: 5 +- subtask: Grasp the mouse with the left gripper + subtask_index: 6 +- subtask: end + subtask_index: 7 +- subtask: Place the marker on the notebook with the left gripper + subtask_index: 8 +- subtask: 'null' + subtask_index: 9 atomic_actions: - grasp - lift - lower -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -103,13 +114,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -117,8 +125,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 30 total_frames: 19527 fps: 30 @@ -203,11 +210,9 @@ data_structure: 'Agilex_Cobot_Magic_move_mouse_pen_green_tablecloth_qced_hardlin |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:29 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -480,7 +485,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -488,7 +493,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact 
us. support_info: For technical support, please open an issue on our GitHub repository. @@ -515,198 +519,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_move_mouse_pen_green_tablecloth - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: office & workspace - level2: office - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - the left gripper organize the pen on the notebook then the right gripper organize - the mouse on the mouse pad. 
- sub_tasks: - - subtask: Place the mouse on the mouse pad with the left gripper - subtask_index: 0 - - subtask: Grasp the marker with the left gripper - subtask_index: 1 - - subtask: Grasp the mouse with the right gripper - subtask_index: 2 - - subtask: Place the mouse on the mouse pad with the right gripper - subtask_index: 3 - - subtask: Place the marker on the notebook with the right gripper - subtask_index: 4 - - subtask: Grasp the marker with the right gripper - subtask_index: 5 - - subtask: Grasp the mouse with the left gripper - subtask_index: 6 - - subtask: end - subtask_index: 7 - - subtask: Place the marker on the notebook with the left gripper - subtask_index: 8 - - subtask: 'null' - subtask_index: 9 - atomic_actions: - - grasp - - lift - - lower - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 19527 - dataset_size: 316.01 MB - data_structure: 'Agilex_Cobot_Magic_move_mouse_pen_green_tablecloth_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- 
episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (18 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, 
Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_move_mouse_pen_khaki_tablecloth.yaml b/dataset_info/Agilex_Cobot_Magic_move_mouse_pen_khaki_tablecloth.yaml index 9e5f066787f75a4b0cc46453ed8ef90b809aeed2..07082179fa25b15a774a79e508593aeb640ca02b 100644 --- a/dataset_info/Agilex_Cobot_Magic_move_mouse_pen_khaki_tablecloth.yaml +++ b/dataset_info/Agilex_Cobot_Magic_move_mouse_pen_khaki_tablecloth.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -66,33 +66,44 @@ objects: &id006 level2: khaki_table_cloths task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: the left gripper organize the pen on the notebook then the right - gripper organize the mouse on the mouse pad. +task_instruction: +- the left gripper organize the pen on the notebook then the right gripper organize + the mouse on the mouse pad. sub_tasks: -- Grasp the mouse with the left gripper -- Grasp the marker with the left gripper -- Grasp the mouse with the right gripper -- Place the mouse on the mouse pad with the right gripper -- Place the marker on the notebook with the right gripper -- Grasp the marker with the right gripper -- end -- Place the mouse on the mouse pad with the left gripper -- Place the marker on the notebook with the left gripper -- 'null' +- subtask: Grasp the mouse with the left gripper + subtask_index: 0 +- subtask: Grasp the marker with the left gripper + subtask_index: 1 +- subtask: Grasp the mouse with the right gripper + subtask_index: 2 +- subtask: Place the mouse on the mouse pad with the right gripper + subtask_index: 3 +- subtask: Place the marker on the notebook with the right gripper + subtask_index: 4 +- subtask: Grasp the marker with the right gripper + subtask_index: 5 +- subtask: end + subtask_index: 6 +- subtask: Place the mouse on the mouse pad with the left gripper + subtask_index: 7 +- subtask: Place the marker on the notebook with the left gripper + subtask_index: 8 +- subtask: 'null' + subtask_index: 9 atomic_actions: - 
grasp - lift - lower -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -100,13 +111,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -114,8 +122,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 30 total_frames: 20954 fps: 30 @@ -200,11 +207,9 @@ data_structure: 'Agilex_Cobot_Magic_move_mouse_pen_khaki_tablecloth_qced_hardlin |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:29 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -477,7 +482,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -485,7 +490,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: 
https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -512,198 +516,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_move_mouse_pen_khaki_tablecloth - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: office & workspace - level2: office - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - the left gripper organize the pen on the notebook then the right gripper organize - the mouse on the mouse pad. 
- sub_tasks: - - subtask: Grasp the mouse with the left gripper - subtask_index: 0 - - subtask: Grasp the marker with the left gripper - subtask_index: 1 - - subtask: Grasp the mouse with the right gripper - subtask_index: 2 - - subtask: Place the mouse on the mouse pad with the right gripper - subtask_index: 3 - - subtask: Place the marker on the notebook with the right gripper - subtask_index: 4 - - subtask: Grasp the marker with the right gripper - subtask_index: 5 - - subtask: end - subtask_index: 6 - - subtask: Place the mouse on the mouse pad with the left gripper - subtask_index: 7 - - subtask: Place the marker on the notebook with the left gripper - subtask_index: 8 - - subtask: 'null' - subtask_index: 9 - atomic_actions: - - grasp - - lift - - lower - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 20954 - dataset_size: 347.28 MB - data_structure: 'Agilex_Cobot_Magic_move_mouse_pen_khaki_tablecloth_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- 
episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (18 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, 
Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_move_mouse_pen_red_tablecloth.yaml b/dataset_info/Agilex_Cobot_Magic_move_mouse_pen_red_tablecloth.yaml index e5082274933aebee9f931f834c440b6884d82dfa..d14ab98ed8903db06536729f0300b9ad7ac5d82e 100644 --- a/dataset_info/Agilex_Cobot_Magic_move_mouse_pen_red_tablecloth.yaml +++ b/dataset_info/Agilex_Cobot_Magic_move_mouse_pen_red_tablecloth.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -69,35 +69,48 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: the left gripper organize the pen on the notebook then the right - gripper organize the mouse on the mouse pad. +task_instruction: +- the left gripper organize the pen on the notebook then the right gripper organize + the mouse on the mouse pad. sub_tasks: -- Right gripper -- Pick up the mouse with right gripper -- Pick up the marker pen with right gripper -- Pick up the mouse with left gripper -- End -- Place the marker pen on the notebook with right gripper -- Place the mouse on the mouse pad with left gripper -- Place the marker pen on the notebook with left gripper -- Left gripper -- Pick up the marker pen with left gripper -- Place the mouse on the mouse pad with right gripper -- 'null' +- subtask: Right gripper + subtask_index: 0 +- subtask: Pick up the mouse with right gripper + subtask_index: 1 +- subtask: Pick up the marker pen with right gripper + subtask_index: 2 +- subtask: Pick up the mouse with left gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: Place the marker pen on the notebook with right gripper + subtask_index: 5 +- subtask: Place the mouse on the mouse pad with left gripper + subtask_index: 6 +- subtask: Place the marker pen on the notebook with left gripper + subtask_index: 7 +- subtask: Left gripper + subtask_index: 8 +- subtask: Pick up the marker pen with left gripper + subtask_index: 9 +- subtask: Place the mouse on the mouse pad 
with right gripper + subtask_index: 10 +- subtask: 'null' + subtask_index: 11 atomic_actions: - grasp - lift - lower -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -105,13 +118,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -119,8 +129,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 30 total_frames: 20689 fps: 30 @@ -205,11 +214,9 @@ data_structure: 'Agilex_Cobot_Magic_move_mouse_pen_red_tablecloth_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:29 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -482,7 +489,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -490,7 +497,6 @@ 
dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -517,202 +523,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_move_mouse_pen_red_tablecloth - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: office & workspace - level2: office - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - the left gripper organize the pen on the notebook then the right gripper organize - the mouse on the mouse pad. 
- sub_tasks: - - subtask: Right gripper - subtask_index: 0 - - subtask: Pick up the mouse with right gripper - subtask_index: 1 - - subtask: Pick up the marker pen with right gripper - subtask_index: 2 - - subtask: Pick up the mouse with left gripper - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: Place the marker pen on the notebook with right gripper - subtask_index: 5 - - subtask: Place the mouse on the mouse pad with left gripper - subtask_index: 6 - - subtask: Place the marker pen on the notebook with left gripper - subtask_index: 7 - - subtask: Left gripper - subtask_index: 8 - - subtask: Pick up the marker pen with left gripper - subtask_index: 9 - - subtask: Place the mouse on the mouse pad with right gripper - subtask_index: 10 - - subtask: 'null' - subtask_index: 11 - atomic_actions: - - grasp - - lift - - lower - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 20689 - dataset_size: 664.79 MB - data_structure: 'Agilex_Cobot_Magic_move_mouse_pen_red_tablecloth_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (18 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_move_object.yaml b/dataset_info/Agilex_Cobot_Magic_move_object.yaml new file mode 100644 index 0000000000000000000000000000000000000000..1d12fca0fa57f9116e21fda575b209af68b49dc5 --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_move_object.yaml @@ -0,0 +1,1185 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_move_object +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: commercial & convenience + level2: supermarket + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. 
+objects: +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: bowl + level1: kitchen_supplies + level2: bowl + level3: null + level4: null + level5: null +- object_name: pink_long_towel + level1: daily_necessities + level2: pink_long_towel + level3: null + level4: null + level5: null +- object_name: blue_long_towel + level1: daily_necessities + level2: blue_long_towel + level3: null + level4: null + level5: null +- object_name: round_plate + level1: kitchen_supplies + level2: round_plate + level3: null + level4: null + level5: null +- object_name: rectangular_plate + level1: kitchen_supplies + level2: rectangular_plate + level3: null + level4: null + level5: null +- object_name: pot_spoon + level1: kitchen_supplies + level2: pot_spoon + level3: null + level4: null + level5: null +- object_name: wok + level1: kitchen_supplies + level2: wok + level3: null + level4: null + level5: null +- object_name: corn + level1: food + level2: corn + level3: null + level4: null + level5: null +- object_name: egg_tart + level1: food + level2: egg_tart + level3: null + level4: null + level5: null +- object_name: egg_yolk_pastries + level1: food + level2: egg_yolk_pastrie + level3: null + level4: null + level5: null +- object_name: coffee capsule + level1: beverages + level2: coffee capsule + level3: null + level4: null + level5: null +- object_name: coke (bottled) + level1: beverages + level2: coke (bottled) + level3: null + level4: null + level5: null +- object_name: red_bull_canned_drink + level1: beverages + level2: red_bull_canned_drink + level3: null + level4: null + level5: null +- object_name: yibao + level1: beverages + level2: yibao + level3: null + level4: null + level5: null +- object_name: mango + level1: food + level2: mango + level3: null + level4: null + level5: null +- object_name: eastern_leaves + level1: beverages + level2: eastern_leaves + level3: null + level4: null + level5: null +- 
object_name: purple_incense + level1: daily_necessities + level2: purple_incense + level3: null + level4: null + level5: null +- object_name: duck + level1: toys + level2: duck + level3: null + level4: null + level5: null +- object_name: toothpaste + level1: daily_necessities + level2: toothpaste + level3: null + level4: null + level5: null +- object_name: deli_water-based_marker + level1: stationery + level2: deli_water-based_marker + level3: null + level4: null + level5: null +- object_name: bouncy_ball + level1: toys + level2: bouncy_ball + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- the gripper move the object. +sub_tasks: +- subtask: Grasp the coffee capsule with the right gripper + subtask_index: 0 +- subtask: Grasp the toothpaste with the left gripper + subtask_index: 1 +- subtask: Grasp the YiBao mineral water with the right gripper + subtask_index: 2 +- subtask: Place the toothpaste on the table with the left gripper + subtask_index: 3 +- subtask: Place the corn into the green plate with the right gripper + subtask_index: 4 +- subtask: Place the egg tart into the pink plate with the right gripper + subtask_index: 5 +- subtask: Grasp the blue towel with the left gripper + subtask_index: 6 +- subtask: Place the blue bowl into the green plate with the right gripper + subtask_index: 7 +- subtask: Place the mango into the blue bowl with the left gripper + subtask_index: 8 +- subtask: Grasp the egg tart with the left gripper + subtask_index: 9 +- subtask: Place the USB box on the table with the left gripper + subtask_index: 10 +- subtask: Place the pink pot on the table with the left gripper + subtask_index: 11 +- subtask: Place the red marker pen on the table with the right gripper + subtask_index: 12 +- subtask: Place the Jia Duo Bao into the green bowl with the left gripper + subtask_index: 13 +- subtask: Place the colorful 
ball into the pink pot with the right gripper + subtask_index: 14 +- subtask: Place the coffee capsule into the pink pot with the left gripper + subtask_index: 15 +- subtask: Grasp the black box with the left gripper + subtask_index: 16 +- subtask: Place the colorful ball into the pink plate with the right gripper + subtask_index: 17 +- subtask: Grasp the blue bowl with the right gripper + subtask_index: 18 +- subtask: Place the shampoo into the pink plate with the left gripper + subtask_index: 19 +- subtask: Place the red bull into the pink pot with the right gripper + subtask_index: 20 +- subtask: Place the pink towel into the green plate with the right gripper + subtask_index: 21 +- subtask: Place the pink plate on the table with the left gripper + subtask_index: 22 +- subtask: Place the egg tart into the green bowl with the right gripper + subtask_index: 23 +- subtask: Grasp the Incense box with the right gripper + subtask_index: 24 +- subtask: Place the shampoo into the green plate with the left gripper + subtask_index: 25 +- subtask: Place the mango into the blue bowl with the left gripper + subtask_index: 26 +- subtask: Place the USB boxon the table with the left gripper + subtask_index: 27 +- subtask: Place the cooking shovel on the table with the left gripper + subtask_index: 28 +- subtask: Grasp the USB box with the right gripper + subtask_index: 29 +- subtask: Place the XX on the table with the left gripper + subtask_index: 30 +- subtask: Place the toothpaste on the table with the right gripper + subtask_index: 31 +- subtask: Place the colorful ball on the table with the left gripper + subtask_index: 32 +- subtask: Place the YiBao mineral water on the table with the left gripper + subtask_index: 33 +- subtask: Place the blue towel into the green plate with the left gripper + subtask_index: 34 +- subtask: Place the corn into the pink plate with the right gripper + subtask_index: 35 +- subtask: Grasp the green bowl with the right gripper + subtask_index: 
36 +- subtask: Place the mango into the pink pot with the right gripper + subtask_index: 37 +- subtask: Place the coffee capsule into the pink plate with the right gripper + subtask_index: 38 +- subtask: Place the coffee capsule into the pink pot with the right gripper + subtask_index: 39 +- subtask: Place the red bull into the green bowl with the right gripper + subtask_index: 40 +- subtask: Place the mango into the green plate with the left gripper + subtask_index: 41 +- subtask: Place the green bowl on the table with the right gripper + subtask_index: 42 +- subtask: Place the brown box into the green bowl with the right gripper + subtask_index: 43 +- subtask: Place the colorful ball into the blue bowl with the left gripper + subtask_index: 44 +- subtask: Place the shampoo on the table with the left gripper + subtask_index: 45 +- subtask: Grasp the toothpaste with the right gripper + subtask_index: 46 +- subtask: Place the red bull into the green plate with the right gripper + subtask_index: 47 +- subtask: Grasp the coffee capsule with the left grippe + subtask_index: 48 +- subtask: Grasp the Luckin Coffee box with the left gripper + subtask_index: 49 +- subtask: Place the mango into the pink pot with the left gripper + subtask_index: 50 +- subtask: Place the coffee capsule into the green plate with the right gripper + subtask_index: 51 +- subtask: Place the persimmon into the pink pot with the left gripper + subtask_index: 52 +- subtask: Place the shampoo into the pink plate with the right gripper + subtask_index: 53 +- subtask: Place the corn on the table with the right gripper + subtask_index: 54 +- subtask: Grasp the coffee capsule with the left gripper + subtask_index: 55 +- subtask: Grasp the pink pot with the left gripper + subtask_index: 56 +- subtask: Place the mango on the table with the left gripper + subtask_index: 57 +- subtask: Place the egg tart on the table with the left gripper + subtask_index: 58 +- subtask: Place the shampoo into the blue bowl 
with the right gripper + subtask_index: 59 +- subtask: Grasp the USB box with the right gripper + subtask_index: 60 +- subtask: Place the coke into the green plate with the left gripper + subtask_index: 61 +- subtask: Place the egg yolk pastry into the blue bowl with the left gripper + subtask_index: 62 +- subtask: Place the Incense box into the pink plate with the right gripper + subtask_index: 63 +- subtask: Place the pink pot on the table with the left gripper + subtask_index: 64 +- subtask: Place the blue towel on the table with the left gripper + subtask_index: 65 +- subtask: Grasp the Luckin Coffee box with the right gripper + subtask_index: 66 +- subtask: Place the cooking shovel into the pink pot with the right gripper + subtask_index: 67 +- subtask: Place the Incense box into the pink plate with the left gripper + subtask_index: 68 +- subtask: Grasp the egg yolk pastry with the right gripper + subtask_index: 69 +- subtask: Grasp the red marker pen with the left gripper + subtask_index: 70 +- subtask: Grasp the USB box with the left gripper + subtask_index: 71 +- subtask: Place the Jia Duo Bao into the blue bowl with the left gripper + subtask_index: 72 +- subtask: Place the egg yolk pastry into the green bowl with the left gripper + subtask_index: 73 +- subtask: Place the red bull into the pink plate with the left gripper + subtask_index: 74 +- subtask: Place the pink pot into the green plate with the left gripper + subtask_index: 75 +- subtask: Place the red bull on the table with the right gripper + subtask_index: 76 +- subtask: Grasp the coke with the left gripper + subtask_index: 77 +- subtask: Grasp the corn with the right gripper + subtask_index: 78 +- subtask: Place the cooking shovel into the green bowl with the right gripper + subtask_index: 79 +- subtask: Place the corn on the table with the left gripper + subtask_index: 80 +- subtask: Place the red bull into the blue bowl with the left gripper + subtask_index: 81 +- subtask: Place the pink pot 
into the blue bowl with the right gripper + subtask_index: 82 +- subtask: Place the XX into the green plate with the left gripper + subtask_index: 83 +- subtask: Place the YiBao mineral water into the blue bowl with the right gripper + subtask_index: 84 +- subtask: Grasp the coke with the right gripper + subtask_index: 85 +- subtask: Place the mango into the pink plate with the right gripper + subtask_index: 86 +- subtask: Place the egg tart into the green plate with the left gripper + subtask_index: 87 +- subtask: Place the cooking shovel on the table with the right gripper + subtask_index: 88 +- subtask: Place the red marker pen into the pink plate with the right gripper + subtask_index: 89 +- subtask: Place the Incense box on the table with the right gripper + subtask_index: 90 +- subtask: Place the colorful ball into the blue bowl with the right gripper + subtask_index: 91 +- subtask: Place the XX into the pink pot with the left gripper + subtask_index: 92 +- subtask: Place the blue bowl into the pink plate with the left gripper + subtask_index: 93 +- subtask: Place the mango on the table with the left gripper + subtask_index: 94 +- subtask: Place the blue bowl into the blue bowl with the left gripper + subtask_index: 95 +- subtask: Place the red marker pen into the green plate with the right gripper + subtask_index: 96 +- subtask: Place the corn into the pink plate with the left gripper + subtask_index: 97 +- subtask: Grasp the brown box with the right gripper + subtask_index: 98 +- subtask: Place the Incense box into the pink pot with the left gripper + subtask_index: 99 +- subtask: Place the USB box on the table with the left gripper + subtask_index: 100 +- subtask: Place the coke on the table with the left gripper + subtask_index: 101 +- subtask: Place the colorful ball into the pink plate with the right gripper + subtask_index: 102 +- subtask: Grasp the egg yolk pastry with the right gripper + subtask_index: 103 +- subtask: Place the egg tart into the pink 
pot with the left gripper + subtask_index: 104 +- subtask: Place the Luckin Coffee box into the pink plate with the left gripper + subtask_index: 105 +- subtask: Place the shampoo on the table with the left gripper + subtask_index: 106 +- subtask: Place the egg tart on the table with the left gripper + subtask_index: 107 +- subtask: Grasp the Jia Duo Baowith the right gripper + subtask_index: 108 +- subtask: Place the XX into the blue bowl with the right gripper + subtask_index: 109 +- subtask: Grasp the red duck with the left gripper + subtask_index: 110 +- subtask: Place the pink towel into the pink plate with the left gripper + subtask_index: 111 +- subtask: Place the blue towel on the table with the left gripper + subtask_index: 112 +- subtask: Place the black box on the table with the right gripper + subtask_index: 113 +- subtask: Place the blue bowl on the table with the right gripper + subtask_index: 114 +- subtask: Place the YiBao mineral water on the table with the right gripper + subtask_index: 115 +- subtask: Place the shampoo into the blue bowl with the right gripper + subtask_index: 116 +- subtask: Abnormal + subtask_index: 117 +- subtask: Place the egg yolk pastry into the pink pot with the right gripper + subtask_index: 118 +- subtask: Grasp the xx with the left gripper + subtask_index: 119 +- subtask: Place the Incense box into the pink pot with the right gripper + subtask_index: 120 +- subtask: Grasp the egg yolk pastry with the left gripper + subtask_index: 121 +- subtask: Place the YiBao mineral water into the green plate with the right gripper + subtask_index: 122 +- subtask: Place the pink pot on the table with the right gripper + subtask_index: 123 +- subtask: Place the colorful ball into the green plate with the left gripper + subtask_index: 124 +- subtask: Place the blue towel on the table with the left gripper + subtask_index: 125 +- subtask: Place the egg tart into the blue bowl with the left gripper + subtask_index: 126 +- subtask: Place 
the cooking shovel into the blue bowl with the left gripper + subtask_index: 127 +- subtask: Place the blue towel into the green plate with the right gripper + subtask_index: 128 +- subtask: 'Grasp the egg yolk pastry with the left gripper + + ' + subtask_index: 129 +- subtask: Place the pink pot into the green plate with the right gripper + subtask_index: 130 +- subtask: Grasp the mango with the left gripper + subtask_index: 131 +- subtask: Grasp the YiBao mineral water with the left gripper + subtask_index: 132 +- subtask: Place the pink towel into the pink plate with the right gripper + subtask_index: 133 +- subtask: Grasp the blue towel with the left gripper + subtask_index: 134 +- subtask: Place the USB box on the table with the left gripper + subtask_index: 135 +- subtask: Place the pink towel into the green bowl with the right gripper + subtask_index: 136 +- subtask: Place the Jia Duo Bao on the table with the right gripper + subtask_index: 137 +- subtask: Place the USB box on the table with the right gripper + subtask_index: 138 +- subtask: Grasp the persimmon with the left gripper + subtask_index: 139 +- subtask: Place the egg yolk pastry into the pink plate with the right gripper + subtask_index: 140 +- subtask: Grasp the red bull with the left gripper + subtask_index: 141 +- subtask: Grasp the XX with the left gripper + subtask_index: 142 +- subtask: Place the shampoo into the green bowl with the left gripper + subtask_index: 143 +- subtask: Place the mango into the pink plate with the left gripper + subtask_index: 144 +- subtask: Place the blue towel on the table with the left gripper + subtask_index: 145 +- subtask: Grasp the red bull with the right gripper + subtask_index: 146 +- subtask: Grasp the cooking shovel with the right gripper + subtask_index: 147 +- subtask: Grasp the black box with the right gripper + subtask_index: 148 +- subtask: Place the green bowl on the table with the left gripper + subtask_index: 149 +- subtask: Place the cooking 
shovel into the green plate with the right gripper + subtask_index: 150 +- subtask: Place the cooking shovel into the blue bowl with the right gripper + subtask_index: 151 +- subtask: Grasp the Incense box with the left gripper + subtask_index: 152 +- subtask: Grasp the colorful ball with the left gripper + subtask_index: 153 +- subtask: Place the Incense box on the table with the left gripper + subtask_index: 154 +- subtask: Place the mango into the blue bowl with the right gripper + subtask_index: 155 +- subtask: Place the YiBao mineral water into the pink plate with the left gripper + subtask_index: 156 +- subtask: Grasp the pink towel with the left gripper + subtask_index: 157 +- subtask: Place the XX on the table with the right gripper + subtask_index: 158 +- subtask: Place the egg yolk pastry into the pink pot with the left gripper + subtask_index: 159 +- subtask: Place the coffee capsule into the pink plate with the left gripper + subtask_index: 160 +- subtask: Grasp the green bowl with the left gripper + subtask_index: 161 +- subtask: Grasp the Luckin Coffee box with the right gripper + subtask_index: 162 +- subtask: Place the egg tart on the table with the right gripper + subtask_index: 163 +- subtask: Place the Jia Duo Bao on the table with the left gripper + subtask_index: 164 +- subtask: Place the toothpaste on the table with the left gripper + subtask_index: 165 +- subtask: Place the brown box into the green plate with the left gripper + subtask_index: 166 +- subtask: Place the red marker pen into the green plate with the left gripper + subtask_index: 167 +- subtask: Place the colorful ball on the table with the left gripper + subtask_index: 168 +- subtask: Place the red bull on the table with the left gripper + subtask_index: 169 +- subtask: Grasp the blue towel with the left gripper + subtask_index: 170 +- subtask: Place the blue towel into the green bowl with the right gripper + subtask_index: 171 +- subtask: Place the coffee capsule into the blue 
bowl with the right gripper + subtask_index: 172 +- subtask: Grasp the mango with the right gripper + subtask_index: 173 +- subtask: Place the coke into the green plate with the right gripper + subtask_index: 174 +- subtask: Place the egg yolk pastry into the pink plate with the left gripper + subtask_index: 175 +- subtask: Place the YiBao mineral water on the table with the left gripper + subtask_index: 176 +- subtask: Place the coffee capsule on the table with the left gripper + subtask_index: 177 +- subtask: Place the XX into the green plate with the right gripper + subtask_index: 178 +- subtask: Place the Jia Duo Bao into the green plate with the right gripper + subtask_index: 179 +- subtask: Place the pink towel on the table with the right gripper + subtask_index: 180 +- subtask: Place the USB box on the table with the right gripper + subtask_index: 181 +- subtask: Place the egg yolk pastry on the table with the right gripper + subtask_index: 182 +- subtask: Place the Jia Duo Bao into the pink plate with the left gripper + subtask_index: 183 +- subtask: Place the black box on the table with the left gripper + subtask_index: 184 +- subtask: Grasp the pink plate with the left gripper + subtask_index: 185 +- subtask: Place the coffee capsule on the table with the right gripper + subtask_index: 186 +- subtask: Place the mango on the table with the right gripper + subtask_index: 187 +- subtask: Grasp the corn with the left gripper + subtask_index: 188 +- subtask: Place the shampoo on the table with the right gripper + subtask_index: 189 +- subtask: Place the Jia Duo Bao into the pink plate with the right gripper + subtask_index: 190 +- subtask: Place the pink towel into the green plate with the left gripper + subtask_index: 191 +- subtask: Place the egg yolk pastry into the green bowl with the right gripper + subtask_index: 192 +- subtask: Place the red bull into the blue bowl with the right gripper + subtask_index: 193 +- subtask: Place the cooking shovel into the 
green bowl with the right gripper + subtask_index: 194 +- subtask: Grasp the XX with the right gripper + subtask_index: 195 +- subtask: Place the brown box into the green plate with the left gripper + subtask_index: 196 +- subtask: Place the blue towel on the table with the right gripper + subtask_index: 197 +- subtask: Grasp the cooking shovel with the left gripper + subtask_index: 198 +- subtask: Grasp the cooking shovel with the left gripper + subtask_index: 199 +- subtask: Place the red marker pen on the table with the left gripper + subtask_index: 200 +- subtask: Place the XX into the pink plate with the right gripper + subtask_index: 201 +- subtask: Place the Incense box into the green plate with the right gripper + subtask_index: 202 +- subtask: Place the pink towel on the table with the left gripper + subtask_index: 203 +- subtask: Place the cooking shovel on the table with the right gripper + subtask_index: 204 +- subtask: Grasp the red marker pen with the right gripper + subtask_index: 205 +- subtask: Place the XX into the pink plate with the left gripper + subtask_index: 206 +- subtask: Grasp the colorful ball with the right gripper + subtask_index: 207 +- subtask: Place the egg yolk pastry into the blue bowl with the left gripper + subtask_index: 208 +- subtask: Place the coke on the table with the left gripper + subtask_index: 209 +- subtask: Place the blue bowl into the green plate with the left gripper + subtask_index: 210 +- subtask: Place the Incense box into the blue bowl with the left gripper + subtask_index: 211 +- subtask: Place the cooking shovel into the green plate with the left gripper + subtask_index: 212 +- subtask: Place the colorful ball on the table with the left gripper + subtask_index: 213 +- subtask: Place the egg yolk pastry into the green plate with the right gripper + subtask_index: 214 +- subtask: Grasp the red marker pen cwith the right gripper + subtask_index: 215 +- subtask: Grasp the pink towel with the right gripper + 
subtask_index: 216 +- subtask: Grasp the shampoo with the left gripper + subtask_index: 217 +- subtask: Place the brown box the table with the right gripper + subtask_index: 218 +- subtask: Place the YiBao mineral water into the pink plate with the right gripper + subtask_index: 219 +- subtask: Grasp the Jia Duo Bao with the right gripper + subtask_index: 220 +- subtask: Place the colorful ball into the pink plate with the left gripper + subtask_index: 221 +- subtask: Place the egg yolk pastry into the blue bowl with the right gripper + subtask_index: 222 +- subtask: Place the egg tart into the pink pot with the left gripper + subtask_index: 223 +- subtask: Grasp the egg tart with the right gripper + subtask_index: 224 +- subtask: Place the xx into the pink plate with the left gripper + subtask_index: 225 +- subtask: Place the red duck on the table with the left gripper + subtask_index: 226 +- subtask: Grasp the Jia Duo Bao with the left gripper + subtask_index: 227 +- subtask: Place the Incense box into the pink plate with the left gripper + subtask_index: 228 +- subtask: Place the egg tart on the table with the left gripper + subtask_index: 229 +- subtask: Grasp the Jia Duo Bao with the left gripper + subtask_index: 230 +- subtask: Place the Incense box on the table with the left gripper + subtask_index: 231 +- subtask: Place the Luckin Coffee box on the table with the left gripper + subtask_index: 232 +- subtask: Place the coke on the table with the right gripper + subtask_index: 233 +- subtask: Place the pink towel on the table with the left gripper + subtask_index: 234 +- subtask: Place the red bull into the pink plate with the left gripper + subtask_index: 235 +- subtask: Place the coffee capsule into the green plate with the left gripper + subtask_index: 236 +- subtask: Place the blue towel into the green plate with the left gripper + subtask_index: 237 +- subtask: Place the shampoo into the green plate with the right gripper + subtask_index: 238 +- subtask: 
Place the mango into the green plate with the right gripper + subtask_index: 239 +- subtask: Place the colorful ball into the pink pot with the left gripper + subtask_index: 240 +- subtask: Place the Jia Duo Bao into the blue bowl with the right gripper + subtask_index: 241 +- subtask: Grasp the USB box with the left gripper + subtask_index: 242 +- subtask: Place the Jia Duo Bao into the green plate with the left gripper + subtask_index: 243 +- subtask: Place the corn into the green plate with the left gripper + subtask_index: 244 +- subtask: Place the Luckin Coffee box on the table with the left gripper + subtask_index: 245 +- subtask: Place the Jia Duo Bao into the pink pot with the right gripper + subtask_index: 246 +- subtask: Place the egg tart into the pink plate with the left gripper + subtask_index: 247 +- subtask: Place the blue square on the table with the right gripper + subtask_index: 248 +- subtask: Place the red marker pen into the blue bowl with the right gripper + subtask_index: 249 +- subtask: Grasp the red bull with the right gripper + subtask_index: 250 +- subtask: Place the Luckin Coffee box on the table with the right gripper + subtask_index: 251 +- subtask: Place the egg tart into the pink pot with the right gripper + subtask_index: 252 +- subtask: Place the colorful ball into the green bowl with the left gripper + subtask_index: 253 +- subtask: Grasp the blue towel with the right gripper + subtask_index: 254 +- subtask: Place the egg tart into the green bowl with the left gripper + subtask_index: 255 +- subtask: Place the Luckin Coffee box on the table with the right gripper + subtask_index: 256 +- subtask: Grasp the brown box with the left gripper + subtask_index: 257 +- subtask: Place the brown box on the table with the left gripper + subtask_index: 258 +- subtask: Place the brown box on the table with the right gripper + subtask_index: 259 +- subtask: Place the corn on the table with the left gripper + subtask_index: 260 +- subtask: Grasp 
the shampoo with the right gripper + subtask_index: 261 +- subtask: Place the blue bowl on the table with the left gripper + subtask_index: 262 +- subtask: End + subtask_index: 263 +- subtask: Place the Jia Duo Bao into the green bowl with the right gripper + subtask_index: 264 +- subtask: Place the colorful ball on the table with the right gripper + subtask_index: 265 +- subtask: Place the coke into the blue bowl with the left gripper + subtask_index: 266 +- subtask: Grasp the blue square with the right gripper + subtask_index: 267 +- subtask: Place the coke into the pink pot with the left gripper + subtask_index: 268 +- subtask: Grasp the blue bowl with the left gripper + subtask_index: 269 +- subtask: Place the egg yolk pastry on the table with the left gripper + subtask_index: 270 +- subtask: Place the egg yolk pastry on the table with the left gripper + subtask_index: 271 +- subtask: Grasp the pink pot with the right gripper + subtask_index: 272 +- subtask: 'null' + subtask_index: 273 +atomic_actions: +- grasp +- lift +- lower +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. 
+sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 1163 + total_frames: 478405 + fps: 30 + total_tasks: 274 + total_videos: 3489 + total_chunks: 2 + chunks_size: 1000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 5.45 GB +frame_num: 478405 +dataset_size: 5.45 GB +data_structure: 'Agilex_Cobot_Magic_Agilex_Cobot_Magic_move_object_qced_hardlink/ + + |-- annotations + + | |-- eef_acc_mag_annotation.jsonl + + | |-- eef_direction_annotation.jsonl + + | |-- eef_velocity_annotation.jsonl + + | |-- gripper_activity_annotation.jsonl + + | |-- gripper_mode_annotation.jsonl + + | |-- scene_annotations.jsonl + + | `-- subtask_annotations.jsonl + + |-- data + + | |-- chunk-000 + + | | |-- episode_000000.parquet + + | | |-- episode_000001.parquet + + | | |-- episode_000002.parquet + + | | |-- episode_000003.parquet + + | | |-- episode_000004.parquet + + | | |-- episode_000005.parquet + + | | |-- episode_000006.parquet + + | | |-- episode_000007.parquet + + | | |-- episode_000008.parquet + + | | |-- episode_000009.parquet + + | | |-- episode_000010.parquet + + | | `-- episode_000011.parquet + + | | `-- ... 
(988 more entries) + + | `-- chunk-001 + + | |-- episode_001000.parquet + + | |-- episode_001001.parquet + + | |-- episode_001002.parquet + + | |-- episode_001003.parquet + + | |-- episode_001004.parquet + + | |-- episode_001005.parquet + + | |-- episode_001006.parquet + + | |-- episode_001007.parquet + + | |-- episode_001008.parquet + + | |-- episode_001009.parquet + + | |-- episode_001010.parquet + + | `-- episode_001011.parquet + + | `-- ... (151 more entries) + + |-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- videos + + | |-- chunk-000 + + | | |-- observation.images.cam_head_rgb + + | | |-- observation.images.cam_left_wrist_rgb + + | | `-- observation.images.cam_right_wrist_rgb + + | `-- chunk-001 + + | |-- observation.images.cam_head_rgb + + | |-- observation.images.cam_left_wrist_rgb + + | `-- observation.images.cam_right_wrist_rgb + + |-- info.yaml + + `-- README.md' +splits: + train: 0:1162 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - 
left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - 
right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + shape: + - 2 + dtype: int32 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + shape: + - 2 + dtype: int32 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + shape: + - 2 + dtype: int32 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + shape: + - 2 + dtype: int32 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. 
+homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_move_object_beige_tablecloth.yaml b/dataset_info/Agilex_Cobot_Magic_move_object_beige_tablecloth.yaml index 460ff2f03a8edd5e3ab4ee5a1f16cb790e3fc2a3..9a6cd5b998c9db6e233f7ae590375237589e6f94 100644 --- a/dataset_info/Agilex_Cobot_Magic_move_object_beige_tablecloth.yaml +++ b/dataset_info/Agilex_Cobot_Magic_move_object_beige_tablecloth.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -213,28 +213,35 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: the gripper move the object. +task_instruction: +- the gripper move the object. 
sub_tasks: -- Grasp the XX with the right gripper -- Place the XX on the table with the left gripper -- Place the XX on the table with the right gripper -- Grasp the XX with the left gripper -- End -- 'null' +- subtask: Grasp the XX with the right gripper + subtask_index: 0 +- subtask: Place the XX on the table with the left gripper + subtask_index: 1 +- subtask: Place the XX on the table with the right gripper + subtask_index: 2 +- subtask: Grasp the XX with the left gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - lift - lower -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -242,13 +249,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -256,8 +260,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 199 total_frames: 103966 fps: 30 @@ -358,11 +361,9 @@ data_structure: 'Agilex_Cobot_Magic_move_object_beige_tablecloth_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:198 -data_path: 
data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -635,7 +636,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -643,7 +644,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -670,205 +670,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. 
- extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_move_object_beige_tablecloth - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: commercial & convenience - level2: supermarket - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - the gripper move the object. - sub_tasks: - - subtask: Grasp the XX with the right gripper - subtask_index: 0 - - subtask: Place the XX on the table with the left gripper - subtask_index: 1 - - subtask: Place the XX on the table with the right gripper - subtask_index: 2 - - subtask: Grasp the XX with the left gripper - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - lift - - lower - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 103966 - dataset_size: 1.34 GB - data_structure: 'Agilex_Cobot_Magic_move_object_beige_tablecloth_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- backup - - | |-- data - - | | `-- chunk-000 - - | `-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (187 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. 
- homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 100K-1M diff --git a/dataset_info/Agilex_Cobot_Magic_move_object_black_tablecloth.yaml b/dataset_info/Agilex_Cobot_Magic_move_object_black_tablecloth.yaml index 66c529a8a653fa80390cfae2ca9766a3d977c0a7..8cb743314436e2466071ab3aeb3343122473ea93 100644 --- a/dataset_info/Agilex_Cobot_Magic_move_object_black_tablecloth.yaml +++ b/dataset_info/Agilex_Cobot_Magic_move_object_black_tablecloth.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -213,163 +213,503 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. 
-task_result: 'null' -task_instruction: the gripper move the object. +task_instruction: +- the gripper move the object. sub_tasks: -- Place the XX on the table with the left gripper -- Grasp the pen container with the right gripper -- Grasp the blue bowl with the left gripper -- Grasp the pen container with the right gripper -- Place the square chewing gum on the table with the left gripper -- Grasp the pink towel with the left gripper -- Place the blue pot on the table with the right gripper -- Place the brown towel on the table with the left gripper -- Place the waffle on the table with the left gripper -- Place the white duck on the table with the right gripper -- Place the white blackboard erasure on the table with the left gripper -- Grasp the compass with the left gripper -- Grasp the green lemon with the right gripper -- Grasp the cyan cup with the left gripper -- Place the blue bowl on the table with the left gripper -- Grasp the mint candy with the right gripper -- Grasp the mint candy with the left gripper -- Grasp the cyan cup with the right gripper -- Grasp the green lemon with the left gripper -- "Grasp the white blackboard erasure\t with the left gripper" -- Grasp the white duck with the left gripper -- Grasp the square chewing gum with the left gripper -- Grasp the mint candy with the left gripper -- Place the brown towel on the table with the left gripper -- Place the on the table with the right gripper -- Grasp the XX with the right gripper -- Place the blue blackboard erasure on the table with the left gripper -- Place the white blackboard erasure on the table with the left gripper -- Place the teapot on the table with the right gripper -- Grasp the white blackboard erasure with the left gripper -- Grasp the Square chewing gum with the right gripper -- Place the pink towel on the table with the left gripper -- Grasp the hard facial cleanser with the right gripper -- Grasp the with the left gripper -- Place the hard facial cleanser on the table 
with the right gripper -- Grasp the white duck with the left gripper -- Place the mint candy on the table with the right gripper -- Grasp the brown towel with the left gripper -- Grasp the chocolate with the right gripper -- Grasp the brown towel with the right gripper -- Grasp the mango with the left gripper -- Place the white blackboard erasure on the table with the left gripper -- Grasp the brown towel with the left gripper -- Place the mango on the table with the right gripper -- Place the brown towel on the table with the left gripper -- Place the teapot on the table with the left gripper -- Grasp the hard facial cleanser with the right gripper -- Place the pink towel on the table with the right gripper -- Grasp the pink towel with the right gripper -- Place the waffle on the table with the right gripper -- Place the blue pot on the table with the left gripper -- Place the pen container on the table with the right gripper -- Grasp the blue pot with the right gripper -- Grasp the pen container with the left gripper -- Grasp the green lemon with the right gripper -- Grasp the eggplant with the right gripper -- "Place the white blackboard erasure\t on the table with the left gripper" -- Place the eggplant on the table with the left gripper -- Place the green lemon on the table with the left gripper -- Place the XX on the table with the right gripper -- End -- Grasp the white blackboard erasure with the right gripper -- Place the white duck on the table with the left gripper -- Place the orange on the table with the left gripper -- Grasp the eggplant with the right gripper -- Grasp the brown towel with the right gripper -- Place the square chewing gum on the table with the right gripper -- Place the compass on the table with the right gripper -- Grasp the orange with the left gripper -- Place the hard facial cleanser on the table with the left gripper -- Grasp the blue blackboard erasure with the left gripper -- Place the brown towel on the table with the right 
gripper -- Grasp the blue blackboard erasure with the right gripper -- Grasp the eggplant with the left gripper -- Grasp the square chewing gum with the right gripper -- Place the mango on the table with the left gripper -- Grasp the waffle with the right gripper -- "Grasp the white blackboard erasure\t with the right gripper" -- Place the teapot on the table with the left gripper -- Place the waffle on the table with the right gripper -- Grasp the mango with the right gripper -- Grasp the white blackboard erasure with the left gripper -- Grasp the blue pot with the left gripper -- Place the square chewing gum on the table with the right gripper -- Place the pen container on the table with the right gripper -- Place the green lemon on the table with the right gripper -- Place the blue blackboard erasure on the table with the right gripper -- Grasp the teapot with the left gripper -- Place the borwn towel on the table with the right gripper -- Place the pen container on the table with the left gripper -- Grasp the compass with the left gripper -- Grasp the fruit candy with the right gripper -- Place the cyan cup on the table with the left gripper -- Place the fruit candy on the table with the right gripper -- Place the compass on the table with the left gripper -- Grasp the white duck with the right gripper -- Grasp the waffle with the right gripper -- Place the blue pot on the table with the left gripper -- Grasp the mango with the left gripper -- Grasp the teapot with the right gripper -- Place the teacup on the table with the left gripper -- Grasp the pink towel with the right gripper -- Grasp the blue pot with the left gripper -- Place the mango on the table with the right gripper -- Place the mangosteen on the table with the left gripper -- Grasp the square chewing gum with the left gripper -- Grasp the square chewing gum with the right gripper -- Grasp the compass with the right gripper -- Place the tea cup on the table with the right gripper -- Place the 
teapot on the table with the left gripper -- Grasp the cyan cup with the left gripper -- Grasp the eggplant with the right gripper -- Place the white blackboard erasure on the table with the right gripper -- Place the green lemon on the table with the left gripper -- Grasp the hard facial cleanser with the left gripper -- "Place the white blackboard erasure\t on the table with the right gripper" -- Place the mango on the table with the left gripper -- Grasp the tea cup with the right gripper -- Grasp the waffle with the left gripper -- Grasp the pink towel with the left gripper -- Place the hard facial cleanser on the table with the right gripper -- Place the eggplant on the table with the right gripper -- Place the mint candy on the table with the left gripper -- Grasp the mint candy with the right gripper -- Grasp the chocolate with the right gripper -- Place the cyan cup on the table with the right gripper -- Grasp the XX with the left gripper -- Place the chocolate on the table with the right gripper -- Grasp the mint candy with the right gripper -- Grasp the with the right gripper -- Place the brown towel on the table with the right gripper -- Grasp the green lemon with the left gripper -- Place the Mangosteen on the table with the right gripper -- Place the square chewing gum on the table with the left gripper -- Grasp the teacup with the left gripper -- Place the chocolate on the table with the right gripper -- Grasp the blue bowl with the left gripper -- Grasp the teapot with the left gripper -- Place the pink bowel on the table with the right gripper -- Place the mint candy on the table with the right gripper -- 'null' +- subtask: Place the XX on the table with the left gripper + subtask_index: 0 +- subtask: 'Grasp the pen container with the right gripper + + ' + subtask_index: 1 +- subtask: 'Grasp the blue bowl with the left gripper + + ' + subtask_index: 2 +- subtask: Grasp the pen container with the right gripper + subtask_index: 3 +- subtask: 'Place 
the square chewing gum on the table with the left gripper + + ' + subtask_index: 4 +- subtask: 'Grasp the pink towel with the left gripper + + ' + subtask_index: 5 +- subtask: 'Place the blue pot on the table with the right gripper + + ' + subtask_index: 6 +- subtask: 'Place the brown towel on the table with the left gripper + + ' + subtask_index: 7 +- subtask: 'Place the waffle on the table with the left gripper + + ' + subtask_index: 8 +- subtask: 'Place the white duck on the table with the right gripper + + ' + subtask_index: 9 +- subtask: 'Place the white blackboard erasure on the table with the left gripper + + ' + subtask_index: 10 +- subtask: 'Grasp the compass with the left gripper + + ' + subtask_index: 11 +- subtask: 'Grasp the green lemon with the right gripper + + ' + subtask_index: 12 +- subtask: 'Grasp the cyan cup with the left gripper + + ' + subtask_index: 13 +- subtask: 'Place the blue bowl on the table with the left gripper + + ' + subtask_index: 14 +- subtask: 'Grasp the mint candy with the right gripper + + ' + subtask_index: 15 +- subtask: Grasp the mint candy with the left gripper + subtask_index: 16 +- subtask: 'Grasp the cyan cup with the right gripper + + ' + subtask_index: 17 +- subtask: 'Grasp the green lemon with the left gripper + + ' + subtask_index: 18 +- subtask: "Grasp the white blackboard erasure\t with the left gripper\n" + subtask_index: 19 +- subtask: 'Grasp the white duck with the left gripper + + ' + subtask_index: 20 +- subtask: Grasp the square chewing gum with the left gripper + subtask_index: 21 +- subtask: 'Grasp the mint candy with the left gripper + + ' + subtask_index: 22 +- subtask: Place the brown towel on the table with the left gripper + subtask_index: 23 +- subtask: 'Place the on the table with the right gripper + + ' + subtask_index: 24 +- subtask: Grasp the XX with the right gripper + subtask_index: 25 +- subtask: 'Place the blue blackboard erasure on the table with the left gripper + + ' + subtask_index: 26 +- 
subtask: Place the white blackboard erasure on the table with the left gripper + subtask_index: 27 +- subtask: 'Place the teapot on the table with the right gripper + + ' + subtask_index: 28 +- subtask: Grasp the white blackboard erasure with the left gripper + subtask_index: 29 +- subtask: 'Grasp the Square chewing gum with the right gripper + + ' + subtask_index: 30 +- subtask: 'Place the pink towel on the table with the left gripper + + ' + subtask_index: 31 +- subtask: 'Grasp the hard facial cleanser with the right gripper + + ' + subtask_index: 32 +- subtask: 'Grasp the with the left gripper + + ' + subtask_index: 33 +- subtask: Place the hard facial cleanser on the table with the right gripper + subtask_index: 34 +- subtask: 'Grasp the white duck with the left gripper + + ' + subtask_index: 35 +- subtask: Place the mint candy on the table with the right gripper + subtask_index: 36 +- subtask: Grasp the brown towel with the left gripper + subtask_index: 37 +- subtask: Grasp the chocolate with the right gripper + subtask_index: 38 +- subtask: Grasp the brown towel with the right gripper + subtask_index: 39 +- subtask: Grasp the mango with the left gripper + subtask_index: 40 +- subtask: 'Place the white blackboard erasure on the table with the left gripper + + ' + subtask_index: 41 +- subtask: 'Grasp the brown towel with the left gripper + + ' + subtask_index: 42 +- subtask: 'Place the mango on the table with the right gripper + + ' + subtask_index: 43 +- subtask: 'Place the brown towel on the table with the left gripper + + ' + subtask_index: 44 +- subtask: Place the teapot on the table with the left gripper + subtask_index: 45 +- subtask: Grasp the hard facial cleanser with the right gripper + subtask_index: 46 +- subtask: 'Place the pink towel on the table with the right gripper + + ' + subtask_index: 47 +- subtask: 'Grasp the pink towel with the right gripper + + ' + subtask_index: 48 +- subtask: Place the waffle on the table with the right gripper + 
subtask_index: 49 +- subtask: 'Place the blue pot on the table with the left gripper + + ' + subtask_index: 50 +- subtask: Place the pen container on the table with the right gripper + subtask_index: 51 +- subtask: 'Grasp the blue pot with the right gripper + + ' + subtask_index: 52 +- subtask: 'Grasp the pen container with the left gripper + + ' + subtask_index: 53 +- subtask: 'Grasp the green lemon with the right gripper + + ' + subtask_index: 54 +- subtask: Grasp the eggplant with the right gripper + subtask_index: 55 +- subtask: "Place the white blackboard erasure\t on the table with the left gripper\n" + subtask_index: 56 +- subtask: 'Place the eggplant on the table with the left gripper + + ' + subtask_index: 57 +- subtask: Place the green lemon on the table with the left gripper + subtask_index: 58 +- subtask: Place the XX on the table with the right gripper + subtask_index: 59 +- subtask: End + subtask_index: 60 +- subtask: Grasp the white blackboard erasure with the right gripper + subtask_index: 61 +- subtask: 'Place the white duck on the table with the left gripper + + ' + subtask_index: 62 +- subtask: 'Place the orange on the table with the left gripper + + ' + subtask_index: 63 +- subtask: 'Grasp the eggplant with the right gripper + + ' + subtask_index: 64 +- subtask: 'Grasp the brown towel with the right gripper + + ' + subtask_index: 65 +- subtask: 'Place the square chewing gum on the table with the right gripper + + ' + subtask_index: 66 +- subtask: 'Place the compass on the table with the right gripper + + ' + subtask_index: 67 +- subtask: 'Grasp the orange with the left gripper + + ' + subtask_index: 68 +- subtask: 'Place the hard facial cleanser on the table with the left gripper + + ' + subtask_index: 69 +- subtask: 'Grasp the blue blackboard erasure with the left gripper + + ' + subtask_index: 70 +- subtask: 'Place the brown towel on the table with the right gripper + + ' + subtask_index: 71 +- subtask: 'Grasp the blue blackboard erasure with 
the right gripper + + ' + subtask_index: 72 +- subtask: 'Grasp the eggplant with the left gripper + + ' + subtask_index: 73 +- subtask: 'Grasp the square chewing gum with the right gripper + + ' + subtask_index: 74 +- subtask: 'Place the mango on the table with the left gripper + + ' + subtask_index: 75 +- subtask: Grasp the waffle with the right gripper + subtask_index: 76 +- subtask: "Grasp the white blackboard erasure\t with the right gripper\n" + subtask_index: 77 +- subtask: 'Place the teapot on the table with the left gripper + + ' + subtask_index: 78 +- subtask: 'Place the waffle on the table with the right gripper + + ' + subtask_index: 79 +- subtask: 'Grasp the mango with the right gripper + + ' + subtask_index: 80 +- subtask: 'Grasp the white blackboard erasure with the left gripper + + ' + subtask_index: 81 +- subtask: 'Grasp the blue pot with the left gripper + + ' + subtask_index: 82 +- subtask: Place the square chewing gum on the table with the right gripper + subtask_index: 83 +- subtask: 'Place the pen container on the table with the right gripper + + ' + subtask_index: 84 +- subtask: 'Place the green lemon on the table with the right gripper + + ' + subtask_index: 85 +- subtask: 'Place the blue blackboard erasure on the table with the right gripper + + ' + subtask_index: 86 +- subtask: 'Grasp the teapot with the left gripper + + ' + subtask_index: 87 +- subtask: 'Place the borwn towel on the table with the right gripper + + ' + subtask_index: 88 +- subtask: 'Place the pen container on the table with the left gripper + + ' + subtask_index: 89 +- subtask: 'Grasp the compass with the left gripper + + ' + subtask_index: 90 +- subtask: 'Grasp the fruit candy with the right gripper + + ' + subtask_index: 91 +- subtask: 'Place the cyan cup on the table with the left gripper + + ' + subtask_index: 92 +- subtask: 'Place the fruit candy on the table with the right gripper + + ' + subtask_index: 93 +- subtask: 'Place the compass on the table with the left 
gripper + + ' + subtask_index: 94 +- subtask: 'Grasp the white duck with the right gripper + + ' + subtask_index: 95 +- subtask: 'Grasp the waffle with the right gripper + + ' + subtask_index: 96 +- subtask: 'Place the blue pot on the table with the left gripper + + ' + subtask_index: 97 +- subtask: 'Grasp the mango with the left gripper + + ' + subtask_index: 98 +- subtask: 'Grasp the teapot with the right gripper + + ' + subtask_index: 99 +- subtask: 'Place the teacup on the table with the left gripper + + ' + subtask_index: 100 +- subtask: 'Grasp the pink towel with the right gripper + + ' + subtask_index: 101 +- subtask: 'Grasp the blue pot with the left gripper + + ' + subtask_index: 102 +- subtask: 'Place the mango on the table with the right gripper + + ' + subtask_index: 103 +- subtask: Place the mangosteen on the table with the left gripper + subtask_index: 104 +- subtask: 'Grasp the square chewing gum with the left gripper + + ' + subtask_index: 105 +- subtask: Grasp the square chewing gum with the right gripper + subtask_index: 106 +- subtask: 'Grasp the compass with the right gripper + + ' + subtask_index: 107 +- subtask: 'Place the tea cup on the table with the right gripper + + ' + subtask_index: 108 +- subtask: 'Place the teapot on the table with the left gripper + + ' + subtask_index: 109 +- subtask: 'Grasp the cyan cup with the left gripper + + ' + subtask_index: 110 +- subtask: 'Grasp the eggplant with the right gripper + + ' + subtask_index: 111 +- subtask: Place the white blackboard erasure on the table with the right gripper + subtask_index: 112 +- subtask: 'Place the green lemon on the table with the left gripper + + ' + subtask_index: 113 +- subtask: 'Grasp the hard facial cleanser with the left gripper + + ' + subtask_index: 114 +- subtask: "Place the white blackboard erasure\t on the table with the right gripper\n" + subtask_index: 115 +- subtask: Place the mango on the table with the left gripper + subtask_index: 116 +- subtask: 'Grasp the 
tea cup with the right gripper + + ' + subtask_index: 117 +- subtask: 'Grasp the waffle with the left gripper + + ' + subtask_index: 118 +- subtask: 'Grasp the pink towel with the left gripper + + ' + subtask_index: 119 +- subtask: 'Place the hard facial cleanser on the table with the right gripper + + ' + subtask_index: 120 +- subtask: 'Place the eggplant on the table with the right gripper + + ' + subtask_index: 121 +- subtask: 'Place the mint candy on the table with the left gripper + + ' + subtask_index: 122 +- subtask: 'Grasp the mint candy with the right gripper + + ' + subtask_index: 123 +- subtask: 'Grasp the chocolate with the right gripper + + ' + subtask_index: 124 +- subtask: 'Place the cyan cup on the table with the right gripper + + ' + subtask_index: 125 +- subtask: Grasp the XX with the left gripper + subtask_index: 126 +- subtask: Place the chocolate on the table with the right gripper + subtask_index: 127 +- subtask: Grasp the mint candy with the right gripper + subtask_index: 128 +- subtask: 'Grasp the with the right gripper + + ' + subtask_index: 129 +- subtask: Place the brown towel on the table with the right gripper + subtask_index: 130 +- subtask: Grasp the green lemon with the left gripper + subtask_index: 131 +- subtask: Place the Mangosteen on the table with the right gripper + subtask_index: 132 +- subtask: Place the square chewing gum on the table with the left gripper + subtask_index: 133 +- subtask: 'Grasp the teacup with the left gripper + + ' + subtask_index: 134 +- subtask: 'Place the chocolate on the table with the right gripper + + ' + subtask_index: 135 +- subtask: 'Grasp the blue bowl with the left gripper + + ' + subtask_index: 136 +- subtask: Grasp the teapot with the left gripper + subtask_index: 137 +- subtask: 'Place the pink bowel on the table with the right gripper + + ' + subtask_index: 138 +- subtask: 'Place the mint candy on the table with the right gripper + + ' + subtask_index: 139 +- subtask: 'null' + 
subtask_index: 140 atomic_actions: - grasp - lift - lower -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -377,13 +717,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -391,8 +728,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 200 total_frames: 119721 fps: 30 @@ -477,11 +813,9 @@ data_structure: 'Agilex_Cobot_Magic_move_object_black_tablecloth_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:199 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -754,7 +1088,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -762,7 +1096,6 @@ dataset_description: This dataset uses an extended format based on 
LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -789,657 +1122,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_move_object_black_tablecloth - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: commercial & convenience - level2: supermarket - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - the gripper move the object. 
- sub_tasks: - - subtask: Place the XX on the table with the left gripper - subtask_index: 0 - - subtask: 'Grasp the pen container with the right gripper - - ' - subtask_index: 1 - - subtask: 'Grasp the blue bowl with the left gripper - - ' - subtask_index: 2 - - subtask: Grasp the pen container with the right gripper - subtask_index: 3 - - subtask: 'Place the square chewing gum on the table with the left gripper - - ' - subtask_index: 4 - - subtask: 'Grasp the pink towel with the left gripper - - ' - subtask_index: 5 - - subtask: 'Place the blue pot on the table with the right gripper - - ' - subtask_index: 6 - - subtask: 'Place the brown towel on the table with the left gripper - - ' - subtask_index: 7 - - subtask: 'Place the waffle on the table with the left gripper - - ' - subtask_index: 8 - - subtask: 'Place the white duck on the table with the right gripper - - ' - subtask_index: 9 - - subtask: 'Place the white blackboard erasure on the table with the left gripper - - ' - subtask_index: 10 - - subtask: 'Grasp the compass with the left gripper - - ' - subtask_index: 11 - - subtask: 'Grasp the green lemon with the right gripper - - ' - subtask_index: 12 - - subtask: 'Grasp the cyan cup with the left gripper - - ' - subtask_index: 13 - - subtask: 'Place the blue bowl on the table with the left gripper - - ' - subtask_index: 14 - - subtask: 'Grasp the mint candy with the right gripper - - ' - subtask_index: 15 - - subtask: Grasp the mint candy with the left gripper - subtask_index: 16 - - subtask: 'Grasp the cyan cup with the right gripper - - ' - subtask_index: 17 - - subtask: 'Grasp the green lemon with the left gripper - - ' - subtask_index: 18 - - subtask: "Grasp the white blackboard erasure\t with the left gripper\n" - subtask_index: 19 - - subtask: 'Grasp the white duck with the left gripper - - ' - subtask_index: 20 - - subtask: Grasp the square chewing gum with the left gripper - subtask_index: 21 - - subtask: 'Grasp the mint candy with the left gripper - 
- ' - subtask_index: 22 - - subtask: Place the brown towel on the table with the left gripper - subtask_index: 23 - - subtask: 'Place the on the table with the right gripper - - ' - subtask_index: 24 - - subtask: Grasp the XX with the right gripper - subtask_index: 25 - - subtask: 'Place the blue blackboard erasure on the table with the left gripper - - ' - subtask_index: 26 - - subtask: Place the white blackboard erasure on the table with the left gripper - subtask_index: 27 - - subtask: 'Place the teapot on the table with the right gripper - - ' - subtask_index: 28 - - subtask: Grasp the white blackboard erasure with the left gripper - subtask_index: 29 - - subtask: 'Grasp the Square chewing gum with the right gripper - - ' - subtask_index: 30 - - subtask: 'Place the pink towel on the table with the left gripper - - ' - subtask_index: 31 - - subtask: 'Grasp the hard facial cleanser with the right gripper - - ' - subtask_index: 32 - - subtask: 'Grasp the with the left gripper - - ' - subtask_index: 33 - - subtask: Place the hard facial cleanser on the table with the right gripper - subtask_index: 34 - - subtask: 'Grasp the white duck with the left gripper - - ' - subtask_index: 35 - - subtask: Place the mint candy on the table with the right gripper - subtask_index: 36 - - subtask: Grasp the brown towel with the left gripper - subtask_index: 37 - - subtask: Grasp the chocolate with the right gripper - subtask_index: 38 - - subtask: Grasp the brown towel with the right gripper - subtask_index: 39 - - subtask: Grasp the mango with the left gripper - subtask_index: 40 - - subtask: 'Place the white blackboard erasure on the table with the left gripper - - ' - subtask_index: 41 - - subtask: 'Grasp the brown towel with the left gripper - - ' - subtask_index: 42 - - subtask: 'Place the mango on the table with the right gripper - - ' - subtask_index: 43 - - subtask: 'Place the brown towel on the table with the left gripper - - ' - subtask_index: 44 - - subtask: Place the 
teapot on the table with the left gripper - subtask_index: 45 - - subtask: Grasp the hard facial cleanser with the right gripper - subtask_index: 46 - - subtask: 'Place the pink towel on the table with the right gripper - - ' - subtask_index: 47 - - subtask: 'Grasp the pink towel with the right gripper - - ' - subtask_index: 48 - - subtask: Place the waffle on the table with the right gripper - subtask_index: 49 - - subtask: 'Place the blue pot on the table with the left gripper - - ' - subtask_index: 50 - - subtask: Place the pen container on the table with the right gripper - subtask_index: 51 - - subtask: 'Grasp the blue pot with the right gripper - - ' - subtask_index: 52 - - subtask: 'Grasp the pen container with the left gripper - - ' - subtask_index: 53 - - subtask: 'Grasp the green lemon with the right gripper - - ' - subtask_index: 54 - - subtask: Grasp the eggplant with the right gripper - subtask_index: 55 - - subtask: "Place the white blackboard erasure\t on the table with the left gripper\n" - subtask_index: 56 - - subtask: 'Place the eggplant on the table with the left gripper - - ' - subtask_index: 57 - - subtask: Place the green lemon on the table with the left gripper - subtask_index: 58 - - subtask: Place the XX on the table with the right gripper - subtask_index: 59 - - subtask: End - subtask_index: 60 - - subtask: Grasp the white blackboard erasure with the right gripper - subtask_index: 61 - - subtask: 'Place the white duck on the table with the left gripper - - ' - subtask_index: 62 - - subtask: 'Place the orange on the table with the left gripper - - ' - subtask_index: 63 - - subtask: 'Grasp the eggplant with the right gripper - - ' - subtask_index: 64 - - subtask: 'Grasp the brown towel with the right gripper - - ' - subtask_index: 65 - - subtask: 'Place the square chewing gum on the table with the right gripper - - ' - subtask_index: 66 - - subtask: 'Place the compass on the table with the right gripper - - ' - subtask_index: 67 - - 
subtask: 'Grasp the orange with the left gripper - - ' - subtask_index: 68 - - subtask: 'Place the hard facial cleanser on the table with the left gripper - - ' - subtask_index: 69 - - subtask: 'Grasp the blue blackboard erasure with the left gripper - - ' - subtask_index: 70 - - subtask: 'Place the brown towel on the table with the right gripper - - ' - subtask_index: 71 - - subtask: 'Grasp the blue blackboard erasure with the right gripper - - ' - subtask_index: 72 - - subtask: 'Grasp the eggplant with the left gripper - - ' - subtask_index: 73 - - subtask: 'Grasp the square chewing gum with the right gripper - - ' - subtask_index: 74 - - subtask: 'Place the mango on the table with the left gripper - - ' - subtask_index: 75 - - subtask: Grasp the waffle with the right gripper - subtask_index: 76 - - subtask: "Grasp the white blackboard erasure\t with the right gripper\n" - subtask_index: 77 - - subtask: 'Place the teapot on the table with the left gripper - - ' - subtask_index: 78 - - subtask: 'Place the waffle on the table with the right gripper - - ' - subtask_index: 79 - - subtask: 'Grasp the mango with the right gripper - - ' - subtask_index: 80 - - subtask: 'Grasp the white blackboard erasure with the left gripper - - ' - subtask_index: 81 - - subtask: 'Grasp the blue pot with the left gripper - - ' - subtask_index: 82 - - subtask: Place the square chewing gum on the table with the right gripper - subtask_index: 83 - - subtask: 'Place the pen container on the table with the right gripper - - ' - subtask_index: 84 - - subtask: 'Place the green lemon on the table with the right gripper - - ' - subtask_index: 85 - - subtask: 'Place the blue blackboard erasure on the table with the right gripper - - ' - subtask_index: 86 - - subtask: 'Grasp the teapot with the left gripper - - ' - subtask_index: 87 - - subtask: 'Place the borwn towel on the table with the right gripper - - ' - subtask_index: 88 - - subtask: 'Place the pen container on the table with the left 
gripper - - ' - subtask_index: 89 - - subtask: 'Grasp the compass with the left gripper - - ' - subtask_index: 90 - - subtask: 'Grasp the fruit candy with the right gripper - - ' - subtask_index: 91 - - subtask: 'Place the cyan cup on the table with the left gripper - - ' - subtask_index: 92 - - subtask: 'Place the fruit candy on the table with the right gripper - - ' - subtask_index: 93 - - subtask: 'Place the compass on the table with the left gripper - - ' - subtask_index: 94 - - subtask: 'Grasp the white duck with the right gripper - - ' - subtask_index: 95 - - subtask: 'Grasp the waffle with the right gripper - - ' - subtask_index: 96 - - subtask: 'Place the blue pot on the table with the left gripper - - ' - subtask_index: 97 - - subtask: 'Grasp the mango with the left gripper - - ' - subtask_index: 98 - - subtask: 'Grasp the teapot with the right gripper - - ' - subtask_index: 99 - - subtask: 'Place the teacup on the table with the left gripper - - ' - subtask_index: 100 - - subtask: 'Grasp the pink towel with the right gripper - - ' - subtask_index: 101 - - subtask: 'Grasp the blue pot with the left gripper - - ' - subtask_index: 102 - - subtask: 'Place the mango on the table with the right gripper - - ' - subtask_index: 103 - - subtask: Place the mangosteen on the table with the left gripper - subtask_index: 104 - - subtask: 'Grasp the square chewing gum with the left gripper - - ' - subtask_index: 105 - - subtask: Grasp the square chewing gum with the right gripper - subtask_index: 106 - - subtask: 'Grasp the compass with the right gripper - - ' - subtask_index: 107 - - subtask: 'Place the tea cup on the table with the right gripper - - ' - subtask_index: 108 - - subtask: 'Place the teapot on the table with the left gripper - - ' - subtask_index: 109 - - subtask: 'Grasp the cyan cup with the left gripper - - ' - subtask_index: 110 - - subtask: 'Grasp the eggplant with the right gripper - - ' - subtask_index: 111 - - subtask: Place the white blackboard 
erasure on the table with the right gripper - subtask_index: 112 - - subtask: 'Place the green lemon on the table with the left gripper - - ' - subtask_index: 113 - - subtask: 'Grasp the hard facial cleanser with the left gripper - - ' - subtask_index: 114 - - subtask: "Place the white blackboard erasure\t on the table with the right gripper\n" - subtask_index: 115 - - subtask: Place the mango on the table with the left gripper - subtask_index: 116 - - subtask: 'Grasp the tea cup with the right gripper - - ' - subtask_index: 117 - - subtask: 'Grasp the waffle with the left gripper - - ' - subtask_index: 118 - - subtask: 'Grasp the pink towel with the left gripper - - ' - subtask_index: 119 - - subtask: 'Place the hard facial cleanser on the table with the right gripper - - ' - subtask_index: 120 - - subtask: 'Place the eggplant on the table with the right gripper - - ' - subtask_index: 121 - - subtask: 'Place the mint candy on the table with the left gripper - - ' - subtask_index: 122 - - subtask: 'Grasp the mint candy with the right gripper - - ' - subtask_index: 123 - - subtask: 'Grasp the chocolate with the right gripper - - ' - subtask_index: 124 - - subtask: 'Place the cyan cup on the table with the right gripper - - ' - subtask_index: 125 - - subtask: Grasp the XX with the left gripper - subtask_index: 126 - - subtask: Place the chocolate on the table with the right gripper - subtask_index: 127 - - subtask: Grasp the mint candy with the right gripper - subtask_index: 128 - - subtask: 'Grasp the with the right gripper - - ' - subtask_index: 129 - - subtask: Place the brown towel on the table with the right gripper - subtask_index: 130 - - subtask: Grasp the green lemon with the left gripper - subtask_index: 131 - - subtask: Place the Mangosteen on the table with the right gripper - subtask_index: 132 - - subtask: Place the square chewing gum on the table with the left gripper - subtask_index: 133 - - subtask: 'Grasp the teacup with the left gripper - - ' - 
subtask_index: 134 - - subtask: 'Place the chocolate on the table with the right gripper - - ' - subtask_index: 135 - - subtask: 'Grasp the blue bowl with the left gripper - - ' - subtask_index: 136 - - subtask: Grasp the teapot with the left gripper - subtask_index: 137 - - subtask: 'Place the pink bowel on the table with the right gripper - - ' - subtask_index: 138 - - subtask: 'Place the mint candy on the table with the right gripper - - ' - subtask_index: 139 - - subtask: 'null' - subtask_index: 140 - atomic_actions: - - grasp - - lift - - lower - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 119721 - dataset_size: 6.75 GB - data_structure: 'Agilex_Cobot_Magic_move_object_black_tablecloth_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(188 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, 
Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 100K-1M diff --git a/dataset_info/Agilex_Cobot_Magic_move_object_green_tablecloth.yaml b/dataset_info/Agilex_Cobot_Magic_move_object_green_tablecloth.yaml index c3a24de23bd94350ac3256baead7b1e96d9757cd..e151892e329001a283393ca0994a457b0e1a5ecf 100644 --- a/dataset_info/Agilex_Cobot_Magic_move_object_green_tablecloth.yaml +++ b/dataset_info/Agilex_Cobot_Magic_move_object_green_tablecloth.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -213,205 +213,584 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: the gripper move the object. +task_instruction: +- the gripper move the object. sub_tasks: -- Place the XX on the table with the left gripper -- Grasp the blue blackboard earser with the right gripper -- Grasp the square chewing gun with the right gripper -- Grasp the pen container with the right gripper -- Grasp the blue bowl with the left gripper -- Place the hard blackbaord cleanser on the table with the left gripper -- Place the blue towel on the table with the right gripper -- Grasp the orange with the right gripper -- Grasp the white blackboard earser with the left gripper -- Place the mangosteen on the table with the left gripper -- Grasp the pen container with the right gripper -- Place the pen container on the table with the right gripper -- Grasp the white blackboard earser with left gripper -- Place the blue bowl on the table with the right gripper -- Place the eggplant on the table with the right gripper -- Place the blue blackboard earser on the table with the right gripper -- Place the blue blackboard earser on the table with the right gripper -- Place the orange on the table with the right gripper -- Place the blue bowl on the table with the right gripper -- Place the brown towel on the table with the left gripper -- Grasp the cyan cup with the right gripper -- Place the compasses on the table with the right gripper -- Grasp the compasses with the right gripper -- 
Grasp the green lemon with the right gripper -- Grasp the cyan cup with the left gripper -- Place the wallfe on the table with the left gripper -- Place the blue bowl on the table with the left gripper -- Grasp the fruit candy with the left gripper -- Grasp the orange with the right gripper -- Place the green lemon on the table with the right gripper -- Grasp the cyan cup with the right gripper -- Grasp the wallfe with the right gripper -- Grasp the green lemon with the left gripper -- Place the white blackboard earser on the table with the right gripper -- Grasp the green lemon with the right gripper -- Grasp the square chewing gun with the left gripper -- Grasp the square chewing gum with the left gripper -- Grasp the with cyan cup the left gripper -- Place the blue bowl on the table with the left gripper -- Grasp the tea[ot with the left gripper -- Grasp the cyan cup with the right gripper -- Place the wallfe on the table with the right gripper -- Place the pen container on the table with the right gripper -- Place the eggplant on the table with the left gripper -- Grasp the hard facial cleanser with the right gripper -- Place the ornage on the table with the right gripper -- Grasp the pen container with the left gripper -- Place the fruit candy on the table with the left gripper -- Grasp the hard facial cleanser with the left gripper -- Grasp the chocolate with the right gripper -- Grasp the blue blackboard earser with the right gripper -- Grasp the mangosteen with the left gripper -- Place the square chewing gun on the table with the right gripper -- Grasp the brown towel with the left gripper -- Place the brown towel on the table with the left gripper -- Grasp the brown towel with the right gripper mangosteen -- Grasp the blue bowl with the right gripper -- Place the teapot on the table with the left gripper -- Grasp the blue cup with the left gripper -- Grasp the teacup with the right gripper -- Grasp the square chewing gun with the right gripper -- Grasp 
the teapot with the right gripper -- Place the pink towel on the table with the left gripper -- Grasp the blue bowl with the left gripper -- Place the waffle on the table with the right gripper -- Place the pen container on the table with the right gripper -- Place the mangosteen on the table with the right gripper -- Grasp the pen container with the left gripper -- Grasp the eggplant with the right gripper -- Grasp the white blackboard earser with the right gripper -- Grasp the green lemon with the right gripper -- Place the eggplant on the table with the left gripper -- Grasp the eggplant with the left gripper -- Place the hard facial cleanser on the table with the left gripper -- Grasp the hard facial cleanser with the right gripper -- Grasp the mangosteen with the left gripper -- Place the mangosteen on the table with the right gripper -- End -- Grasp the wallfe with the right gripper -- Grasp the white blackboard erasure with the right gripper -- Grasp the blue blackboard erasure with the left gripper -- Grasp the white blackboard earser with the right gripper -- Place the orange on the table with the left gripper -- Place the fruit candy on the table with the right gripper -- Grasp the brown towel with the right gripper -- Grasp the orange with the left gripper -- Place the compass on the table with the right gripper -- Grasp the tea cup with the left gripper -- Place the hard facial cleanser on the table with the left gripper -- Place the brown towel on the table with the right gripper -- Grasp the eggplant with the left gripper -- Place the pen container on the table with the left gripper -- Grasp the white blackboard earser with the right gripper -- Grasp the square chewing gun with the right gripper -- Place the cyan cup on the table with the left gripper -- Grasp the waffle with the right gripper -- Grasp the blue bowl with the right gripper -- Grasp the square chewing gun with the right gripper -- Place the waffle on the table with the right gripper -- 
Grasp the blue bowl with the right gripper -- Abnormal -- Grasp the white blackboard earser with the right gripper mangosteen -- Grasp the blue towel with the right gripper -- Grasp the brown towel with the right gripper -- Place the pen container on the table with the right gripper -- Place the square chewing gum on the table with the right gripper -- Place the green lemon on the table with the right gripper -- Place the chocolate on the table with the right gripper -- Grasp the square facial square with the right gripper -- Grasp the teapot with the left gripper -- Place the pen container on the table with the left gripper -- Place the hard facial earser on the table with the right gripper -- Place the teacup on the table with the left gripper -- Place the sqaure chewing gun on the table with the right gripper -- Place the blue cup on the table with the left gripper -- Place the blue blackboard erasure on the table with the left gripper -- Grasp the wallfe with the right gripper -- Grasp the cyan cup with the right gripper -- Grasp the pink towel with the left gripper -- Grasp the mangosteen with the right gripper -- Place the cyan cup on the table with the left gripper -- Place the wallfe on the table with the right gripper -- Grasp the square chewing gun with the left gripper -- Grasp the blue bowel with the right gripper -- Grasp the orange with the left gripper -- Place the sqaure chewing gun on the table with the right gripper -- Grasp the sqaure chewing gun with the right gripper -- Place the orange on the table with the right gripper -- Place the white blackboard earser on the table with the left gripper -- Place the teacup on the table with the left gripper -- Grasp the pen container with the right gripper -- Grasp the blue blackboard earser with the left gripper -- Place the teapot on the table with the right gripper -- Grasp the wallfe with the right gripper -- Place the mangosteen on the table with the left gripper -- Place the square chewing gun on 
the table with the left gripper -- Grasp the fruit candy with the right gripper -- Grasp the square chewing gum with the right gripper -- Grasp the compass with the right gripper -- Place the teapot on the table with the left gripper -- Grasp the cyan cup with the left gripper -- Place the teacup on the table with the right gripper -- Grasp the eggplant with the right gripper -- Grasp the pen container with the right gripper mangosteen -- Grasp the mangosteen with the right gripper -- Place the green lemon on the table with the left gripper -- Place the pen containeron the table with the left gripper -- Place the white blackboard erasure on the table with the right gripper -- Grasp the hard facial cleanser with the left gripper -- Grasp the whiite blackboard earser with the right gripper -- Grasp the wallfe with the left gripper -- Grasp the wallfe with the right gripper mangosteen -- Grasp the chocolate with the right gripper -- Place the cyan cup on the table with the left gripper -- Grasp the cyan cup with the right gripper -- Place the green lemon on the table with the right gripper -- Place the chocolate on the table with the left gripper -- Grasp the cyan cup with the right gripper mangosteen -- Grasp the eggplant with the right gripper -- Grasp the white blackboard earser with the left gripper -- Place the hard facial cleanser on the table with the right gripper -- Place the blue blackboard earser on the table with the left gripper -- Place the eggplant on the table with the right gripper -- Grasp the chocolate with the right gripper -- Grasp the chocolate with the left gripper -- Place the orange on the table with the left gripper -- Grasp the XX with the left gripper -- Place the pen container on the table with the left gripper -- Place the chocolate on the table with the right gripper -- Place the white blackboard earser on the table with the left gripper -- Grasp the chocolate with the left gripper -- Grasp the pen container with the left gripper -- 
Place the sqaure chewing gun on the table with the left gripper -- Place the tea cup on the table with the left gripper -- Place the chocolate on the table with the right gripper -- Grasp the teacup with the left gripper -- Place the square chewing gum on the table with the left gripper -- Grasp the brown towel with the right gripper -- Grasp the teapot with the left gripper -- Grasp the teacup with the left gripper -- Place the chocolate on the table with the left gripper -- Place the cyan cup on the table with the right gripper -- 'null' +- subtask: Place the XX on the table with the left gripper + subtask_index: 0 +- subtask: 'Grasp the blue blackboard earser with the right gripper + + ' + subtask_index: 1 +- subtask: 'Grasp the square chewing gun with the right gripper ' + subtask_index: 2 +- subtask: 'Grasp the pen container with the right gripper + + ' + subtask_index: 3 +- subtask: 'Grasp the blue bowl with the left gripper + + ' + subtask_index: 4 +- subtask: 'Place the hard blackbaord cleanser on the table with the left gripper + + ' + subtask_index: 5 +- subtask: 'Place the blue towel on the table with the right gripper + + ' + subtask_index: 6 +- subtask: 'Grasp the orange with the right gripper + + ' + subtask_index: 7 +- subtask: 'Grasp the white blackboard earser with the left gripper + + ' + subtask_index: 8 +- subtask: 'Place the mangosteen on the table with the left gripper + + ' + subtask_index: 9 +- subtask: Grasp the pen container with the right gripper + subtask_index: 10 +- subtask: Place the pen container on the table with the right gripper + subtask_index: 11 +- subtask: Grasp the white blackboard earser with left gripper + subtask_index: 12 +- subtask: Place the blue bowl on the table with the right gripper + subtask_index: 13 +- subtask: Place the eggplant on the table with the right gripper + subtask_index: 14 +- subtask: 'Place the blue blackboard earser on the table with the right gripper + + ' + subtask_index: 15 +- subtask: 'Place the 
blue blackboard earser on the table with the right gripper + + ' + subtask_index: 16 +- subtask: 'Place the orange on the table with the right gripper + + ' + subtask_index: 17 +- subtask: 'Place the blue bowl on the table with the right gripper + + ' + subtask_index: 18 +- subtask: 'Place the brown towel on the table with the left gripper + + ' + subtask_index: 19 +- subtask: 'Grasp the cyan cup with the right gripper ' + subtask_index: 20 +- subtask: Place the compasses on the table with the right gripper + subtask_index: 21 +- subtask: Grasp the compasses with the right gripper + subtask_index: 22 +- subtask: 'Grasp the green lemon with the right gripper + + ' + subtask_index: 23 +- subtask: 'Grasp the cyan cup with the left gripper + + ' + subtask_index: 24 +- subtask: 'Place the wallfe on the table with the left gripper + + ' + subtask_index: 25 +- subtask: 'Place the blue bowl on the table with the left gripper + + ' + subtask_index: 26 +- subtask: Grasp the fruit candy with the left gripper + subtask_index: 27 +- subtask: Grasp the orange with the right gripper + subtask_index: 28 +- subtask: Place the green lemon on the table with the right gripper + subtask_index: 29 +- subtask: 'Grasp the cyan cup with the right gripper + + ' + subtask_index: 30 +- subtask: 'Grasp the wallfe with the right gripper + + ' + subtask_index: 31 +- subtask: 'Grasp the green lemon with the left gripper + + ' + subtask_index: 32 +- subtask: 'Place the white blackboard earser on the table with the right gripper + + ' + subtask_index: 33 +- subtask: ' + + Grasp the green lemon with the right gripper' + subtask_index: 34 +- subtask: 'Grasp the square chewing gun with the left gripper + + ' + subtask_index: 35 +- subtask: Grasp the square chewing gum with the left gripper + subtask_index: 36 +- subtask: 'Grasp the with cyan cup the left gripper + + ' + subtask_index: 37 +- subtask: Place the blue bowl on the table with the left gripper + subtask_index: 38 +- subtask: 'Grasp the 
tea[ot with the left gripper + + ' + subtask_index: 39 +- subtask: 'Grasp the cyan cup with the right gripper ' + subtask_index: 40 +- subtask: 'Place the wallfe on the table with the right gripper + + ' + subtask_index: 41 +- subtask: 'Place the pen container on the table with the right gripper + + ' + subtask_index: 42 +- subtask: Place the eggplant on the table with the left gripper + subtask_index: 43 +- subtask: 'Grasp the hard facial cleanser with the right gripper + + ' + subtask_index: 44 +- subtask: 'Place the ornage on the table with the right gripper + + ' + subtask_index: 45 +- subtask: 'Grasp the pen container with the left gripper + + ' + subtask_index: 46 +- subtask: Place the fruit candy on the table with the left gripper + subtask_index: 47 +- subtask: Grasp the hard facial cleanser with the left gripper + subtask_index: 48 +- subtask: Grasp the chocolate with the right gripper + subtask_index: 49 +- subtask: 'Grasp the blue blackboard earser with the right gripper ' + subtask_index: 50 +- subtask: Grasp the mangosteen with the left gripper + subtask_index: 51 +- subtask: 'Place the square chewing gun on the table with the right gripper + + ' + subtask_index: 52 +- subtask: 'Grasp the brown towel with the left gripper + + ' + subtask_index: 53 +- subtask: 'Place the brown towel on the table with the left gripper + + ' + subtask_index: 54 +- subtask: 'Grasp the brown towel with the right gripper mangosteen + + ' + subtask_index: 55 +- subtask: 'Grasp the blue bowl with the right gripper + + ' + subtask_index: 56 +- subtask: Place the teapot on the table with the left gripper + subtask_index: 57 +- subtask: 'Grasp the blue cup with the left gripper + + ' + subtask_index: 58 +- subtask: Grasp the teacup with the right gripper + subtask_index: 59 +- subtask: Grasp the square chewing gun with the right gripper + subtask_index: 60 +- subtask: Grasp the teapot with the right gripper + subtask_index: 61 +- subtask: Place the pink towel on the table with 
the left gripper + subtask_index: 62 +- subtask: Grasp the blue bowl with the left gripper + subtask_index: 63 +- subtask: Place the waffle on the table with the right gripper + subtask_index: 64 +- subtask: Place the pen container on the table with the right gripper + subtask_index: 65 +- subtask: Place the mangosteen on the table with the right gripper + subtask_index: 66 +- subtask: 'Grasp the pen container with the left gripper + + ' + subtask_index: 67 +- subtask: Grasp the eggplant with the right gripper + subtask_index: 68 +- subtask: 'Grasp the white blackboard earser with the right gripper ' + subtask_index: 69 +- subtask: Grasp the green lemon with the right gripper + subtask_index: 70 +- subtask: 'Place the eggplant on the table with the left gripper + + ' + subtask_index: 71 +- subtask: Grasp the eggplant with the left gripper + subtask_index: 72 +- subtask: Place the hard facial cleanser on the table with the left gripper + subtask_index: 73 +- subtask: 'Grasp the hard facial cleanser with the right gripper ' + subtask_index: 74 +- subtask: 'Grasp the mangosteen with the left gripper + + ' + subtask_index: 75 +- subtask: 'Place the mangosteen on the table with the right gripper + + ' + subtask_index: 76 +- subtask: End + subtask_index: 77 +- subtask: 'Grasp the wallfe with the right gripper ' + subtask_index: 78 +- subtask: Grasp the white blackboard erasure with the right gripper + subtask_index: 79 +- subtask: Grasp the blue blackboard erasure with the left gripper + subtask_index: 80 +- subtask: 'Grasp the white blackboard earser with the right gripper + + ' + subtask_index: 81 +- subtask: 'Place the orange on the table with the left gripper + + ' + subtask_index: 82 +- subtask: Place the fruit candy on the table with the right gripper + subtask_index: 83 +- subtask: 'Grasp the brown towel with the right gripper + + ' + subtask_index: 84 +- subtask: 'Grasp the orange with the left gripper + + ' + subtask_index: 85 +- subtask: 'Place the compass on 
the table with the right gripper + + ' + subtask_index: 86 +- subtask: 'Grasp the tea cup with the left gripper + + ' + subtask_index: 87 +- subtask: 'Place the hard facial cleanser on the table with the left gripper + + ' + subtask_index: 88 +- subtask: 'Place the brown towel on the table with the right gripper + + ' + subtask_index: 89 +- subtask: 'Grasp the eggplant with the left gripper + + ' + subtask_index: 90 +- subtask: Place the pen container on the table with the left gripper + subtask_index: 91 +- subtask: 'Grasp the white blackboard earser with the right gripper + + ' + subtask_index: 92 +- subtask: 'Grasp the square chewing gun with the right gripper ' + subtask_index: 93 +- subtask: 'Place the cyan cup on the table with the left gripper + + ' + subtask_index: 94 +- subtask: Grasp the waffle with the right gripper + subtask_index: 95 +- subtask: Grasp the blue bowl with the right gripper + subtask_index: 96 +- subtask: 'Grasp the square chewing gun with the right gripper ' + subtask_index: 97 +- subtask: 'Place the waffle on the table with the right gripper + + ' + subtask_index: 98 +- subtask: 'Grasp the blue bowl with the right gripper ' + subtask_index: 99 +- subtask: Abnormal + subtask_index: 100 +- subtask: 'Grasp the white blackboard earser with the right gripper mangosteen + + ' + subtask_index: 101 +- subtask: "Grasp the blue towel with the right gripper \ + \ \n" + subtask_index: 102 +- subtask: 'Grasp the brown towel with the right gripper + + ' + subtask_index: 103 +- subtask: 'Place the pen container on the table with the right gripper + + ' + subtask_index: 104 +- subtask: Place the square chewing gum on the table with the right gripper + subtask_index: 105 +- subtask: 'Place the green lemon on the table with the right gripper + + ' + subtask_index: 106 +- subtask: 'Place the chocolate on the table with the right gripper + + ' + subtask_index: 107 +- subtask: 'Grasp the square facial square with the right gripper ' + subtask_index: 108 +- 
subtask: 'Grasp the teapot with the left gripper + + ' + subtask_index: 109 +- subtask: 'Place the pen container on the table with the left gripper + + ' + subtask_index: 110 +- subtask: 'Place the hard facial earser on the table with the right gripper + + ' + subtask_index: 111 +- subtask: Place the teacup on the table with the left gripper + subtask_index: 112 +- subtask: 'Place the sqaure chewing gun on the table with the right gripper + + ' + subtask_index: 113 +- subtask: 'Place the blue cup on the table with the left gripper + + ' + subtask_index: 114 +- subtask: Place the blue blackboard erasure on the table with the left gripper + subtask_index: 115 +- subtask: 'Grasp the wallfe with the right gripper ' + subtask_index: 116 +- subtask: Grasp the cyan cup with the right gripper + subtask_index: 117 +- subtask: Grasp the pink towel with the left gripper + subtask_index: 118 +- subtask: Grasp the mangosteen with the right gripper + subtask_index: 119 +- subtask: 'Place the cyan cup on the table with the left gripper + + ' + subtask_index: 120 +- subtask: 'Place the wallfe on the table with the right gripper + + ' + subtask_index: 121 +- subtask: 'Grasp the square chewing gun with the left gripper + + ' + subtask_index: 122 +- subtask: 'Grasp the blue bowel with the right gripper ' + subtask_index: 123 +- subtask: Grasp the orange with the left gripper + subtask_index: 124 +- subtask: 'Place the sqaure chewing gun on the table with the right gripper + + ' + subtask_index: 125 +- subtask: 'Grasp the sqaure chewing gun with the right gripper + + ' + subtask_index: 126 +- subtask: Place the orange on the table with the right gripper + subtask_index: 127 +- subtask: 'Place the white blackboard earser on the table with the left gripper + + ' + subtask_index: 128 +- subtask: 'Place the teacup on the table with the left gripper + + ' + subtask_index: 129 +- subtask: Grasp the pen container with the right gripper + subtask_index: 130 +- subtask: 'Grasp the blue 
blackboard earser with the left gripper + + ' + subtask_index: 131 +- subtask: Place the teapot on the table with the right gripper + subtask_index: 132 +- subtask: 'Grasp the wallfe with the right gripper ' + subtask_index: 133 +- subtask: Place the mangosteen on the table with the left gripper + subtask_index: 134 +- subtask: 'Place the square chewing gun on the table with the left gripper + + ' + subtask_index: 135 +- subtask: Grasp the fruit candy with the right gripper + subtask_index: 136 +- subtask: Grasp the square chewing gum with the right gripper + subtask_index: 137 +- subtask: 'Grasp the compass with the right gripper + + ' + subtask_index: 138 +- subtask: 'Place the teapot on the table with the left gripper + + ' + subtask_index: 139 +- subtask: 'Grasp the cyan cup with the left gripper + + ' + subtask_index: 140 +- subtask: Place the teacup on the table with the right gripper + subtask_index: 141 +- subtask: 'Grasp the eggplant with the right gripper + + ' + subtask_index: 142 +- subtask: 'Grasp the pen container with the right gripper mangosteen + + ' + subtask_index: 143 +- subtask: 'Grasp the mangosteen with the right gripper ' + subtask_index: 144 +- subtask: 'Place the green lemon on the table with the left gripper + + ' + subtask_index: 145 +- subtask: Place the pen containeron the table with the left gripper + subtask_index: 146 +- subtask: Place the white blackboard erasure on the table with the right gripper + subtask_index: 147 +- subtask: 'Grasp the hard facial cleanser with the left gripper + + ' + subtask_index: 148 +- subtask: 'Grasp the whiite blackboard earser with the right gripper + + ' + subtask_index: 149 +- subtask: 'Grasp the wallfe with the left gripper + + ' + subtask_index: 150 +- subtask: 'Grasp the wallfe with the right gripper mangosteen + + ' + subtask_index: 151 +- subtask: 'Grasp the chocolate with the right gripper ' + subtask_index: 152 +- subtask: 'Place the cyan cup on the table with the left gripper + + ' + 
subtask_index: 153 +- subtask: 'Grasp the cyan cup with the right gripper + + ' + subtask_index: 154 +- subtask: 'Place the green lemon on the table with the right gripper + + ' + subtask_index: 155 +- subtask: Place the chocolate on the table with the left gripper + subtask_index: 156 +- subtask: 'Grasp the cyan cup with the right gripper mangosteen + + ' + subtask_index: 157 +- subtask: 'Grasp the eggplant with the right gripper ' + subtask_index: 158 +- subtask: 'Grasp the white blackboard earser with the left gripper + + ' + subtask_index: 159 +- subtask: 'Place the hard facial cleanser on the table with the right gripper + + ' + subtask_index: 160 +- subtask: 'Place the blue blackboard earser on the table with the left gripper + + ' + subtask_index: 161 +- subtask: 'Place the eggplant on the table with the right gripper + + ' + subtask_index: 162 +- subtask: 'Grasp the chocolate with the right gripper + + ' + subtask_index: 163 +- subtask: Grasp the chocolate with the left gripper + subtask_index: 164 +- subtask: Place the orange on the table with the left gripper + subtask_index: 165 +- subtask: Grasp the XX with the left gripper + subtask_index: 166 +- subtask: Place the pen container on the table with the left gripper + subtask_index: 167 +- subtask: Place the chocolate on the table with the right gripper + subtask_index: 168 +- subtask: Place the white blackboard earser on the table with the left gripper + subtask_index: 169 +- subtask: 'Grasp the chocolate with the left gripper + + ' + subtask_index: 170 +- subtask: Grasp the pen container with the left gripper + subtask_index: 171 +- subtask: 'Place the sqaure chewing gun on the table with the left gripper + + ' + subtask_index: 172 +- subtask: 'Place the tea cup on the table with the left gripper + + ' + subtask_index: 173 +- subtask: 'Place the chocolate on the table with the right gripper + + ' + subtask_index: 174 +- subtask: 'Grasp the teacup with the left gripper + + ' + subtask_index: 175 +- 
subtask: Place the square chewing gum on the table with the left gripper + subtask_index: 176 +- subtask: 'Grasp the brown towel with the right gripper ' + subtask_index: 177 +- subtask: Grasp the teapot with the left gripper + subtask_index: 178 +- subtask: Grasp the teacup with the left gripper + subtask_index: 179 +- subtask: 'Place the chocolate on the table with the left gripper + + ' + subtask_index: 180 +- subtask: 'Place the cyan cup on the table with the right gripper + + ' + subtask_index: 181 +- subtask: 'null' + subtask_index: 182 atomic_actions: - grasp - lift - lower -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -419,13 +798,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -433,8 +809,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 197 total_frames: 85405 fps: 30 @@ -535,11 +910,9 @@ data_structure: 'Agilex_Cobot_Magic_move_object_green_tablecloth_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:196 -data_path: 
data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -812,7 +1185,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -820,7 +1193,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -847,754 +1219,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. 
- extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_move_object_green_tablecloth - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: commercial & convenience - level2: supermarket - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - the gripper move the object. - sub_tasks: - - subtask: Place the XX on the table with the left gripper - subtask_index: 0 - - subtask: 'Grasp the blue blackboard earser with the right gripper - - ' - subtask_index: 1 - - subtask: 'Grasp the square chewing gun with the right gripper ' - subtask_index: 2 - - subtask: 'Grasp the pen container with the right gripper - - ' - subtask_index: 3 - - subtask: 'Grasp the blue bowl with the left gripper - - ' - subtask_index: 4 - - subtask: 'Place the hard blackbaord cleanser on the table with the left gripper - - ' - subtask_index: 5 - - subtask: 'Place the blue towel on the table with the right gripper - - ' - subtask_index: 6 - - subtask: 'Grasp the orange with the right gripper - - ' - subtask_index: 7 - - subtask: 'Grasp the white blackboard earser with the left gripper - - ' - subtask_index: 8 - - subtask: 'Place the mangosteen on the table with the left gripper - - ' - subtask_index: 9 - - subtask: Grasp the pen container with the right gripper - subtask_index: 10 - - subtask: Place the pen container on the table with the right gripper - subtask_index: 11 - - subtask: Grasp the white blackboard earser with left gripper - subtask_index: 12 - - subtask: Place the blue bowl on the table with the right gripper - subtask_index: 13 - - subtask: Place the eggplant on the table with the right gripper - subtask_index: 14 - - subtask: 'Place the blue blackboard earser on the table 
with the right gripper - - ' - subtask_index: 15 - - subtask: 'Place the blue blackboard earser on the table with the right gripper - - ' - subtask_index: 16 - - subtask: 'Place the orange on the table with the right gripper - - ' - subtask_index: 17 - - subtask: 'Place the blue bowl on the table with the right gripper - - ' - subtask_index: 18 - - subtask: 'Place the brown towel on the table with the left gripper - - ' - subtask_index: 19 - - subtask: 'Grasp the cyan cup with the right gripper ' - subtask_index: 20 - - subtask: Place the compasses on the table with the right gripper - subtask_index: 21 - - subtask: Grasp the compasses with the right gripper - subtask_index: 22 - - subtask: 'Grasp the green lemon with the right gripper - - ' - subtask_index: 23 - - subtask: 'Grasp the cyan cup with the left gripper - - ' - subtask_index: 24 - - subtask: 'Place the wallfe on the table with the left gripper - - ' - subtask_index: 25 - - subtask: 'Place the blue bowl on the table with the left gripper - - ' - subtask_index: 26 - - subtask: Grasp the fruit candy with the left gripper - subtask_index: 27 - - subtask: Grasp the orange with the right gripper - subtask_index: 28 - - subtask: Place the green lemon on the table with the right gripper - subtask_index: 29 - - subtask: 'Grasp the cyan cup with the right gripper - - ' - subtask_index: 30 - - subtask: 'Grasp the wallfe with the right gripper - - ' - subtask_index: 31 - - subtask: 'Grasp the green lemon with the left gripper - - ' - subtask_index: 32 - - subtask: 'Place the white blackboard earser on the table with the right gripper - - ' - subtask_index: 33 - - subtask: ' - - Grasp the green lemon with the right gripper' - subtask_index: 34 - - subtask: 'Grasp the square chewing gun with the left gripper - - ' - subtask_index: 35 - - subtask: Grasp the square chewing gum with the left gripper - subtask_index: 36 - - subtask: 'Grasp the with cyan cup the left gripper - - ' - subtask_index: 37 - - subtask: Place 
the blue bowl on the table with the left gripper - subtask_index: 38 - - subtask: 'Grasp the tea[ot with the left gripper - - ' - subtask_index: 39 - - subtask: 'Grasp the cyan cup with the right gripper ' - subtask_index: 40 - - subtask: 'Place the wallfe on the table with the right gripper - - ' - subtask_index: 41 - - subtask: 'Place the pen container on the table with the right gripper - - ' - subtask_index: 42 - - subtask: Place the eggplant on the table with the left gripper - subtask_index: 43 - - subtask: 'Grasp the hard facial cleanser with the right gripper - - ' - subtask_index: 44 - - subtask: 'Place the ornage on the table with the right gripper - - ' - subtask_index: 45 - - subtask: 'Grasp the pen container with the left gripper - - ' - subtask_index: 46 - - subtask: Place the fruit candy on the table with the left gripper - subtask_index: 47 - - subtask: Grasp the hard facial cleanser with the left gripper - subtask_index: 48 - - subtask: Grasp the chocolate with the right gripper - subtask_index: 49 - - subtask: 'Grasp the blue blackboard earser with the right gripper ' - subtask_index: 50 - - subtask: Grasp the mangosteen with the left gripper - subtask_index: 51 - - subtask: 'Place the square chewing gun on the table with the right gripper - - ' - subtask_index: 52 - - subtask: 'Grasp the brown towel with the left gripper - - ' - subtask_index: 53 - - subtask: 'Place the brown towel on the table with the left gripper - - ' - subtask_index: 54 - - subtask: 'Grasp the brown towel with the right gripper mangosteen - - ' - subtask_index: 55 - - subtask: 'Grasp the blue bowl with the right gripper - - ' - subtask_index: 56 - - subtask: Place the teapot on the table with the left gripper - subtask_index: 57 - - subtask: 'Grasp the blue cup with the left gripper - - ' - subtask_index: 58 - - subtask: Grasp the teacup with the right gripper - subtask_index: 59 - - subtask: Grasp the square chewing gun with the right gripper - subtask_index: 60 - - 
subtask: Grasp the teapot with the right gripper - subtask_index: 61 - - subtask: Place the pink towel on the table with the left gripper - subtask_index: 62 - - subtask: Grasp the blue bowl with the left gripper - subtask_index: 63 - - subtask: Place the waffle on the table with the right gripper - subtask_index: 64 - - subtask: Place the pen container on the table with the right gripper - subtask_index: 65 - - subtask: Place the mangosteen on the table with the right gripper - subtask_index: 66 - - subtask: 'Grasp the pen container with the left gripper - - ' - subtask_index: 67 - - subtask: Grasp the eggplant with the right gripper - subtask_index: 68 - - subtask: 'Grasp the white blackboard earser with the right gripper ' - subtask_index: 69 - - subtask: Grasp the green lemon with the right gripper - subtask_index: 70 - - subtask: 'Place the eggplant on the table with the left gripper - - ' - subtask_index: 71 - - subtask: Grasp the eggplant with the left gripper - subtask_index: 72 - - subtask: Place the hard facial cleanser on the table with the left gripper - subtask_index: 73 - - subtask: 'Grasp the hard facial cleanser with the right gripper ' - subtask_index: 74 - - subtask: 'Grasp the mangosteen with the left gripper - - ' - subtask_index: 75 - - subtask: 'Place the mangosteen on the table with the right gripper - - ' - subtask_index: 76 - - subtask: End - subtask_index: 77 - - subtask: 'Grasp the wallfe with the right gripper ' - subtask_index: 78 - - subtask: Grasp the white blackboard erasure with the right gripper - subtask_index: 79 - - subtask: Grasp the blue blackboard erasure with the left gripper - subtask_index: 80 - - subtask: 'Grasp the white blackboard earser with the right gripper - - ' - subtask_index: 81 - - subtask: 'Place the orange on the table with the left gripper - - ' - subtask_index: 82 - - subtask: Place the fruit candy on the table with the right gripper - subtask_index: 83 - - subtask: 'Grasp the brown towel with the right 
gripper - - ' - subtask_index: 84 - - subtask: 'Grasp the orange with the left gripper - - ' - subtask_index: 85 - - subtask: 'Place the compass on the table with the right gripper - - ' - subtask_index: 86 - - subtask: 'Grasp the tea cup with the left gripper - - ' - subtask_index: 87 - - subtask: 'Place the hard facial cleanser on the table with the left gripper - - ' - subtask_index: 88 - - subtask: 'Place the brown towel on the table with the right gripper - - ' - subtask_index: 89 - - subtask: 'Grasp the eggplant with the left gripper - - ' - subtask_index: 90 - - subtask: Place the pen container on the table with the left gripper - subtask_index: 91 - - subtask: 'Grasp the white blackboard earser with the right gripper - - ' - subtask_index: 92 - - subtask: 'Grasp the square chewing gun with the right gripper ' - subtask_index: 93 - - subtask: 'Place the cyan cup on the table with the left gripper - - ' - subtask_index: 94 - - subtask: Grasp the waffle with the right gripper - subtask_index: 95 - - subtask: Grasp the blue bowl with the right gripper - subtask_index: 96 - - subtask: 'Grasp the square chewing gun with the right gripper ' - subtask_index: 97 - - subtask: 'Place the waffle on the table with the right gripper - - ' - subtask_index: 98 - - subtask: 'Grasp the blue bowl with the right gripper ' - subtask_index: 99 - - subtask: Abnormal - subtask_index: 100 - - subtask: 'Grasp the white blackboard earser with the right gripper mangosteen - - ' - subtask_index: 101 - - subtask: "Grasp the blue towel with the right gripper \ - \ \n" - subtask_index: 102 - - subtask: 'Grasp the brown towel with the right gripper - - ' - subtask_index: 103 - - subtask: 'Place the pen container on the table with the right gripper - - ' - subtask_index: 104 - - subtask: Place the square chewing gum on the table with the right gripper - subtask_index: 105 - - subtask: 'Place the green lemon on the table with the right gripper - - ' - subtask_index: 106 - - subtask: 'Place 
the chocolate on the table with the right gripper - - ' - subtask_index: 107 - - subtask: 'Grasp the square facial square with the right gripper ' - subtask_index: 108 - - subtask: 'Grasp the teapot with the left gripper - - ' - subtask_index: 109 - - subtask: 'Place the pen container on the table with the left gripper - - ' - subtask_index: 110 - - subtask: 'Place the hard facial earser on the table with the right gripper - - ' - subtask_index: 111 - - subtask: Place the teacup on the table with the left gripper - subtask_index: 112 - - subtask: 'Place the sqaure chewing gun on the table with the right gripper - - ' - subtask_index: 113 - - subtask: 'Place the blue cup on the table with the left gripper - - ' - subtask_index: 114 - - subtask: Place the blue blackboard erasure on the table with the left gripper - subtask_index: 115 - - subtask: 'Grasp the wallfe with the right gripper ' - subtask_index: 116 - - subtask: Grasp the cyan cup with the right gripper - subtask_index: 117 - - subtask: Grasp the pink towel with the left gripper - subtask_index: 118 - - subtask: Grasp the mangosteen with the right gripper - subtask_index: 119 - - subtask: 'Place the cyan cup on the table with the left gripper - - ' - subtask_index: 120 - - subtask: 'Place the wallfe on the table with the right gripper - - ' - subtask_index: 121 - - subtask: 'Grasp the square chewing gun with the left gripper - - ' - subtask_index: 122 - - subtask: 'Grasp the blue bowel with the right gripper ' - subtask_index: 123 - - subtask: Grasp the orange with the left gripper - subtask_index: 124 - - subtask: 'Place the sqaure chewing gun on the table with the right gripper - - ' - subtask_index: 125 - - subtask: 'Grasp the sqaure chewing gun with the right gripper - - ' - subtask_index: 126 - - subtask: Place the orange on the table with the right gripper - subtask_index: 127 - - subtask: 'Place the white blackboard earser on the table with the left gripper - - ' - subtask_index: 128 - - subtask: 
'Place the teacup on the table with the left gripper - - ' - subtask_index: 129 - - subtask: Grasp the pen container with the right gripper - subtask_index: 130 - - subtask: 'Grasp the blue blackboard earser with the left gripper - - ' - subtask_index: 131 - - subtask: Place the teapot on the table with the right gripper - subtask_index: 132 - - subtask: 'Grasp the wallfe with the right gripper ' - subtask_index: 133 - - subtask: Place the mangosteen on the table with the left gripper - subtask_index: 134 - - subtask: 'Place the square chewing gun on the table with the left gripper - - ' - subtask_index: 135 - - subtask: Grasp the fruit candy with the right gripper - subtask_index: 136 - - subtask: Grasp the square chewing gum with the right gripper - subtask_index: 137 - - subtask: 'Grasp the compass with the right gripper - - ' - subtask_index: 138 - - subtask: 'Place the teapot on the table with the left gripper - - ' - subtask_index: 139 - - subtask: 'Grasp the cyan cup with the left gripper - - ' - subtask_index: 140 - - subtask: Place the teacup on the table with the right gripper - subtask_index: 141 - - subtask: 'Grasp the eggplant with the right gripper - - ' - subtask_index: 142 - - subtask: 'Grasp the pen container with the right gripper mangosteen - - ' - subtask_index: 143 - - subtask: 'Grasp the mangosteen with the right gripper ' - subtask_index: 144 - - subtask: 'Place the green lemon on the table with the left gripper - - ' - subtask_index: 145 - - subtask: Place the pen containeron the table with the left gripper - subtask_index: 146 - - subtask: Place the white blackboard erasure on the table with the right gripper - subtask_index: 147 - - subtask: 'Grasp the hard facial cleanser with the left gripper - - ' - subtask_index: 148 - - subtask: 'Grasp the whiite blackboard earser with the right gripper - - ' - subtask_index: 149 - - subtask: 'Grasp the wallfe with the left gripper - - ' - subtask_index: 150 - - subtask: 'Grasp the wallfe with the 
right gripper mangosteen - - ' - subtask_index: 151 - - subtask: 'Grasp the chocolate with the right gripper ' - subtask_index: 152 - - subtask: 'Place the cyan cup on the table with the left gripper - - ' - subtask_index: 153 - - subtask: 'Grasp the cyan cup with the right gripper - - ' - subtask_index: 154 - - subtask: 'Place the green lemon on the table with the right gripper - - ' - subtask_index: 155 - - subtask: Place the chocolate on the table with the left gripper - subtask_index: 156 - - subtask: 'Grasp the cyan cup with the right gripper mangosteen - - ' - subtask_index: 157 - - subtask: 'Grasp the eggplant with the right gripper ' - subtask_index: 158 - - subtask: 'Grasp the white blackboard earser with the left gripper - - ' - subtask_index: 159 - - subtask: 'Place the hard facial cleanser on the table with the right gripper - - ' - subtask_index: 160 - - subtask: 'Place the blue blackboard earser on the table with the left gripper - - ' - subtask_index: 161 - - subtask: 'Place the eggplant on the table with the right gripper - - ' - subtask_index: 162 - - subtask: 'Grasp the chocolate with the right gripper - - ' - subtask_index: 163 - - subtask: Grasp the chocolate with the left gripper - subtask_index: 164 - - subtask: Place the orange on the table with the left gripper - subtask_index: 165 - - subtask: Grasp the XX with the left gripper - subtask_index: 166 - - subtask: Place the pen container on the table with the left gripper - subtask_index: 167 - - subtask: Place the chocolate on the table with the right gripper - subtask_index: 168 - - subtask: Place the white blackboard earser on the table with the left gripper - subtask_index: 169 - - subtask: 'Grasp the chocolate with the left gripper - - ' - subtask_index: 170 - - subtask: Grasp the pen container with the left gripper - subtask_index: 171 - - subtask: 'Place the sqaure chewing gun on the table with the left gripper - - ' - subtask_index: 172 - - subtask: 'Place the tea cup on the table with 
the left gripper - - ' - subtask_index: 173 - - subtask: 'Place the chocolate on the table with the right gripper - - ' - subtask_index: 174 - - subtask: 'Grasp the teacup with the left gripper - - ' - subtask_index: 175 - - subtask: Place the square chewing gum on the table with the left gripper - subtask_index: 176 - - subtask: 'Grasp the brown towel with the right gripper ' - subtask_index: 177 - - subtask: Grasp the teapot with the left gripper - subtask_index: 178 - - subtask: Grasp the teacup with the left gripper - subtask_index: 179 - - subtask: 'Place the chocolate on the table with the left gripper - - ' - subtask_index: 180 - - subtask: 'Place the cyan cup on the table with the right gripper - - ' - subtask_index: 181 - - subtask: 'null' - subtask_index: 182 - atomic_actions: - - grasp - - lift - - lower - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 85405 - dataset_size: 5.33 GB - data_structure: 'Agilex_Cobot_Magic_move_object_green_tablecloth_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- backup - - | |-- data - - | | `-- chunk-000 - - | `-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (185 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. 
- homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_move_object_red_tablecloth.yaml b/dataset_info/Agilex_Cobot_Magic_move_object_red_tablecloth.yaml index e76c6d89086de615621dfa0d45a7675c113f641b..530c160186fa5199c69a85cae26275908d40f0bd 100644 --- a/dataset_info/Agilex_Cobot_Magic_move_object_red_tablecloth.yaml +++ b/dataset_info/Agilex_Cobot_Magic_move_object_red_tablecloth.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -213,144 +213,273 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. 
-task_result: 'null' -task_instruction: the gripper move the object. +task_instruction: +- the gripper move the object. sub_tasks: -- Grasp the pink towel with the right gripper -- Place the XX on the table with the left gripper -- Place the mint candy on the table with the left gripper -- Place the snickers on the table with the right gripper -- Grasp the pen container with the right gripper -- Grasp the grey towel with the left gripper -- Place the eyeglass case on the table with the left gripper -- Grasp the white duck with the left gripper -- Place the eggplant on the table with the right gripper -- Place the blue bowl on the table with the right gripper -- Grasp the banana with the left gripper -- Place the compasses on the table with the right gripper -- Place the sandwich on the table with the left gripper -- Place the pink cake on the table with the right gripper -- Place the banana on the table with the right gripper -- Grasp the compasses with the right gripper -- Grasp the orange with the right gripper -- Grasp the blue cup with the left gripper -- Place the peach on the table with the right gripper -- Place the green lemon on the table with the right gripper -- Grasp the mint candy with the left gripper -- Grasp the sandwich with the right gripper -- Grasp the eyeglass case with the left gripper -- Place the compasses on the table with the left gripper -- Place the eyeglass case on the table with the right gripper -- Grasp the square chewing gum with the left gripper -- Place the brown towel on the table with the left gripper -- Place the blue bowl on the table with the left gripper -- Grasp the sandwich biscuit with the right gripper -- Place the white blackboard erasure on the table with the left gripper -- Grasp the white blackboard erasure with the left gripper -- Grasp the snickers with the right gripper -- Grasp the eyeglass case with the right gripper -- Place the eggplant on the table with the left gripper -- Place the mango on the table with 
the right gripper -- Place the hard facial cleanser on the table with the right gripper -- Place the mint candy on the table with the right gripper -- Grasp the brown towel with the left gripper -- Grasp the hard facial cleanser with the left gripper -- Grasp the chocolate with the right gripper -- Grasp the mangosteen with the left gripper -- Grasp the peach with the right gripper -- Grasp the brown towel with the right gripper -- Grasp the mango with the left gripper -- Place the snickers on the table with the left gripper -- Grasp the snickers with the left gripper -- Grasp the lemon with the left gripper -- Place the teapot on the table with the left gripper -- Grasp the hard facial cleanser with the right gripper -- Grasp the sandwich with the left gripper -- Place the white duck on the table with the left gripper -- Place the white duck on the table with the right gripper -- Grasp the teapot with the right gripper -- Place the pink towel on the table with the left gripper -- Grasp the hollow ring bread with the right gripper -- Grasp the hollow ring bread with the right gripper -- Grasp the blue bowl with the left gripper -- Place the waffle on the table with the right gripper -- Place the pen container on the table with the right gripper -- Place the mangosteen on the table with the right gripper -- Grasp the eggplant with the right gripper -- Grasp the pink cake with the left gripper -- Grasp the gray towel with the right gripper -- Place the pink towel on the table with the right gripper -- Grasp the green lemon with the right gripper -- Place the green lemon on the table with the left gripper -- Grasp the eggplant with the left gripper -- Place the hard facial cleanser on the table with the left gripper -- Place the lemon on the table with the left gripper -- End -- Grasp the white duck with the right gripper -- Grasp the white blackboard erasure with the right gripper -- Grasp the blue blackboard erasure with the left gripper -- Place the sandwich 
biscuit on the table with the right gripper -- Place the sandwich on the table with the right gripper -- Place the fruit candy on the table with the right gripper -- Place the gray towel on the table with the right gripper -- Place the blue cup on the table with the right gripper -- Place the pen container on the table with the left gripper -- Grasp the compasses with the left gripper -- Grasp the waffle with the right gripper -- Grasp the blue bowl with the right gripper -- Place the pink cake on the table with the left gripper -- Abnormal -- Place the square chewing gum on the table with the right gripper -- Place the blue blackboard erasure on the table with the left gripper -- Grasp the pink towel with the left gripper -- Grasp the mangosteen with the right gripper -- Place the grey towel on the table with the left gripper -- Grasp the orange with the left gripper -- Place the hollow ring bread on the table with the right gripper -- Place the blue cup on the table with the left gripper -- Place the orange on the table with the right gripper -- Place the teapot on the table with the right gripper -- Place the mangosteen on the table with the left gripper -- Grasp the fruit candy with the right gripper -- Grasp the square chewing gum with the right gripper -- Place the white blackboard erasure on the table with the right gripper -- Grasp the pink cake with the right gripper -- Place the mango on the table with the left gripper -- Place the chocolate on the table with the left gripper -- Place the hollow ring bread on the table with the right gripper -- Grasp the blue blackboard erasure with the right gripper -- Place the banana on the table with the left gripper -- Place the hard facial cleanser on the table with the right gripper -- Place the blue blackboard erasure on the table with the right gripper -- Grasp the chocolate with the left gripper -- Grasp the mango with the right gripper -- Place the orange on the table with the left gripper -- Grasp the XX with 
the left gripper -- Place the chocolate on the table with the right gripper -- Grasp the blue cup with the right gripper -- Grasp the mint candy with the right gripper -- Place the brown towel on the table with the right gripper -- Grasp the pen container with the left gripper -- Grasp the green lemon with the left gripper -- Grasp the tea cup with the left gripper -- Place the tea cup on the table with the left gripper -- Place the square chewing gum on the table with the left gripper -- Grasp the teapot with the left gripper -- Grasp the banana with the right gripper -- 'null' +- subtask: Grasp the pink towel with the right gripper + subtask_index: 0 +- subtask: Place the XX on the table with the left gripper + subtask_index: 1 +- subtask: Place the mint candy on the table with the left gripper + subtask_index: 2 +- subtask: Place the snickers on the table with the right gripper + subtask_index: 3 +- subtask: Grasp the pen container with the right gripper + subtask_index: 4 +- subtask: Grasp the grey towel with the left gripper + subtask_index: 5 +- subtask: Place the eyeglass case on the table with the left gripper + subtask_index: 6 +- subtask: Grasp the white duck with the left gripper + subtask_index: 7 +- subtask: Place the eggplant on the table with the right gripper + subtask_index: 8 +- subtask: Place the blue bowl on the table with the right gripper + subtask_index: 9 +- subtask: Grasp the banana with the left gripper + subtask_index: 10 +- subtask: Place the compasses on the table with the right gripper + subtask_index: 11 +- subtask: Place the sandwich on the table with the left gripper + subtask_index: 12 +- subtask: Place the pink cake on the table with the right gripper + subtask_index: 13 +- subtask: Place the banana on the table with the right gripper + subtask_index: 14 +- subtask: Grasp the compasses with the right gripper + subtask_index: 15 +- subtask: Grasp the orange with the right gripper + subtask_index: 16 +- subtask: Grasp the blue cup 
with the left gripper + subtask_index: 17 +- subtask: Place the peach on the table with the right gripper + subtask_index: 18 +- subtask: Place the green lemon on the table with the right gripper + subtask_index: 19 +- subtask: Grasp the mint candy with the left gripper + subtask_index: 20 +- subtask: Grasp the sandwich with the right gripper + subtask_index: 21 +- subtask: Grasp the eyeglass case with the left gripper + subtask_index: 22 +- subtask: Place the compasses on the table with the left gripper + subtask_index: 23 +- subtask: Place the eyeglass case on the table with the right gripper + subtask_index: 24 +- subtask: Grasp the square chewing gum with the left gripper + subtask_index: 25 +- subtask: Place the brown towel on the table with the left gripper + subtask_index: 26 +- subtask: Place the blue bowl on the table with the left gripper + subtask_index: 27 +- subtask: Grasp the sandwich biscuit with the right gripper + subtask_index: 28 +- subtask: Place the white blackboard erasure on the table with the left gripper + subtask_index: 29 +- subtask: Grasp the white blackboard erasure with the left gripper + subtask_index: 30 +- subtask: Grasp the snickers with the right gripper + subtask_index: 31 +- subtask: Grasp the eyeglass case with the right gripper + subtask_index: 32 +- subtask: Place the eggplant on the table with the left gripper + subtask_index: 33 +- subtask: Place the mango on the table with the right gripper + subtask_index: 34 +- subtask: Place the hard facial cleanser on the table with the right gripper + subtask_index: 35 +- subtask: Place the mint candy on the table with the right gripper + subtask_index: 36 +- subtask: Grasp the brown towel with the left gripper + subtask_index: 37 +- subtask: Grasp the hard facial cleanser with the left gripper + subtask_index: 38 +- subtask: Grasp the chocolate with the right gripper + subtask_index: 39 +- subtask: Grasp the mangosteen with the left gripper + subtask_index: 40 +- subtask: Grasp the 
peach with the right gripper + subtask_index: 41 +- subtask: Grasp the brown towel with the right gripper + subtask_index: 42 +- subtask: Grasp the mango with the left gripper + subtask_index: 43 +- subtask: Place the snickers on the table with the left gripper + subtask_index: 44 +- subtask: Grasp the snickers with the left gripper + subtask_index: 45 +- subtask: Grasp the lemon with the left gripper + subtask_index: 46 +- subtask: Place the teapot on the table with the left gripper + subtask_index: 47 +- subtask: Grasp the hard facial cleanser with the right gripper + subtask_index: 48 +- subtask: Grasp the sandwich with the left gripper + subtask_index: 49 +- subtask: Place the white duck on the table with the left gripper + subtask_index: 50 +- subtask: Place the white duck on the table with the right gripper + subtask_index: 51 +- subtask: Grasp the teapot with the right gripper + subtask_index: 52 +- subtask: Place the pink towel on the table with the left gripper + subtask_index: 53 +- subtask: Grasp the hollow ring bread with the right gripper + subtask_index: 54 +- subtask: 'Grasp the hollow ring bread with the right gripper + + ' + subtask_index: 55 +- subtask: Grasp the blue bowl with the left gripper + subtask_index: 56 +- subtask: Place the waffle on the table with the right gripper + subtask_index: 57 +- subtask: Place the pen container on the table with the right gripper + subtask_index: 58 +- subtask: Place the mangosteen on the table with the right gripper + subtask_index: 59 +- subtask: Grasp the eggplant with the right gripper + subtask_index: 60 +- subtask: Grasp the pink cake with the left gripper + subtask_index: 61 +- subtask: Grasp the gray towel with the right gripper + subtask_index: 62 +- subtask: Place the pink towel on the table with the right gripper + subtask_index: 63 +- subtask: Grasp the green lemon with the right gripper + subtask_index: 64 +- subtask: Place the green lemon on the table with the left gripper + subtask_index: 65 +- 
subtask: Grasp the eggplant with the left gripper + subtask_index: 66 +- subtask: Place the hard facial cleanser on the table with the left gripper + subtask_index: 67 +- subtask: Place the lemon on the table with the left gripper + subtask_index: 68 +- subtask: End + subtask_index: 69 +- subtask: Grasp the white duck with the right gripper + subtask_index: 70 +- subtask: Grasp the white blackboard erasure with the right gripper + subtask_index: 71 +- subtask: Grasp the blue blackboard erasure with the left gripper + subtask_index: 72 +- subtask: Place the sandwich biscuit on the table with the right gripper + subtask_index: 73 +- subtask: Place the sandwich on the table with the right gripper + subtask_index: 74 +- subtask: Place the fruit candy on the table with the right gripper + subtask_index: 75 +- subtask: Place the gray towel on the table with the right gripper + subtask_index: 76 +- subtask: Place the blue cup on the table with the right gripper + subtask_index: 77 +- subtask: Place the pen container on the table with the left gripper + subtask_index: 78 +- subtask: Grasp the compasses with the left gripper + subtask_index: 79 +- subtask: Grasp the waffle with the right gripper + subtask_index: 80 +- subtask: Grasp the blue bowl with the right gripper + subtask_index: 81 +- subtask: Place the pink cake on the table with the left gripper + subtask_index: 82 +- subtask: Abnormal + subtask_index: 83 +- subtask: Place the square chewing gum on the table with the right gripper + subtask_index: 84 +- subtask: Place the blue blackboard erasure on the table with the left gripper + subtask_index: 85 +- subtask: Grasp the pink towel with the left gripper + subtask_index: 86 +- subtask: Grasp the mangosteen with the right gripper + subtask_index: 87 +- subtask: Place the grey towel on the table with the left gripper + subtask_index: 88 +- subtask: Grasp the orange with the left gripper + subtask_index: 89 +- subtask: Place the hollow ring bread on the table with the 
right gripper + subtask_index: 90 +- subtask: Place the blue cup on the table with the left gripper + subtask_index: 91 +- subtask: Place the orange on the table with the right gripper + subtask_index: 92 +- subtask: Place the teapot on the table with the right gripper + subtask_index: 93 +- subtask: Place the mangosteen on the table with the left gripper + subtask_index: 94 +- subtask: Grasp the fruit candy with the right gripper + subtask_index: 95 +- subtask: Grasp the square chewing gum with the right gripper + subtask_index: 96 +- subtask: Place the white blackboard erasure on the table with the right gripper + subtask_index: 97 +- subtask: Grasp the pink cake with the right gripper + subtask_index: 98 +- subtask: Place the mango on the table with the left gripper + subtask_index: 99 +- subtask: Place the chocolate on the table with the left gripper + subtask_index: 100 +- subtask: 'Place the hollow ring bread on the table with the right gripper + + ' + subtask_index: 101 +- subtask: Grasp the blue blackboard erasure with the right gripper + subtask_index: 102 +- subtask: Place the banana on the table with the left gripper + subtask_index: 103 +- subtask: 'Place the hard facial cleanser on the table with the right gripper + + ' + subtask_index: 104 +- subtask: Place the blue blackboard erasure on the table with the right gripper + subtask_index: 105 +- subtask: Grasp the chocolate with the left gripper + subtask_index: 106 +- subtask: Grasp the mango with the right gripper + subtask_index: 107 +- subtask: Place the orange on the table with the left gripper + subtask_index: 108 +- subtask: Grasp the XX with the left gripper + subtask_index: 109 +- subtask: Place the chocolate on the table with the right gripper + subtask_index: 110 +- subtask: Grasp the blue cup with the right gripper + subtask_index: 111 +- subtask: Grasp the mint candy with the right gripper + subtask_index: 112 +- subtask: Place the brown towel on the table with the right gripper + 
subtask_index: 113 +- subtask: Grasp the pen container with the left gripper + subtask_index: 114 +- subtask: Grasp the green lemon with the left gripper + subtask_index: 115 +- subtask: Grasp the tea cup with the left gripper + subtask_index: 116 +- subtask: Place the tea cup on the table with the left gripper + subtask_index: 117 +- subtask: Place the square chewing gum on the table with the left gripper + subtask_index: 118 +- subtask: Grasp the teapot with the left gripper + subtask_index: 119 +- subtask: Grasp the banana with the right gripper + subtask_index: 120 +- subtask: 'null' + subtask_index: 121 atomic_actions: - rasp - lift - lower -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -358,13 +487,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -372,8 +498,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 198 total_frames: 100817 fps: 30 @@ -474,11 +599,9 @@ data_structure: 'Agilex_Cobot_Magic_move_object_red_tablecloth_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 
+splits: train: 0:197 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -751,7 +874,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -759,7 +882,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -786,443 +908,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. 
- extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_move_object_red_tablecloth - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: commercial & convenience - level2: supermarket - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - the gripper move the object. - sub_tasks: - - subtask: Grasp the pink towel with the right gripper - subtask_index: 0 - - subtask: Place the XX on the table with the left gripper - subtask_index: 1 - - subtask: Place the mint candy on the table with the left gripper - subtask_index: 2 - - subtask: Place the snickers on the table with the right gripper - subtask_index: 3 - - subtask: Grasp the pen container with the right gripper - subtask_index: 4 - - subtask: Grasp the grey towel with the left gripper - subtask_index: 5 - - subtask: Place the eyeglass case on the table with the left gripper - subtask_index: 6 - - subtask: Grasp the white duck with the left gripper - subtask_index: 7 - - subtask: Place the eggplant on the table with the right gripper - subtask_index: 8 - - subtask: Place the blue bowl on the table with the right gripper - subtask_index: 9 - - subtask: Grasp the banana with the left gripper - subtask_index: 10 - - subtask: Place the compasses on the table with the right gripper - subtask_index: 11 - - subtask: Place the sandwich on the table with the left gripper - subtask_index: 12 - - subtask: Place the pink cake on the table with the right gripper - subtask_index: 13 - - subtask: Place the banana on the table with the right gripper - subtask_index: 14 - - subtask: Grasp the compasses with the right gripper - subtask_index: 15 - - subtask: Grasp the orange with the right gripper - subtask_index: 16 - - 
subtask: Grasp the blue cup with the left gripper - subtask_index: 17 - - subtask: Place the peach on the table with the right gripper - subtask_index: 18 - - subtask: Place the green lemon on the table with the right gripper - subtask_index: 19 - - subtask: Grasp the mint candy with the left gripper - subtask_index: 20 - - subtask: Grasp the sandwich with the right gripper - subtask_index: 21 - - subtask: Grasp the eyeglass case with the left gripper - subtask_index: 22 - - subtask: Place the compasses on the table with the left gripper - subtask_index: 23 - - subtask: Place the eyeglass case on the table with the right gripper - subtask_index: 24 - - subtask: Grasp the square chewing gum with the left gripper - subtask_index: 25 - - subtask: Place the brown towel on the table with the left gripper - subtask_index: 26 - - subtask: Place the blue bowl on the table with the left gripper - subtask_index: 27 - - subtask: Grasp the sandwich biscuit with the right gripper - subtask_index: 28 - - subtask: Place the white blackboard erasure on the table with the left gripper - subtask_index: 29 - - subtask: Grasp the white blackboard erasure with the left gripper - subtask_index: 30 - - subtask: Grasp the snickers with the right gripper - subtask_index: 31 - - subtask: Grasp the eyeglass case with the right gripper - subtask_index: 32 - - subtask: Place the eggplant on the table with the left gripper - subtask_index: 33 - - subtask: Place the mango on the table with the right gripper - subtask_index: 34 - - subtask: Place the hard facial cleanser on the table with the right gripper - subtask_index: 35 - - subtask: Place the mint candy on the table with the right gripper - subtask_index: 36 - - subtask: Grasp the brown towel with the left gripper - subtask_index: 37 - - subtask: Grasp the hard facial cleanser with the left gripper - subtask_index: 38 - - subtask: Grasp the chocolate with the right gripper - subtask_index: 39 - - subtask: Grasp the mangosteen with the left 
gripper - subtask_index: 40 - - subtask: Grasp the peach with the right gripper - subtask_index: 41 - - subtask: Grasp the brown towel with the right gripper - subtask_index: 42 - - subtask: Grasp the mango with the left gripper - subtask_index: 43 - - subtask: Place the snickers on the table with the left gripper - subtask_index: 44 - - subtask: Grasp the snickers with the left gripper - subtask_index: 45 - - subtask: Grasp the lemon with the left gripper - subtask_index: 46 - - subtask: Place the teapot on the table with the left gripper - subtask_index: 47 - - subtask: Grasp the hard facial cleanser with the right gripper - subtask_index: 48 - - subtask: Grasp the sandwich with the left gripper - subtask_index: 49 - - subtask: Place the white duck on the table with the left gripper - subtask_index: 50 - - subtask: Place the white duck on the table with the right gripper - subtask_index: 51 - - subtask: Grasp the teapot with the right gripper - subtask_index: 52 - - subtask: Place the pink towel on the table with the left gripper - subtask_index: 53 - - subtask: Grasp the hollow ring bread with the right gripper - subtask_index: 54 - - subtask: 'Grasp the hollow ring bread with the right gripper - - ' - subtask_index: 55 - - subtask: Grasp the blue bowl with the left gripper - subtask_index: 56 - - subtask: Place the waffle on the table with the right gripper - subtask_index: 57 - - subtask: Place the pen container on the table with the right gripper - subtask_index: 58 - - subtask: Place the mangosteen on the table with the right gripper - subtask_index: 59 - - subtask: Grasp the eggplant with the right gripper - subtask_index: 60 - - subtask: Grasp the pink cake with the left gripper - subtask_index: 61 - - subtask: Grasp the gray towel with the right gripper - subtask_index: 62 - - subtask: Place the pink towel on the table with the right gripper - subtask_index: 63 - - subtask: Grasp the green lemon with the right gripper - subtask_index: 64 - - subtask: 
Place the green lemon on the table with the left gripper - subtask_index: 65 - - subtask: Grasp the eggplant with the left gripper - subtask_index: 66 - - subtask: Place the hard facial cleanser on the table with the left gripper - subtask_index: 67 - - subtask: Place the lemon on the table with the left gripper - subtask_index: 68 - - subtask: End - subtask_index: 69 - - subtask: Grasp the white duck with the right gripper - subtask_index: 70 - - subtask: Grasp the white blackboard erasure with the right gripper - subtask_index: 71 - - subtask: Grasp the blue blackboard erasure with the left gripper - subtask_index: 72 - - subtask: Place the sandwich biscuit on the table with the right gripper - subtask_index: 73 - - subtask: Place the sandwich on the table with the right gripper - subtask_index: 74 - - subtask: Place the fruit candy on the table with the right gripper - subtask_index: 75 - - subtask: Place the gray towel on the table with the right gripper - subtask_index: 76 - - subtask: Place the blue cup on the table with the right gripper - subtask_index: 77 - - subtask: Place the pen container on the table with the left gripper - subtask_index: 78 - - subtask: Grasp the compasses with the left gripper - subtask_index: 79 - - subtask: Grasp the waffle with the right gripper - subtask_index: 80 - - subtask: Grasp the blue bowl with the right gripper - subtask_index: 81 - - subtask: Place the pink cake on the table with the left gripper - subtask_index: 82 - - subtask: Abnormal - subtask_index: 83 - - subtask: Place the square chewing gum on the table with the right gripper - subtask_index: 84 - - subtask: Place the blue blackboard erasure on the table with the left gripper - subtask_index: 85 - - subtask: Grasp the pink towel with the left gripper - subtask_index: 86 - - subtask: Grasp the mangosteen with the right gripper - subtask_index: 87 - - subtask: Place the grey towel on the table with the left gripper - subtask_index: 88 - - subtask: Grasp the orange 
with the left gripper - subtask_index: 89 - - subtask: Place the hollow ring bread on the table with the right gripper - subtask_index: 90 - - subtask: Place the blue cup on the table with the left gripper - subtask_index: 91 - - subtask: Place the orange on the table with the right gripper - subtask_index: 92 - - subtask: Place the teapot on the table with the right gripper - subtask_index: 93 - - subtask: Place the mangosteen on the table with the left gripper - subtask_index: 94 - - subtask: Grasp the fruit candy with the right gripper - subtask_index: 95 - - subtask: Grasp the square chewing gum with the right gripper - subtask_index: 96 - - subtask: Place the white blackboard erasure on the table with the right gripper - subtask_index: 97 - - subtask: Grasp the pink cake with the right gripper - subtask_index: 98 - - subtask: Place the mango on the table with the left gripper - subtask_index: 99 - - subtask: Place the chocolate on the table with the left gripper - subtask_index: 100 - - subtask: 'Place the hollow ring bread on the table with the right gripper - - ' - subtask_index: 101 - - subtask: Grasp the blue blackboard erasure with the right gripper - subtask_index: 102 - - subtask: Place the banana on the table with the left gripper - subtask_index: 103 - - subtask: 'Place the hard facial cleanser on the table with the right gripper - - ' - subtask_index: 104 - - subtask: Place the blue blackboard erasure on the table with the right gripper - subtask_index: 105 - - subtask: Grasp the chocolate with the left gripper - subtask_index: 106 - - subtask: Grasp the mango with the right gripper - subtask_index: 107 - - subtask: Place the orange on the table with the left gripper - subtask_index: 108 - - subtask: Grasp the XX with the left gripper - subtask_index: 109 - - subtask: Place the chocolate on the table with the right gripper - subtask_index: 110 - - subtask: Grasp the blue cup with the right gripper - subtask_index: 111 - - subtask: Grasp the mint 
candy with the right gripper - subtask_index: 112 - - subtask: Place the brown towel on the table with the right gripper - subtask_index: 113 - - subtask: Grasp the pen container with the left gripper - subtask_index: 114 - - subtask: Grasp the green lemon with the left gripper - subtask_index: 115 - - subtask: Grasp the tea cup with the left gripper - subtask_index: 116 - - subtask: Place the tea cup on the table with the left gripper - subtask_index: 117 - - subtask: Place the square chewing gum on the table with the left gripper - subtask_index: 118 - - subtask: Grasp the teapot with the left gripper - subtask_index: 119 - - subtask: Grasp the banana with the right gripper - subtask_index: 120 - - subtask: 'null' - subtask_index: 121 - atomic_actions: - - rasp - - lift - - lower - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 100817 - dataset_size: 3.23 GB - data_structure: 'Agilex_Cobot_Magic_move_object_red_tablecloth_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- backup - - | |-- data - - | | `-- chunk-000 - - | `-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - 
- | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (186 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 100K-1M diff --git a/dataset_info/Agilex_Cobot_Magic_move_pencil_sharpener.yaml b/dataset_info/Agilex_Cobot_Magic_move_pencil_sharpener.yaml index be4762d3e4138414b3772bec8cc1e780d61e37e6..6bb1986c832ab7ab12abb829354c4d048b4bfae0 100644 --- a/dataset_info/Agilex_Cobot_Magic_move_pencil_sharpener.yaml +++ b/dataset_info/Agilex_Cobot_Magic_move_pencil_sharpener.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -69,28 +69,35 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: Place the pencil sharpener on the left side of the stapler. +task_instruction: +- Place the pencil sharpener on the left side of the stapler. 
sub_tasks: -- Place the pencil sharpener to the left of the stapler with the left gripper -- Grasp the pencil sharpene with the right gripper -- End -- Grasp the pencil sharpene with the left gripper -- Place the pencil sharpener to the left of the stapler with the right gripper -- 'null' +- subtask: Place the pencil sharpener to the left of the stapler with the left gripper + subtask_index: 0 +- subtask: Grasp the pencil sharpene with the right gripper + subtask_index: 1 +- subtask: End + subtask_index: 2 +- subtask: Grasp the pencil sharpene with the left gripper + subtask_index: 3 +- subtask: Place the pencil sharpener to the left of the stapler with the right gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - lift - lower -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -98,13 +105,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -112,8 +116,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 98 total_frames: 41602 fps: 30 @@ -214,11 +217,9 @@ 
data_structure: 'Agilex_Cobot_Magic_move_pencil_sharpener_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:97 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -491,7 +492,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -499,7 +500,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -526,206 +526,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. 
- extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_move_pencil_sharpener - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: education - level2: school - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - Place the pencil sharpener on the left side of the stapler. - sub_tasks: - - subtask: Place the pencil sharpener to the left of the stapler with the left gripper - subtask_index: 0 - - subtask: Grasp the pencil sharpene with the right gripper - subtask_index: 1 - - subtask: End - subtask_index: 2 - - subtask: Grasp the pencil sharpene with the left gripper - subtask_index: 3 - - subtask: Place the pencil sharpener to the left of the stapler with the right - gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - lift - - lower - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 41602 - dataset_size: 374.66 MB - data_structure: 'Agilex_Cobot_Magic_move_pencil_sharpener_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- backup - - | |-- data - - | | `-- chunk-000 - - | `-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (86 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. 
- homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_open_drawer_bottom.yaml b/dataset_info/Agilex_Cobot_Magic_open_drawer_bottom.yaml index de40b31fb7af0744e19694ddd2c86792e40eec3f..6f0240fe37f8c158ea055b670ab89c51910da9bf 100644 --- a/dataset_info/Agilex_Cobot_Magic_open_drawer_bottom.yaml +++ b/dataset_info/Agilex_Cobot_Magic_open_drawer_bottom.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -45,28 +45,36 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: open the bottom drawer. 
+task_instruction: +- open the bottom drawer. sub_tasks: -- end -- Pull open the drawer with the left gripper -- Abnormal -- Grab the bottom drawer with the right gripper -- Pull open the drawer with the right gripper -- Grab the bottom drawer with the left gripper -- 'null' +- subtask: end + subtask_index: 0 +- subtask: Pull open the drawer with the left gripper + subtask_index: 1 +- subtask: Abnormal + subtask_index: 2 +- subtask: Grab the bottom drawer with the right gripper + subtask_index: 3 +- subtask: Pull open the drawer with the right gripper + subtask_index: 4 +- subtask: Grab the bottom drawer with the left gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - pull -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -74,13 +82,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -88,8 +93,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 18620 fps: 30 @@ -190,11 +194,9 @@ data_structure: 
'Agilex_Cobot_Magic_open_drawer_bottom_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -467,7 +469,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -475,7 +477,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -502,206 +503,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. 
- extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_open_drawer_bottom - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: office_workspace - level2: office - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - open the bottom drawer. - sub_tasks: - - subtask: end - subtask_index: 0 - - subtask: Pull open the drawer with the left gripper - subtask_index: 1 - - subtask: Abnormal - subtask_index: 2 - - subtask: Grab the bottom drawer with the right gripper - subtask_index: 3 - - subtask: Pull open the drawer with the right gripper - subtask_index: 4 - - subtask: Grab the bottom drawer with the left gripper - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - pull - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 18620 - dataset_size: 182.16 MB - data_structure: 'Agilex_Cobot_Magic_open_drawer_bottom_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- backup - - | |-- data - - | | `-- chunk-000 - - | `-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (38 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. 
- homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_organize_test_tube.yaml b/dataset_info/Agilex_Cobot_Magic_organize_test_tube.yaml index e6ec6dfe494331198e028f65e06f6047a98420a1..fd12adab78fa6a9366d31d2207843a32ab4ede17 100644 --- a/dataset_info/Agilex_Cobot_Magic_organize_test_tube.yaml +++ b/dataset_info/Agilex_Cobot_Magic_organize_test_tube.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -51,28 +51,35 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. 
-task_result: 'null' -task_instruction: Put the test tube on the table back onto the test tube rack. +task_instruction: +- Put the test tube on the table back onto the test tube rack. sub_tasks: -- Place the test tube on the test tube rack with the left gripper -- Abnormal -- Pick up the test tube with the right gripper -- Move the test tube from the right gripper to the left gripper -- End -- 'null' +- subtask: Place the test tube on the test tube rack with the left gripper + subtask_index: 0 +- subtask: Abnormal + subtask_index: 1 +- subtask: Pick up the test tube with the right gripper + subtask_index: 2 +- subtask: Move the test tube from the right gripper to the left gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - lift - lower -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -80,13 +87,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -94,8 +98,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 197 total_frames: 159747 fps: 30 @@ -180,11 +183,9 @@ data_structure: 'Agilex_Cobot_Magic_organize_test_tube_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:196 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -457,7 +458,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -465,7 +466,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. 
support_info: For technical support, please open an issue on our GitHub repository. @@ -492,189 +492,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_organize_test_tube - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: other - level2: laboratory - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - Put the test tube on the table back onto the test tube rack. 
- sub_tasks: - - subtask: Place the test tube on the test tube rack with the left gripper - subtask_index: 0 - - subtask: Abnormal - subtask_index: 1 - - subtask: Pick up the test tube with the right gripper - subtask_index: 2 - - subtask: Move the test tube from the right gripper to the left gripper - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - lift - - lower - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 159747 - dataset_size: 1.92 GB - data_structure: 'Agilex_Cobot_Magic_organize_test_tube_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(185 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, 
Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 100K-1M diff --git a/dataset_info/Agilex_Cobot_Magic_place_towel_flat.yaml b/dataset_info/Agilex_Cobot_Magic_place_towel_flat.yaml new file mode 100644 index 0000000000000000000000000000000000000000..0d0219cf1d3c33f79f0aebf2b8a17fca91c2c090 --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_place_towel_flat.yaml @@ -0,0 +1,488 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. 
+extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_place_towel_flat +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: office_workspace + level2: office + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: basket + level1: home_storage + level2: basket + level3: null + level4: null + level5: null +- object_name: blue_towel + level1: daily_necessities + level2: blue_towel + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- take out the towel from the basket and lay it flat on the table. 
+sub_tasks: +- subtask: Right hand:adjust the blue towel + subtask_index: 0 +- subtask: 'Left hand: grab the brown towel' + subtask_index: 1 +- subtask: 'Left hand: lift the blue towel to the center of view' + subtask_index: 2 +- subtask: 'Right hand: grab a corner of the blue towel and straighten it' + subtask_index: 3 +- subtask: 'Right hand: spread the blue towel flat on the table' + subtask_index: 4 +- subtask: 'Right hand: grab a corner of the purple towel and straighten it' + subtask_index: 5 +- subtask: 'Left hand: spread the brown towel flat on the table' + subtask_index: 6 +- subtask: 'Right hand: spread the purple towel flat on the table' + subtask_index: 7 +- subtask: 'Right hand: spread the grey towel flat on the table' + subtask_index: 8 +- subtask: 'Left hand: spread the purple towel flat on the table' + subtask_index: 9 +- subtask: Left hand:adjust the gray towel + subtask_index: 10 +- subtask: Left hand:adjust the brown towel + subtask_index: 11 +- subtask: Abnormal + subtask_index: 12 +- subtask: 'Right hand: grab a corner of the grey towel and straighten it' + subtask_index: 13 +- subtask: 'Left hand: grab the grey towel' + subtask_index: 14 +- subtask: 'Left hand: lift the purple towel to the center of view' + subtask_index: 15 +- subtask: Right hand:adjust the brown towel + subtask_index: 16 +- subtask: 'Left hand: lift the brown towel to the center of view' + subtask_index: 17 +- subtask: 'Left hand: grab the blue towel' + subtask_index: 18 +- subtask: 'Left hand: lift the grey towel to the center of view' + subtask_index: 19 +- subtask: 'Right hand: spread the brown towel flat on the table' + subtask_index: 20 +- subtask: Left hand:adjust the blue towel + subtask_index: 21 +- subtask: 'Right hand: grab a corner of the brown towel and straighten it' + subtask_index: 22 +- subtask: 'Left hand: spread the grey towel flat on the table' + subtask_index: 23 +- subtask: End + subtask_index: 24 +- subtask: Right hand:adjust the gray towel + 
subtask_index: 25 +- subtask: 'Left hand: spread the blue towel flat on the table' + subtask_index: 26 +- subtask: 'Left hand: grab the purple towel' + subtask_index: 27 +- subtask: 'null' + subtask_index: 28 +atomic_actions: +- grasp +- fold +- lift +- lower +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_right_wrist_rgb +- cam_left_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=h264, pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=h264, + pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=h264, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 962 + total_frames: 1023212 + fps: 30 + total_tasks: 29 + total_videos: 2886 + total_chunks: 1 + chunks_size: 10000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 62.89 GB +frame_num: 1023212 +dataset_size: 62.89 GB +data_structure: "Agilex_Cobot_Magic_place_towel_flat_qced_hardlink/\n|-- annotations\n\ + | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n|\ + \ |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ + | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ + \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ + | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ + \ |-- episode_000003.parquet\n| 
|-- episode_000004.parquet\n| |-- episode_000005.parquet\n\ + | |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| \ + \ |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n\ + | `-- episode_000011.parquet\n| `-- ... (950 more entries)\n|-- meta\n\ + | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ + \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_head_rgb\n\ + \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:961 + val: 795:894 + test: 894:994 +features: + action: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - left_gripper_open + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + - right_gripper_open + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - left_gripper_open + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - 
right_eef_rot_euler_z_rad + - right_gripper_open + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.fps: 30.0 + video.height: 480 + video.width: 640 + video.channels: 3 + video.codec: h264 + video.pix_fmt: yuv420p + video.is_depth_map: false + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.fps: 30.0 + video.height: 480 + video.width: 640 + video.channels: 3 + video.codec: h264 + video.pix_fmt: yuv420p + video.is_depth_map: false + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.fps: 30.0 + video.height: 480 + video.width: 640 + video.channels: 3 + video.codec: h264 + video.pix_fmt: yuv420p + video.is_depth_map: false + has_audio: false + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + 
eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_head_rgb/episode_{id}.mp{id} +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_pour_drink_bottle_cup.yaml b/dataset_info/Agilex_Cobot_Magic_pour_drink_bottle_cup.yaml index 
bbb6fc68e8d9c0d663820bd7b593d506afc5e30f..fd652059e6d93e030a8edf0e411c723a3a26a83e 100644 --- a/dataset_info/Agilex_Cobot_Magic_pour_drink_bottle_cup.yaml +++ b/dataset_info/Agilex_Cobot_Magic_pour_drink_bottle_cup.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -51,47 +51,70 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use a gripper to hold the bottle of the drink and pour it into a - random cup. +task_instruction: +- use a gripper to hold the bottle of the drink and pour it into a random cup. 
sub_tasks: -- Pour the cranberry juice into the cyan cup with the right gripper -- Pour the cranberry juice into the paper cup with the right gripper -- end -- Pour the cranberry juice into the White Plastic Cup with the left gripper -- Place the mineral water bottle on the table with the left gripper -- Pour the mineral water into the paper cup with the left gripper -- Grasp the mineral water bottle with the left gripper -- Pour the cranberry juice into the cyan cup with the left gripper -- Pour the cranberry juice into the White Plastic Cup with the right gripper -- Abnormal -- Pour the mineral water into the glass with the right gripper -- Place the cranberry juice bottle on the table with the right gripper -- Pour the cranberry juice into the paper cup with the left gripper -- Grasp the cranberry juice bottle with the right gripper -- Pour the mineral water into the cyan cup with the left gripper -- Grasp the cranberry juice bottle with the left gripper -- Place the cranberry juice bottle on the table with the left gripper -- Place the mineral water bottle on the table with the right gripper -- Grasp the mineral water bottle with the right gripper -- Pour the mineral water into the cyan cup with the right gripper -- Pour the mineral water into the paper cup with the right gripper -- Pour the mineral water into the glass with the left gripper -- 'null' +- subtask: Pour the cranberry juice into the cyan cup with the right gripper + subtask_index: 0 +- subtask: Pour the cranberry juice into the paper cup with the right gripper + subtask_index: 1 +- subtask: end + subtask_index: 2 +- subtask: Pour the cranberry juice into the White Plastic Cup with the left gripper + subtask_index: 3 +- subtask: Place the mineral water bottle on the table with the left gripper + subtask_index: 4 +- subtask: Pour the mineral water into the paper cup with the left gripper + subtask_index: 5 +- subtask: Grasp the mineral water bottle with the left gripper + subtask_index: 6 +- 
subtask: Pour the cranberry juice into the cyan cup with the left gripper + subtask_index: 7 +- subtask: Pour the cranberry juice into the White Plastic Cup with the right gripper + subtask_index: 8 +- subtask: Abnormal + subtask_index: 9 +- subtask: Pour the mineral water into the glass with the right gripper + subtask_index: 10 +- subtask: Place the cranberry juice bottle on the table with the right gripper + subtask_index: 11 +- subtask: Pour the cranberry juice into the paper cup with the left gripper + subtask_index: 12 +- subtask: Grasp the cranberry juice bottle with the right gripper + subtask_index: 13 +- subtask: Pour the mineral water into the cyan cup with the left gripper + subtask_index: 14 +- subtask: Grasp the cranberry juice bottle with the left gripper + subtask_index: 15 +- subtask: Place the cranberry juice bottle on the table with the left gripper + subtask_index: 16 +- subtask: Place the mineral water bottle on the table with the right gripper + subtask_index: 17 +- subtask: Grasp the mineral water bottle with the right gripper + subtask_index: 18 +- subtask: Pour the mineral water into the cyan cup with the right gripper + subtask_index: 19 +- subtask: Pour the mineral water into the paper cup with the right gripper + subtask_index: 20 +- subtask: Pour the mineral water into the glass with the left gripper + subtask_index: 21 +- subtask: 'null' + subtask_index: 22 atomic_actions: - grasp - lift - pour - lower -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -99,13 +122,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -113,8 +133,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 195 total_frames: 201530 fps: 30 @@ -199,11 +218,9 @@ data_structure: 'Agilex_Cobot_Magic_pour_drink_bottle_cup_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:194 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -476,7 +493,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -484,7 +501,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. 
support_info: For technical support, please open an issue on our GitHub repository. @@ -511,224 +527,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_pour_drink_bottle_cup - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: office_workspace - level2: office - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use a gripper to hold the bottle of the drink and pour it into a random cup. 
- sub_tasks: - - subtask: Pour the cranberry juice into the cyan cup with the right gripper - subtask_index: 0 - - subtask: Pour the cranberry juice into the paper cup with the right gripper - subtask_index: 1 - - subtask: end - subtask_index: 2 - - subtask: Pour the cranberry juice into the White Plastic Cup with the left gripper - subtask_index: 3 - - subtask: Place the mineral water bottle on the table with the left gripper - subtask_index: 4 - - subtask: Pour the mineral water into the paper cup with the left gripper - subtask_index: 5 - - subtask: Grasp the mineral water bottle with the left gripper - subtask_index: 6 - - subtask: Pour the cranberry juice into the cyan cup with the left gripper - subtask_index: 7 - - subtask: Pour the cranberry juice into the White Plastic Cup with the right gripper - subtask_index: 8 - - subtask: Abnormal - subtask_index: 9 - - subtask: Pour the mineral water into the glass with the right gripper - subtask_index: 10 - - subtask: Place the cranberry juice bottle on the table with the right gripper - subtask_index: 11 - - subtask: Pour the cranberry juice into the paper cup with the left gripper - subtask_index: 12 - - subtask: Grasp the cranberry juice bottle with the right gripper - subtask_index: 13 - - subtask: Pour the mineral water into the cyan cup with the left gripper - subtask_index: 14 - - subtask: Grasp the cranberry juice bottle with the left gripper - subtask_index: 15 - - subtask: Place the cranberry juice bottle on the table with the left gripper - subtask_index: 16 - - subtask: Place the mineral water bottle on the table with the right gripper - subtask_index: 17 - - subtask: Grasp the mineral water bottle with the right gripper - subtask_index: 18 - - subtask: Pour the mineral water into the cyan cup with the right gripper - subtask_index: 19 - - subtask: Pour the mineral water into the paper cup with the right gripper - subtask_index: 20 - - subtask: Pour the mineral water into the glass with the left gripper 
- subtask_index: 21 - - subtask: 'null' - subtask_index: 22 - atomic_actions: - - grasp - - lift - - pour - - lower - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 201530 - dataset_size: 1.58 GB - data_structure: 'Agilex_Cobot_Magic_pour_drink_bottle_cup_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (183 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. 
- homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 100K-1M diff --git a/dataset_info/Agilex_Cobot_Magic_pour_drink_bottle_cup_cup.yaml b/dataset_info/Agilex_Cobot_Magic_pour_drink_bottle_cup_cup.yaml new file mode 100644 index 0000000000000000000000000000000000000000..8614f13028bf21278cd554e3a07fa130ae585220 --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_pour_drink_bottle_cup_cup.yaml @@ -0,0 +1,515 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_pour_drink_bottle_cup_cup +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: office_workspace + level2: office + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. 
+objects: +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: cup + level1: kitchen_supplies + level2: cup + level3: null + level4: null + level5: null +- object_name: water_Bottle + level1: beverages + level2: water_bottle + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- use a clamp to hold the bottle of the drink, pour the beverage into a random cup, + then clamp the cup containing the drink and pour the drink into another random cup. +sub_tasks: +- subtask: Right gripper + subtask_index: 0 +- subtask: Pour the water into the blue plastic cup with left gripper + subtask_index: 1 +- subtask: Pour water into the green plastic cup with right gripper + subtask_index: 2 +- subtask: Place the water bottle on the table with right gripper + subtask_index: 3 +- subtask: Pour water into the green plastic cup with left gripper + subtask_index: 4 +- subtask: Pour the drink into the blue plastic cup with left gripper + subtask_index: 5 +- subtask: End + subtask_index: 6 +- subtask: Left gripper + subtask_index: 7 +- subtask: Pick up the blue plastic cup bottle with right gripper + subtask_index: 8 +- subtask: Pick up the green plastic cup bottle with right gripper + subtask_index: 9 +- subtask: Place the green plastic cup on the table with right gripper + subtask_index: 10 +- subtask: Pick up the drink bottle with right gripper + subtask_index: 11 +- subtask: Pour water into the paper cup with right gripper + subtask_index: 12 +- subtask: Place the blue plastic cup on the table with left gripper + subtask_index: 13 +- subtask: Pour the water into the green plastic cup with left gripper + subtask_index: 14 +- subtask: Pick up the drink bottle with left gripper + subtask_index: 15 +- subtask: Place the paper cup on the table with left gripper + subtask_index: 16 +- subtask: 
Pour water into the blue plastic cup with right gripper + subtask_index: 17 +- subtask: Pick up the blue plastic cup with left gripper + subtask_index: 18 +- subtask: Place the blue plastic cup on the table with right gripper + subtask_index: 19 +- subtask: Pour water into the blue plastic cup with left gripper + subtask_index: 20 +- subtask: Grasp the drink bottle with right gripper + subtask_index: 21 +- subtask: Pour water into the paper cup with left gripper + subtask_index: 22 +- subtask: Pick up the green plastic cup with left gripper + subtask_index: 23 +- subtask: Pick up the water bottle with left gripper + subtask_index: 24 +- subtask: Place the green plastic cup on the table with left gripper + subtask_index: 25 +- subtask: Pick up the paper cup with right gripper + subtask_index: 26 +- subtask: Pick up the green plastic cup bottle with left gripper + subtask_index: 27 +- subtask: Pick up the blue plastic cup with right gripper + subtask_index: 28 +- subtask: Pour the drink into the green plastic cup with left gripper + subtask_index: 29 +- subtask: Pick up the blue plastic cup bottle with left gripper + subtask_index: 30 +- subtask: Pick up the green plastic cup with right gripper + subtask_index: 31 +- subtask: Place the paper cup on the table with right gripper + subtask_index: 32 +- subtask: Pour the drink into the paper cup with right gripper + subtask_index: 33 +- subtask: Place the drink bottle on the table with right gripper + subtask_index: 34 +- subtask: Pick up the water bottle with right gripper + subtask_index: 35 +- subtask: Place the water bottle on the table with left gripper + subtask_index: 36 +- subtask: Place the drink bottle on the table with left gripper + subtask_index: 37 +- subtask: Pour the drink into the blue plastic cup with right gripper + subtask_index: 38 +- subtask: Pick up the paper cup with left gripper + subtask_index: 39 +- subtask: Pour the drink into the paper cup with left gripper + subtask_index: 40 +- subtask: 
Pour the drink into the green plastic cup with right gripper + subtask_index: 41 +- subtask: 'null' + subtask_index: 42 +atomic_actions: +- grasp +- lift +- pour +- lower +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 200 + total_frames: 342450 + fps: 30 + total_tasks: 43 + total_videos: 600 + total_chunks: 1 + chunks_size: 1000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 2.67 GB +frame_num: 342450 +dataset_size: 2.67 GB +data_structure: "Agilex_Cobot_Magic_pour_drink_bottle_cup_cup_qced_hardlink/\n|--\ + \ annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ + | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ + | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ + \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ + | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ + \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n\ + | |-- 
episode_000006.parquet\n| |-- episode_000007.parquet\n| \ + \ |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n\ + | `-- episode_000011.parquet\n| `-- ... (188 more entries)\n|-- meta\n\ + | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ + \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_head_rgb\n\ + \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:199 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - 
right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + 
eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n  title={RoboCOIN: An Open-Sourced Bimanual\ \ Robotic Data Collection for Integrated Manipulation},\n  author={Shihan Wu, Xuecheng\ \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n  journal={arXiv preprint arXiv:2511.17441},\n\ \ url = {https://arxiv.org/abs/2511.17441},\n  year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing:   LeRobot Framework: https://github.com/huggingface/lerobot   ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4 +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_pour_water_bottle_cup_cup.yaml b/dataset_info/Agilex_Cobot_Magic_pour_water_bottle_cup_cup.yaml new file mode 100644 index
0000000000000000000000000000000000000000..f3444bf9ff31832ae0908c3ae8c8aecddb6061e2 --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_pour_water_bottle_cup_cup.yaml @@ -0,0 +1,471 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_pour_water_bottle_cup_cup +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: office_workspace + level2: office + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: transparent_bottle + level1: beverages + level2: transparent_bottle + level3: null + level4: null + level5: null +- object_name: pink_clear_plastic_cup + level1: kitchen_supplies + level2: pink_clear_plastic_cup + level3: null + level4: null + level5: null +- object_name: blue_clear_plastic_cup + level1: kitchen_supplies + level2: blue_clear_plastic_cup + level3: null + level4: null + level5: null +- object_name: black_clear_plastic_cup + level1: kitchen_supplies + level2: black_clear_plastic_cup + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. 
+task_instruction: +- pick up the bottle and pour water into the cup, then pick up the cup containing + water and pour it into another cup. +sub_tasks: +- subtask: Place the brown bottle with the right gripper + subtask_index: 0 +- subtask: Grasp the brown bottle with the left gripper + subtask_index: 1 +- subtask: Place the blue cup with the right gripper + subtask_index: 2 +- subtask: Pour water from blue cup to red cup with the left gripper + subtask_index: 3 +- subtask: Place the blue cup with the left gripper + subtask_index: 4 +- subtask: Abnormal + subtask_index: 5 +- subtask: Pour water from brown bottle to blue cup with the left gripper + subtask_index: 6 +- subtask: Place the brown bottle with the left gripper + subtask_index: 7 +- subtask: End + subtask_index: 8 +- subtask: Grasp the blue cup with the right gripper + subtask_index: 9 +- subtask: Pour water from brown bottle to blue cup with the right gripper + subtask_index: 10 +- subtask: Grasp the brown bottle with the right gripper + subtask_index: 11 +- subtask: Grasp the blue cup with the left gripper + subtask_index: 12 +- subtask: Pour water from blue cup to red cup with the right gripper + subtask_index: 13 +- subtask: 'null' + subtask_index: 14 +atomic_actions: +- grasp +- lift +- lower +- pour +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. 
+sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 198 + total_frames: 240141 + fps: 30 + total_tasks: 15 + total_videos: 594 + total_chunks: 1 + chunks_size: 1000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 2.00 GB +frame_num: 240141 +dataset_size: 2.00 GB +data_structure: "Agilex_Cobot_Magic_pour_water_bottle_cup_cup_qced_hardlink/\n|--\ + \ annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ + | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ + | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ + \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ + | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ + \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n\ + | |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| \ + \ |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n\ + | `-- episode_000011.parquet\n| `-- ... 
(186 more entries)\n|-- meta\n\ + | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ + \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_head_rgb\n\ + \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:197 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 26 + names: 
+ - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 
+ eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n  title={RoboCOIN: An Open-Sourced Bimanual\ \ Robotic Data Collection for Integrated Manipulation},\n  author={Shihan Wu, Xuecheng\ \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n  journal={arXiv preprint arXiv:2511.17441},\n\ \ url = {https://arxiv.org/abs/2511.17441},\n  year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing:   LeRobot Framework: https://github.com/huggingface/lerobot   ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4 +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_pour_water_middle_cup.yaml b/dataset_info/Agilex_Cobot_Magic_pour_water_middle_cup.yaml new file mode 100644 index
0000000000000000000000000000000000000000..afe67e89b9a575c2ea523dfbad2a9a3b84cba6f2 --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_pour_water_middle_cup.yaml @@ -0,0 +1,477 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_pour_water_middle_cup +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: office_workspace + level2: office + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: transparent_bottle + level1: beverages + level2: transparent_bottle + level3: null + level4: null + level5: null +- object_name: pink_clear_plastic_cup + level1: kitchen_supplies + level2: pink_clear_plastic_cup + level3: null + level4: null + level5: null +- object_name: blue_clear_plastic_cup + level1: kitchen_supplies + level2: blue_clear_plastic_cup + level3: null + level4: null + level5: null +- object_name: black_clear_plastic_cup + level1: kitchen_supplies + level2: black_clear_plastic_cup + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. 
+task_instruction: +- pick up the bottle filled with water and pour water into the middle cup of the three + cups. +sub_tasks: +- subtask: Abnormal + subtask_index: 0 +- subtask: Pour water from brown bottle to red cup with the right gripper + subtask_index: 1 +- subtask: Grasp the transparent bottle with the left gripper + subtask_index: 2 +- subtask: Pour water from transparent bottle to grey cup with the right gripper + subtask_index: 3 +- subtask: Pour water from transparent bottle to red cup with the left gripper + subtask_index: 4 +- subtask: Pour water from transparent bottle to blue cup with the right gripper + subtask_index: 5 +- subtask: Pour water from transparent bottle to red cup with the right gripper + subtask_index: 6 +- subtask: Pour water from brown bottle to grey cup with the right gripper + subtask_index: 7 +- subtask: Pour water from transparent bottle to brown cup with the left gripper + subtask_index: 8 +- subtask: End + subtask_index: 9 +- subtask: Grasp the transparent bottle with the right gripper + subtask_index: 10 +- subtask: Pour water from brown bottle to blue cup with the right gripper + subtask_index: 11 +- subtask: Pour water from transparent bottle to brown cup with the right gripper + subtask_index: 12 +- subtask: Place the transparent bottle with the right gripper + subtask_index: 13 +- subtask: Grasp the brown bottle with the right gripper + subtask_index: 14 +- subtask: Place the transparent bottle with the left gripper + subtask_index: 15 +- subtask: Place the brown bottle with the right gripper + subtask_index: 16 +- subtask: 'null' + subtask_index: 17 +atomic_actions: +- grasp +- lift +- lower +- pour +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. 
+sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 192 + total_frames: 130785 + fps: 30 + total_tasks: 18 + total_videos: 576 + total_chunks: 1 + chunks_size: 1000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 1.10 GB +frame_num: 130785 +dataset_size: 1.10 GB +data_structure: "Agilex_Cobot_Magic_pour_water_middle_cup_qced_hardlink/\n|-- annotations\n\ + | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n|\ + \ |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ + | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ + \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ + | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ + \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n\ + | |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| \ + \ |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n\ + | `-- episode_000011.parquet\n| `-- ... 
(180 more entries)\n|-- meta\n\ + | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ + \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_head_rgb\n\ + \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:191 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 26 + names: 
+ - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 
+ eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_pour_water_pink_cup.yaml b/dataset_info/Agilex_Cobot_Magic_pour_water_pink_cup.yaml new file mode 100644 index 
0000000000000000000000000000000000000000..3d05cf3b9e20997adbcde6799fad25921e460e0b --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_pour_water_pink_cup.yaml @@ -0,0 +1,456 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_pour_water_pink_cup +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: office_workspace + level2: office + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: transparent_bottle + level1: beverages + level2: transparent_bottle + level3: null + level4: null + level5: null +- object_name: pink_clear_plastic_cup + level1: kitchen_supplies + level2: pink_clear_plastic_cup + level3: null + level4: null + level5: null +- object_name: blue_clear_plastic_cup + level1: kitchen_supplies + level2: blue_clear_plastic_cup + level3: null + level4: null + level5: null +- object_name: black_clear_plastic_cup + level1: kitchen_supplies + level2: black_clear_plastic_cup + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. 
+task_instruction: +- pick up the bottle filled with water and pour it into the pink cup. +sub_tasks: +- subtask: Grasp the green bottle with the right gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Pour water from green bottle to pink cup with the right gripper + subtask_index: 2 +- subtask: Place the green bottle with the right gripper + subtask_index: 3 +- subtask: Grasp the green bottle with the left gripper + subtask_index: 4 +- subtask: Pour water from green bottle to pink cup with the left gripper + subtask_index: 5 +- subtask: Place the green bottle with the left gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 +atomic_actions: +- grasp +- lift +- lower +- pour +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 100 + total_frames: 68443 + fps: 30 + total_tasks: 8 + total_videos: 300 + total_chunks: 1 + chunks_size: 1000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 526.79 MB +frame_num: 68443 +dataset_size: 526.79 MB +data_structure: 
"Agilex_Cobot_Magic_pour_water_pink_cup_qced_hardlink/\n|-- annotations\n\ + | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n|\ + \ |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ + | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ + \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ + | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ + \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n\ + | |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| \ + \ |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n\ + | `-- episode_000011.parquet\n| `-- ... (88 more entries)\n|-- meta\n\ + | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ + \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_head_rgb\n\ + \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:99 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + 
observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - 
right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible 
with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4 +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_seal_bag.yaml b/dataset_info/Agilex_Cobot_Magic_seal_bag.yaml new file mode 100644 index 0000000000000000000000000000000000000000..86b428439df78f315f9d442b52da420b4dabead9 --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_seal_bag.yaml @@ -0,0 +1,461 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_seal_bag +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: commercial & convenience + level2: supermarket + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. 
+objects: +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: file bag zipper-blue + level1: stationery + level2: file bag zipper-blue + level3: null + level4: null + level5: null +- object_name: orange + level1: food + level2: orange + level3: null + level4: null + level5: null +- object_name: pear + level1: food + level2: pear + level3: null + level4: null + level5: null +- object_name: eggplant + level1: food + level2: eggplant + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- Zip up the file bag on the table. +sub_tasks: +- subtask: Lift the fruit bag with your left gripper + subtask_index: 0 +- subtask: Abnormal + subtask_index: 1 +- subtask: Pull the fruit bag with right gripper + subtask_index: 2 +- subtask: Pull the fruit bag with left gripper + subtask_index: 3 +- subtask: Place the fruit bag with right gripper + subtask_index: 4 +- subtask: Grasp the fruit bag with right gripper + subtask_index: 5 +- subtask: End + subtask_index: 6 +- subtask: Lift the fruit bag with your right gripper + subtask_index: 7 +- subtask: Grasp the fruit bag with left gripper + subtask_index: 8 +- subtask: Place the fruit bag with left gripper + subtask_index: 9 +- subtask: 'null' + subtask_index: 10 +atomic_actions: +- grasp +- lift +- lower +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. 
+sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 398 + total_frames: 264657 + fps: 30 + total_tasks: 11 + total_videos: 1194 + total_chunks: 1 + chunks_size: 1000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 2.90 GB +frame_num: 264657 +dataset_size: 2.90 GB +data_structure: "Agilex_Cobot_Magic_seal_bag_qced_hardlink/\n|-- annotations\n| \ + \ |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| \ + \ |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ + | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ + \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ + | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ + \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n\ + | |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| \ + \ |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n\ + | `-- episode_000011.parquet\n| `-- ... 
(386 more entries)\n|-- meta\n\ + | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ + \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_head_rgb\n\ + \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:397 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 26 + names: 
+ - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 
+ eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4 +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_stack_block.yaml b/dataset_info/Agilex_Cobot_Magic_stack_block.yaml new file mode 100644 index 
0000000000000000000000000000000000000000..0ff36deae721433a0e8523e78773bdaf66c8e8b6 --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_stack_block.yaml @@ -0,0 +1,1635 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_stack_block +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: household + level2: living_room + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: square_building_blocks + level1: toys + level2: square_building_blocks + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- According to the building block template in front, assemble the scattered blocks + together. 
+sub_tasks: +- subtask: 'Put the yellow build block on the blue build block + + ' + subtask_index: 0 +- subtask: 'Put the orange build block on the left of the blue build block + + ' + subtask_index: 1 +- subtask: 'Put the red build block on the right of the yellow build block + + ' + subtask_index: 2 +- subtask: 'Put the orange build block on the right of the yellow build block + + ' + subtask_index: 3 +- subtask: Put the green cube on the left of the blue cube with the left gripper + subtask_index: 4 +- subtask: 'Put the yellow build block on the blue build block + + ' + subtask_index: 5 +- subtask: 'Put the green build block on the right of the blue build block + + ' + subtask_index: 6 +- subtask: 'Put the red build block on the blue build block + + ' + subtask_index: 7 +- subtask: 'Put the yellow build block on the right of the red build block + + ' + subtask_index: 8 +- subtask: 'Put the red build block on the behind of the green build block + + ' + subtask_index: 9 +- subtask: 'Put the blue build block on the behind of the orange build block + + ' + subtask_index: 10 +- subtask: Put the orange cube on the behind of the red cube + subtask_index: 11 +- subtask: Put the blue cube on the behind of the red cube + subtask_index: 12 +- subtask: Put the blue cube on the right of the green cube + subtask_index: 13 +- subtask: Put the green cube on the left of the yellow cube with the right gripper + subtask_index: 14 +- subtask: Grasp the green block with left gripper + subtask_index: 15 +- subtask: Put the blue cube and green cube in the center of the table with the right + gripper + subtask_index: 16 +- subtask: Put the green cube on the behind of the yellow cube + subtask_index: 17 +- subtask: 'Put the green build block on the right of the orange build block + + ' + subtask_index: 18 +- subtask: Put the blue cube on the behind of the yellow cube + subtask_index: 19 +- subtask: 'Put the green build block on the orange build block + + ' + subtask_index: 20 +- 
subtask: Grasp the blue block with left gripper + subtask_index: 21 +- subtask: Put the green cube on the right of the red cube + subtask_index: 22 +- subtask: Grasp the orange block with left gripper + subtask_index: 23 +- subtask: 'Put the blue build block on the right of the yellow build block + + ' + subtask_index: 24 +- subtask: 'Put the yellow build block on the right of the green build block + + ' + subtask_index: 25 +- subtask: Put the yellow cube on the right of the red cube + subtask_index: 26 +- subtask: 'Put the orange build block on the right of the green build block + + ' + subtask_index: 27 +- subtask: Adjust the position of the cube with the right gripper + subtask_index: 28 +- subtask: Put the red cube and green cube in the center of the table with the left + gripper + subtask_index: 29 +- subtask: 'Put the yellow build block on the right of the red build block + + ' + subtask_index: 30 +- subtask: Put the blue cube and green cube in the center of the table with the left + gripper + subtask_index: 31 +- subtask: 'Put the red build block on the behind of the orange build block + + ' + subtask_index: 32 +- subtask: Put the green cube on the red cube + subtask_index: 33 +- subtask: 'Put the orange build block on the behind of the red build block + + ' + subtask_index: 34 +- subtask: 'Put the blue build block on the right of the yellow build block + + ' + subtask_index: 35 +- subtask: 'Put the yellow build block on the right of the orange build block + + ' + subtask_index: 36 +- subtask: Put the red cube on the left of the green cube with the left gripper + subtask_index: 37 +- subtask: 'Put the yellow build block on the behind of the blue build block + + ' + subtask_index: 38 +- subtask: 'Put the green build block in the center of the table + + ' + subtask_index: 39 +- subtask: Put the orange cube on the left of the yellow cube with the right gripper + subtask_index: 40 +- subtask: Put the yellow cube on the behind of the green cube + subtask_index: 41 
+- subtask: 'Put the blue build block on the green build block + + ' + subtask_index: 42 +- subtask: 'Put the blue build block on the right of the orange build block + + ' + subtask_index: 43 +- subtask: 'Put the yellow build block in the center of the table + + ' + subtask_index: 44 +- subtask: Grasp the yellow block with left gripper + subtask_index: 45 +- subtask: 'Put the orange build block on the behind of the red build block + + ' + subtask_index: 46 +- subtask: Put the red cube on the left of the orange cube + subtask_index: 47 +- subtask: Put the green cube on the right of the yellow cube with the left gripper + subtask_index: 48 +- subtask: 'Put the blue build block on the front of the orange build block + + ' + subtask_index: 49 +- subtask: 'Put the green build block on the right of the orange build block + + ' + subtask_index: 50 +- subtask: 'Put the orange build block on the behind of the green build block + + ' + subtask_index: 51 +- subtask: 'Put the red build block on the behind of the blue build block + + ' + subtask_index: 52 +- subtask: Put the yellow cube and orange cube in the center of the table with the + right gripper + subtask_index: 53 +- subtask: Put the blue cube in the center of the table + subtask_index: 54 +- subtask: 'Put the orange build block on the behind of the blue build block + + ' + subtask_index: 55 +- subtask: Grasp the red block with right gripper + subtask_index: 56 +- subtask: Put the orange cube on the right of the yellow cube + subtask_index: 57 +- subtask: 'Put the red build block on the behind of the yellow build block + + ' + subtask_index: 58 +- subtask: 'Put the yellow build block on the right of the green build block + + ' + subtask_index: 59 +- subtask: Put the yellow cube on the front of the orange cube with the right gripper + subtask_index: 60 +- subtask: Put the green build block on the behind of the blue build block + subtask_index: 61 +- subtask: Put the yellow build block in the center of the table + 
subtask_index: 62 +- subtask: Put the yellow build block on the behind of the blue build block + subtask_index: 63 +- subtask: 'Put the red build block on the left of the green build block + + ' + subtask_index: 64 +- subtask: Put the orange cube on the right of the blue cube + subtask_index: 65 +- subtask: Put the yellow cube on the behind of the red cube + subtask_index: 66 +- subtask: Grasp the orange block with right gripper + subtask_index: 67 +- subtask: Put the red cube on the right of the green cube + subtask_index: 68 +- subtask: move the build block to the center of the table + subtask_index: 69 +- subtask: 'Put the yellow build block on the right of the red build block + + ' + subtask_index: 70 +- subtask: 'Put the green build block on the behind of the blue build block + + ' + subtask_index: 71 +- subtask: 'Put the yellow build block on the behind of the blue build block + + ' + subtask_index: 72 +- subtask: Put the green cube on the right of the orange cube with the right gripper + subtask_index: 73 +- subtask: Put the red cube on the right of the green cube with the left gripper + subtask_index: 74 +- subtask: 'Put the yellow build block on the behind of the green build block + + ' + subtask_index: 75 +- subtask: Put the green build block on the left of the orange build block + subtask_index: 76 +- subtask: Put the yellow cube on the front of the blue cube with the right gripper + subtask_index: 77 +- subtask: 'Put the red build block on the right of the blue build block + + ' + subtask_index: 78 +- subtask: 'Put the yellow build block on the blue build block + + ' + subtask_index: 79 +- subtask: Put the orange cube on the behind of the blue cube + subtask_index: 80 +- subtask: 'Put the yellow build block on the right of the blue build block + + ' + subtask_index: 81 +- subtask: Put the green cube on the right of the blue cube with the left gripper + subtask_index: 82 +- subtask: Put the yellow cube on the right of the orange cube + subtask_index: 83 
+- subtask: Put the yellow cube and green cube in the center of the table with the + left gripper + subtask_index: 84 +- subtask: 'Put the blue build block on the left of the yellow build block + + ' + subtask_index: 85 +- subtask: Put the red cube and blue cube in the center of the table with the right + gripper + subtask_index: 86 +- subtask: 'Put the blue build block on the right of the red build block + + ' + subtask_index: 87 +- subtask: Put the red cube on the right of the orange cube with the right gripper + subtask_index: 88 +- subtask: 'Put the green build block on the right of the red build block + + ' + subtask_index: 89 +- subtask: Put the blue cube on the left of the green cube with the right gripper + subtask_index: 90 +- subtask: Put the blue cube on the right of the yellow cube with the right gripper + subtask_index: 91 +- subtask: 'Put the orange build block on the behind of the yellow build block + + ' + subtask_index: 92 +- subtask: Put the blue cube on the left of the orange cube + subtask_index: 93 +- subtask: Put the orange cube on the right of the green cube + subtask_index: 94 +- subtask: 'Put the red build block in the center of the table + + ' + subtask_index: 95 +- subtask: 'Put the green build block on the left of the blue build block + + ' + subtask_index: 96 +- subtask: Put the green cube on the left of the orange cube with the left gripper + subtask_index: 97 +- subtask: Grasp the blue block with right gripper + subtask_index: 98 +- subtask: 'Put the orange build block on the behind of the yellow build block + + ' + subtask_index: 99 +- subtask: 'Put the yellow build block in the center of the table + + ' + subtask_index: 100 +- subtask: 'Put the blue build block on the front of the green build block + + ' + subtask_index: 101 +- subtask: Put the red cube in the center of the table + subtask_index: 102 +- subtask: 'Put the blue build block on the behind of the orange build block + + ' + subtask_index: 103 +- subtask: Put the orange 
cube on the right of the yellow cube with the right gripper + subtask_index: 104 +- subtask: Put the red cube on the behind of the orange cube + subtask_index: 105 +- subtask: Put the blue cube and orange cube in the center of the table with the right + gripper + subtask_index: 106 +- subtask: 'Put the green build block on the left of the blue build block + + ' + subtask_index: 107 +- subtask: 'Put the right build block on the right of the yellow build block + + ' + subtask_index: 108 +- subtask: 'Put the green build block on the right of the red build block + + ' + subtask_index: 109 +- subtask: 'Put the yellow build block on the right of the red build block + + ' + subtask_index: 110 +- subtask: 'Put the green build block on the left of the blue build block + + ' + subtask_index: 111 +- subtask: 'Put the yellow build block on the behind of the red build block + + ' + subtask_index: 112 +- subtask: 'Put the blue build block in the center of the table + + ' + subtask_index: 113 +- subtask: 'Put the yellow build block on the right of the blue build block + + ' + subtask_index: 114 +- subtask: Put the orange cube on the left of the green cube with the left gripper + subtask_index: 115 +- subtask: Put the yellow cube on the right of the blue cube with the right gripper + subtask_index: 116 +- subtask: 'Put the green build block on the behind of the yellow build block + + ' + subtask_index: 117 +- subtask: Put the red cube on the right of the orange cube with the left gripper + subtask_index: 118 +- subtask: 'Put the green build block on the right of the yellow build block + + ' + subtask_index: 119 +- subtask: 'Put the green build block on the right of the red build block + + ' + subtask_index: 120 +- subtask: 'Put the green build block on the right of the orange build block + + ' + subtask_index: 121 +- subtask: Put the yellow cube on the right of the green cube + subtask_index: 122 +- subtask: 'Put the yellow build block on the right of the orange build block + + ' 
+ subtask_index: 123 +- subtask: 'Put the orange build block on the green build block + + ' + subtask_index: 124 +- subtask: Put the green cube on the front of the blue cube with the left gripper + subtask_index: 125 +- subtask: 'Put the green build block in the center of the table + + ' + subtask_index: 126 +- subtask: Put the green cube in the center of the table + subtask_index: 127 +- subtask: Put the yellow cube and green cube in the center of the table with the + right gripper + subtask_index: 128 +- subtask: Put the yellow cube and blue cube in the center of the table with the right + gripper + subtask_index: 129 +- subtask: Put the red cube on the behind of the yellow cube + subtask_index: 130 +- subtask: Put the XX build block on the XX build block + subtask_index: 131 +- subtask: Put the orange cube on the right of the green cube with the right gripper + subtask_index: 132 +- subtask: 'Put the orange build block on the behind of the blue build block + + ' + subtask_index: 133 +- subtask: 'Put the green build block on the behind of the yellow build block + + ' + subtask_index: 134 +- subtask: Put the blue cube in the front of the green cube + subtask_index: 135 +- subtask: 'Put the green build block on the right of the orange build block + + ' + subtask_index: 136 +- subtask: 'Put the blue build block on the right of the red build block + + ' + subtask_index: 137 +- subtask: 'Put the red build block on the right of the orange build block + + ' + subtask_index: 138 +- subtask: 'Put the blue build block on the behind of the green build block + + ' + subtask_index: 139 +- subtask: 'Put the green build block on the right of the blue build block + + ' + subtask_index: 140 +- subtask: ' + + Put the yellow build block in the center of the table' + subtask_index: 141 +- subtask: 'Put the orange build block on the behind of the red build block + + ' + subtask_index: 142 +- subtask: Put the red cube and orange cube in the center of the table with the right + gripper 
+ subtask_index: 143 +- subtask: 'Put the orange build block on the behind of the green build block + + ' + subtask_index: 144 +- subtask: 'Put the orange build block in the center of the table + + ' + subtask_index: 145 +- subtask: Put the yellow cube in the center of the table + subtask_index: 146 +- subtask: 'Put the yellow build block on the behind of the green build block + + ' + subtask_index: 147 +- subtask: 'Put the blue build block on the right of the green build block + + ' + subtask_index: 148 +- subtask: Put the green cube on the right of the orange cube + subtask_index: 149 +- subtask: Adjust the position of the cube with the left gripper + subtask_index: 150 +- subtask: Put the green cube on the left of the yellow cube with the left gripper + subtask_index: 151 +- subtask: 'Put the green build block on the behind of the yellow build block + + ' + subtask_index: 152 +- subtask: 'Put the orange build block on the right of the red build block + + ' + subtask_index: 153 +- subtask: 'Put the red build block on the right of the yellow build block + + ' + subtask_index: 154 +- subtask: 'Put the green build block on the red build block + + ' + subtask_index: 155 +- subtask: Put the yellow cube on the behind of the blue cube with the left gripper + subtask_index: 156 +- subtask: 'Put the yellow build block on the behind of the red build block + + ' + subtask_index: 157 +- subtask: Put the orange cube on the right of the red cube with the left gripper + subtask_index: 158 +- subtask: Adjust the pose of the building blocks + subtask_index: 159 +- subtask: 'Put the blue build block on the front of the green build block + + ' + subtask_index: 160 +- subtask: Put the green cube on the right of the red cube with the right gripper + subtask_index: 161 +- subtask: 'Put the yellow build block on the behind of the blue build block + + ' + subtask_index: 162 +- subtask: Put the red cube in the center of the table with the left gripper + subtask_index: 163 +- subtask: 'Put 
the red build block on the green build block + + ' + subtask_index: 164 +- subtask: 'Put the yellow build block on the orange build block + + ' + subtask_index: 165 +- subtask: 'Put the blue build block in the center of the table + + ' + subtask_index: 166 +- subtask: 'Put the green build block on the right of the orange build block + + ' + subtask_index: 167 +- subtask: 'Put the red build block on the right of the green build block + + ' + subtask_index: 168 +- subtask: Put the red cube on the right of the orange cube + subtask_index: 169 +- subtask: Put the yellow cube on the left of the blue cube with the left gripper + subtask_index: 170 +- subtask: 'Put the red build block on the behind of the green build block + + ' + subtask_index: 171 +- subtask: Put the yellow cube on the right of the green cube with the left gripper + subtask_index: 172 +- subtask: 'Put the green build block on the behind of the blue build block + + ' + subtask_index: 173 +- subtask: 'Put the blue build block on the orange build block + + ' + subtask_index: 174 +- subtask: 'Put the orange build block on the orange build block + + ' + subtask_index: 175 +- subtask: 'Put the green build block on the orange build block + + ' + subtask_index: 176 +- subtask: 'Put the yellow build block in the center of the table + + ' + subtask_index: 177 +- subtask: Put the blue cube in the front of the yellow cube + subtask_index: 178 +- subtask: 'Put the red build block on the behind of the green build block + + ' + subtask_index: 179 +- subtask: 'Put the blue build block on the behind of the green build block + + ' + subtask_index: 180 +- subtask: Put the red cube and yellow cube in the center of the table with the right + gripper + subtask_index: 181 +- subtask: 'Put the orange build block on the right of the yellow build block + + ' + subtask_index: 182 +- subtask: 'Put the orange build block on the left of the red build block + + ' + subtask_index: 183 +- subtask: 'Put the orange build block in the 
center of the table + + ' + subtask_index: 184 +- subtask: 'Put the blue build block in the center of the table + + ' + subtask_index: 185 +- subtask: 'Put the yellow build block on the right of the green build block + + ' + subtask_index: 186 +- subtask: 'Put the orange build block in the center of the table + + ' + subtask_index: 187 +- subtask: Put the red cube and green cube in the center of the table with the right + gripper + subtask_index: 188 +- subtask: 'Put the blue build block on the right of the yellow build block + + ' + subtask_index: 189 +- subtask: 'Put the orange build block on the behind of the blue build block + + ' + subtask_index: 190 +- subtask: 'Put the orange build block on the right of the yellow build block + + ' + subtask_index: 191 +- subtask: Put the blue cube on the left of the yellow cube with the left gripper + subtask_index: 192 +- subtask: Put the red cube on the right of the green cube with the right gripper + subtask_index: 193 +- subtask: 'Put the red build block on the behind of the green build block + + ' + subtask_index: 194 +- subtask: 'Put the orange build block on the right of the red build block + + ' + subtask_index: 195 +- subtask: Put the blue build block on the right of the yellow build block + subtask_index: 196 +- subtask: 'Put the green build block on the right of the yellow build block + + ' + subtask_index: 197 +- subtask: Put the yellow cube and blue cube in the center of the table with the left + gripper + subtask_index: 198 +- subtask: Put the yellow cube on the left of the orange cube with the right gripper + subtask_index: 199 +- subtask: Put the red cube in the front of the orange cube + subtask_index: 200 +- subtask: 'Put the yellow build block on the behind of the green build block + + ' + subtask_index: 201 +- subtask: 'Put the blue build block in the center of the table + + ' + subtask_index: 202 +- subtask: Put the green build block on the behind of the blue build block + subtask_index: 203 +- subtask: 
Put the orange cube on the left of the red cube with the right gripper + subtask_index: 204 +- subtask: Put the green cube and orange cube in the center of the table with the + right gripper + subtask_index: 205 +- subtask: Grasp the yellow block with right gripper + subtask_index: 206 +- subtask: 'Put the orange build block on the green build block + + ' + subtask_index: 207 +- subtask: 'Put the yellow build block on the front of the red build block + + ' + subtask_index: 208 +- subtask: Put the orange cube on the right of the blue cube with the right gripper + subtask_index: 209 +- subtask: 'Put the green build block on the right of the yellow build block + + ' + subtask_index: 210 +- subtask: Put the yellow cube on the left of the red cube with the right gripper + subtask_index: 211 +- subtask: Abnormal + subtask_index: 212 +- subtask: 'Put the red build block on the yellow build block + + ' + subtask_index: 213 +- subtask: Put the blue cube on the behind of the orange cube + subtask_index: 214 +- subtask: 'Put the red build block on the right of the blue build block + + ' + subtask_index: 215 +- subtask: Put the green cube in the center of the table with the left gripper + subtask_index: 216 +- subtask: 'Put the red build block on the front of the green build block + + ' + subtask_index: 217 +- subtask: Put the yellow cube on the behind of the blue cube + subtask_index: 218 +- subtask: 'Put the blue build block on the left of the yellow build block + + ' + subtask_index: 219 +- subtask: Put the orange cube on the left of the blue cube + subtask_index: 220 +- subtask: 'Put the orange build block on the front of the blue build block + + ' + subtask_index: 221 +- subtask: Put the yellow cube in the front of the green cube + subtask_index: 222 +- subtask: 'Put the blue build block in the center of the table + + ' + subtask_index: 223 +- subtask: Put the green cube in the front of the yellow cube + subtask_index: 224 +- subtask: Put the red cube and orange cube in 
the center of the table with the left + gripper + subtask_index: 225 +- subtask: Put the yellow build block on the behind of the green build block + subtask_index: 226 +- subtask: 'Put the orange build block on the blue build block + + ' + subtask_index: 227 +- subtask: 'Put the blue build block on the left of the yellow build block + + ' + subtask_index: 228 +- subtask: Grasp the red block with left gripper + subtask_index: 229 +- subtask: 'Put the blue build block on the behind of the orange build block + + ' + subtask_index: 230 +- subtask: 'Put the red build block on the right of the yellow build block + + ' + subtask_index: 231 +- subtask: 'Put the red build block in the center of the table + + ' + subtask_index: 232 +- subtask: 'Put the red build block on the right of the orange build block + + ' + subtask_index: 233 +- subtask: 'Put the red build block on the behind of the yellow build block + + ' + subtask_index: 234 +- subtask: 'Put the green build block on the right of the red build block + + ' + subtask_index: 235 +- subtask: 'Put the blue build block on the behind of the green build block + + ' + subtask_index: 236 +- subtask: Put the orange cube on the right of the red cube + subtask_index: 237 +- subtask: 'Put the red build block on the right of the green build block + + ' + subtask_index: 238 +- subtask: 'Put the blue build block on the behind of the green build block + + ' + subtask_index: 239 +- subtask: 'Put the green build block in the center of the table + + ' + subtask_index: 240 +- subtask: Put the yellow cube on the right of the blue cube + subtask_index: 241 +- subtask: Put the yellow cube on the behind of the orange cube + subtask_index: 242 +- subtask: Put the yellow cube in the front of the orange cube + subtask_index: 243 +- subtask: 'Put the green build block on the red build block + + ' + subtask_index: 244 +- subtask: Put the green cube on the right of the yellow cube + subtask_index: 245 +- subtask: 'Put the yellow build block on the 
right of the green build block + + ' + subtask_index: 246 +- subtask: Put the red cube and blue cube in the center of the table with the left + gripper + subtask_index: 247 +- subtask: 'Put the green build block on the left of the orange build block + + ' + subtask_index: 248 +- subtask: Put the orange cube on the left of the blue cube with the right gripper + subtask_index: 249 +- subtask: 'Put the green build block on the blue build block + + ' + subtask_index: 250 +- subtask: 'Put the green build block on the orange build block + + ' + subtask_index: 251 +- subtask: 'Put the orange build block on the right of the blue build block + + ' + subtask_index: 252 +- subtask: 'Put the orange build block on the right of the blue build block + + ' + subtask_index: 253 +- subtask: Put the blue cube on the yellow cube + subtask_index: 254 +- subtask: 'Put the orange build block on the right of the green build block + + ' + subtask_index: 255 +- subtask: Put the blue cube on the right of the orange cube with the right gripper + subtask_index: 256 +- subtask: Put the yellow cube on the right of the orange cube with the right gripper + subtask_index: 257 +- subtask: Put the red cube on the left of the orange cube with the left gripper + subtask_index: 258 +- subtask: 'Put the yellow build block on the behind of the green build block + + ' + subtask_index: 259 +- subtask: 'Put the orange build block on the left of the red build block + + ' + subtask_index: 260 +- subtask: 'Put the red build block on the front of the green build block + + ' + subtask_index: 261 +- subtask: Put the blue cube on the right of the red cube with the right gripper + subtask_index: 262 +- subtask: 'Put the yellow build block on the behind of the red build block + + ' + subtask_index: 263 +- subtask: 'Put the red build block on the behind of the yellow build block + + ' + subtask_index: 264 +- subtask: 'Put the red build block on the behind of the blue build block + + ' + subtask_index: 265 +- subtask: 
Put the yellow cube on the left of the green cube + subtask_index: 266 +- subtask: 'Put the blue build block on the right of the orange build block + + ' + subtask_index: 267 +- subtask: Put the green cube on the right of the blue cube + subtask_index: 268 +- subtask: 'Put the red build block on the blue build block + + ' + subtask_index: 269 +- subtask: 'Put the blue build block on the red build block + + ' + subtask_index: 270 +- subtask: Put the orange cube in the front of the blue cube + subtask_index: 271 +- subtask: 'Put the red build block on the right of the blue build block + + ' + subtask_index: 272 +- subtask: Put the blue build block on the right of the yellow build block + subtask_index: 273 +- subtask: Put the red build block on the right of the blue build block + subtask_index: 274 +- subtask: Put the green cube in the front of the blue cube + subtask_index: 275 +- subtask: Put the red cube in the front of the blue cube + subtask_index: 276 +- subtask: Put the blue cube on the right of the green cube with the left gripper + subtask_index: 277 +- subtask: 'Put the blue build block on the behind of the orange build block + + ' + subtask_index: 278 +- subtask: 'Put the red build block in the center of the table + + ' + subtask_index: 279 +- subtask: Put the green cube and orange cube in the center of the table with the + left gripper + subtask_index: 280 +- subtask: 'Put the blue build block on the behind of the red build block + + ' + subtask_index: 281 +- subtask: Put the blue cube on the right of the orange cube + subtask_index: 282 +- subtask: 'Put the blue build block on the right of the green build block + + ' + subtask_index: 283 +- subtask: 'Put the red build block on the front of the blue build block + + ' + subtask_index: 284 +- subtask: Static + subtask_index: 285 +- subtask: 'Put the red build block on the front of the yellow build block + + ' + subtask_index: 286 +- subtask: 'Put the blue build block on the right of the yellow build block + 
+ ' + subtask_index: 287 +- subtask: Put the yellow cube in the front of the blue cube + subtask_index: 288 +- subtask: 'Put the yellow build block in the center of the table + + ' + subtask_index: 289 +- subtask: 'Put the yellow build block on the behind of the red build block + + ' + subtask_index: 290 +- subtask: Put the red cube on the right of the yellow cube + subtask_index: 291 +- subtask: 'Put the green build block on the behind of the red build block + + ' + subtask_index: 292 +- subtask: 'Put the yellow build block on the right of the green build block + + ' + subtask_index: 293 +- subtask: 'Put the yellow build block on the behind of the blue build block + + ' + subtask_index: 294 +- subtask: Put the orange cube on the left of the red cube + subtask_index: 295 +- subtask: 'Put the yellow build block on the behind of the blue build block + + ' + subtask_index: 296 +- subtask: 'Put the yellow build block on the right of the blue build block + + ' + subtask_index: 297 +- subtask: 'Put the red build block in the center of the table + + ' + subtask_index: 298 +- subtask: 'Put the green build block on the right of the blue build block + + ' + subtask_index: 299 +- subtask: 'Put the green build block on the behind of the orange build block + + ' + subtask_index: 300 +- subtask: Put the blue cube in the front of the orange cube + subtask_index: 301 +- subtask: Put the green cube on the right of the blue cube with the right gripper + subtask_index: 302 +- subtask: 'Put the green build block on the right of the red build block + + ' + subtask_index: 303 +- subtask: 'Put the red build block on the behind of the green build block + + ' + subtask_index: 304 +- subtask: 'Put the red build block on the behind of the yellow build block + + ' + subtask_index: 305 +- subtask: 'Put the green build block in the center of the table + + ' + subtask_index: 306 +- subtask: 'Put the yellow build block on the right of the blue build block + + ' + subtask_index: 307 +- subtask: 
'Put the orange build block on the behind of the blue build block + + ' + subtask_index: 308 +- subtask: 'Put the green build block on the right of the yellow build block + + ' + subtask_index: 309 +- subtask: 'Put the orange build block in the center of the table + + ' + subtask_index: 310 +- subtask: Put the red cube on the right of the blue cube + subtask_index: 311 +- subtask: 'Put the green build block on the behind of the orange build block + + ' + subtask_index: 312 +- subtask: Put the blue build block on the right of the orange build block + subtask_index: 313 +- subtask: Put the orange cube on the behind of the yellow cube with the left gripper + subtask_index: 314 +- subtask: Put the orange cube in the front of the yellow cube + subtask_index: 315 +- subtask: 'Put the yellow build block on the right of the orange build block + + ' + subtask_index: 316 +- subtask: 'Put the orange build block on the front of the blue build block + + ' + subtask_index: 317 +- subtask: Put the yellow cube on the behind of the green cube with the left gripper + subtask_index: 318 +- subtask: Put the red cube on the left of the blue cube + subtask_index: 319 +- subtask: Put the blue cube on the behind of the green cube + subtask_index: 320 +- subtask: Put the blue cube on the right of the green cube with the right gripper + subtask_index: 321 +- subtask: 'Put the green build block on the right of the orange build block + + ' + subtask_index: 322 +- subtask: 'Put the red build block on the right of the blue build block + + ' + subtask_index: 323 +- subtask: Put the red cube on the left of the green cube with the right gripper + subtask_index: 324 +- subtask: 'Put the blue build block on the right of the orange build block + + ' + subtask_index: 325 +- subtask: 'Put the yellow build block on the behind of the blue build block + + ' + subtask_index: 326 +- subtask: Put the yellow cube on the left of the orange cube + subtask_index: 327 +- subtask: Put the orange cube in the center 
of the table + subtask_index: 328 +- subtask: Put the blue cube on the right of the red cube + subtask_index: 329 +- subtask: 'Put the blue build block on the right of the yellow build block + + ' + subtask_index: 330 +- subtask: 'Put the orange build block on the right of the green build block + + ' + subtask_index: 331 +- subtask: 'Put the green build block in the center of the table + + ' + subtask_index: 332 +- subtask: Put the red cube in the front of the green cube + subtask_index: 333 +- subtask: 'Put the green build block on the behind of the yellow build block + + ' + subtask_index: 334 +- subtask: Put the yellow cube on the right of the green cube with the right gripper + subtask_index: 335 +- subtask: 'Put the orange build block on the behind of the yellow build block + + ' + subtask_index: 336 +- subtask: 'Put the green build block on the behind of the blue build block + + ' + subtask_index: 337 +- subtask: ' + + Put the yellow build block on the behind of the green build block + + ' + subtask_index: 338 +- subtask: 'Put the green build block on the behind of the yellow build block + + ' + subtask_index: 339 +- subtask: 'Put the orange build block on the behind of the blue build block + + ' + subtask_index: 340 +- subtask: Put the red cube on the right of the yellow cube with the right gripper + subtask_index: 341 +- subtask: Put the blue cube on the right of the yellow cube + subtask_index: 342 +- subtask: Put the green build block in the center of the table + subtask_index: 343 +- subtask: 'Put the green build block on the right of the red build block + + ' + subtask_index: 344 +- subtask: Put the green cube on the right of the yellow cube with the right gripper + subtask_index: 345 +- subtask: 'Put the red build block on the front of the yellow build block + + ' + subtask_index: 346 +- subtask: 'Put the green build block in the center of the table + + ' + subtask_index: 347 +- subtask: 'Put the green build block on the blue build block + + ' + 
subtask_index: 348 +- subtask: 'Put the green build block on the blue build block + + ' + subtask_index: 349 +- subtask: Put the blue cube in the front of the red cube + subtask_index: 350 +- subtask: 'Put the orange build block on the left of the red build block + + ' + subtask_index: 351 +- subtask: Put the yellow cube on the right of the red cube with the right gripper + subtask_index: 352 +- subtask: 'Put the yellow build block on the right of the orange build block + + ' + subtask_index: 353 +- subtask: Grasp the green block with right gripper + subtask_index: 354 +- subtask: 'Put the blue build block on the behind of the orange build block + + ' + subtask_index: 355 +- subtask: 'Put the orange build block on the right of the blue build block + + ' + subtask_index: 356 +- subtask: 'Put the blue build block on the right of the orange build block + + ' + subtask_index: 357 +- subtask: Put the red cube on the left of the yellow cube with the right gripper + subtask_index: 358 +- subtask: 'Put the orange build block on the right of the blue build block + + ' + subtask_index: 359 +- subtask: Put the blue cube on the left of the red cube + subtask_index: 360 +- subtask: 'Put the orange build block on the right of the build block + + ' + subtask_index: 361 +- subtask: 'Put the orange build block on the right of the yellow build block + + ' + subtask_index: 362 +- subtask: Put the orange cube in the front of the red cube + subtask_index: 363 +- subtask: Put the red cube in the front of the yellow cube + subtask_index: 364 +- subtask: Put the yellow cube on the left of the green cube with the left gripper + subtask_index: 365 +- subtask: Put the red cube on the right of the blue cube with the right gripper + subtask_index: 366 +- subtask: 'Put the green build block on the right of the orange build block + + ' + subtask_index: 367 +- subtask: Put the red cube on the behind of the blue cube + subtask_index: 368 +- subtask: 'Put the yellow build block in the center of the 
table + + ' + subtask_index: 369 +- subtask: Put the yellow cube on the orange cube + subtask_index: 370 +- subtask: 'Put the blue build block in the center of the table + + ' + subtask_index: 371 +- subtask: Put the blue cube on the left of the red cube with the left gripper + subtask_index: 372 +- subtask: 'Put the blue build block on the behind of the yellow build block + + ' + subtask_index: 373 +- subtask: 'Put the green build block on the right of the orange build block + + ' + subtask_index: 374 +- subtask: Put the yellow cube in the front of the red cube + subtask_index: 375 +- subtask: 'Put the yellow build block on the behind of the green build block + + ' + subtask_index: 376 +- subtask: Put the orange cube on the right of the red cube with the right gripper + subtask_index: 377 +- subtask: Put the orange cube on the front of the blue cube with the right gripper + subtask_index: 378 +- subtask: End + subtask_index: 379 +- subtask: Put the green cube on the behind of the blue cube + subtask_index: 380 +- subtask: 'Put blue the build block on the right of the red build block + + ' + subtask_index: 381 +- subtask: Put the blue cube on the left of the red cube with the right gripper + subtask_index: 382 +- subtask: 'null' + subtask_index: 383 +atomic_actions: +- grasp +- lift +- lower +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. 
+sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 672 + total_frames: 643028 + fps: 30 + total_tasks: 384 + total_videos: 2016 + total_chunks: 1 + chunks_size: 1000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 3.97 GB +frame_num: 643028 +dataset_size: 3.97 GB +data_structure: "Agilex_Cobot_Magic_Agilex_Cobot_Magic_stack_block_qced_hardlink/\n\ + |-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ + | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ + | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ + \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ + | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ + \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n\ + | |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| \ + \ |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n\ + | `-- episode_000011.parquet\n| `-- ... 
(660 more entries)\n|-- meta\n\ + | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ + \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_head_rgb\n\ + \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:671 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 26 + names: 
+ - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 
+ eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_storage_bread_basket.yaml b/dataset_info/Agilex_Cobot_Magic_storage_bread_basket.yaml index 
5fb6e5f4b4ebd53089c3cc59a2385f797b85ef31..0e2771ea87d3ee33debffd82bddcdd2754720ad4 100644 --- a/dataset_info/Agilex_Cobot_Magic_storage_bread_basket.yaml +++ b/dataset_info/Agilex_Cobot_Magic_storage_bread_basket.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -51,29 +51,37 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: Put the bakery in the basket. +task_instruction: +- Put the bakery in the basket. 
sub_tasks: -- Abnormal -- Grasp the bread with right gripper -- End -- Grasp the bread with left gripper -- Place the bread in the basket with right gripper -- Place the bread in the basket with left gripper -- 'null' +- subtask: Abnormal + subtask_index: 0 +- subtask: Grasp the bread with right gripper + subtask_index: 1 +- subtask: End + subtask_index: 2 +- subtask: Grasp the bread with left gripper + subtask_index: 3 +- subtask: Place the bread in the basket with right gripper + subtask_index: 4 +- subtask: Place the bread in the basket with left gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - lift - lower -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -81,13 +89,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -95,8 +100,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 98 total_frames: 32042 fps: 30 @@ -181,11 +185,9 @@ data_structure: 'Agilex_Cobot_Magic_storage_bread_basket_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 
+splits: train: 0:97 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -458,7 +460,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -466,7 +468,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -493,191 +494,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. 
- extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_storage_bread_basket - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - Put the bakery in the basket. - sub_tasks: - - subtask: Abnormal - subtask_index: 0 - - subtask: Grasp the bread with right gripper - subtask_index: 1 - - subtask: End - subtask_index: 2 - - subtask: Grasp the bread with left gripper - subtask_index: 3 - - subtask: Place the bread in the basket with right gripper - subtask_index: 4 - - subtask: Place the bread in the basket with left gripper - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - lift - - lower - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 32042 - dataset_size: 354.51 MB - data_structure: 'Agilex_Cobot_Magic_storage_bread_basket_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (86 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_storage_fruit_basket.yaml b/dataset_info/Agilex_Cobot_Magic_storage_fruit_basket.yaml new file mode 100644 index 0000000000000000000000000000000000000000..c33a6b8f833f01ead14c7bef79ede8e45033199d --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_storage_fruit_basket.yaml @@ -0,0 +1,471 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_storage_fruit_basket +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: household + level2: living_room + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. 
+objects: +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: brown_basket + level1: home_storage + level2: brown_basket + level3: null + level4: null + level5: null +- object_name: banana + level1: food + level2: banana + level3: null + level4: null + level5: null +- object_name: bread + level1: food + level2: bread + level3: null + level4: null + level5: null +- object_name: apple + level1: food + level2: apple + level3: null + level4: null + level5: null +- object_name: laundry_detergent + level1: daily_necessities + level2: laundry_detergent + level3: null + level4: null + level5: null +- object_name: glass_cup + level1: kitchen_supplies + level2: glass_cup + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- randomly pick up an item from the table and put it in the basket. +sub_tasks: +- subtask: Place the apple into the basket with the right gripper + subtask_index: 0 +- subtask: Place the banana into the basket with the right gripper + subtask_index: 1 +- subtask: Grasp the round bread with the left gripper + subtask_index: 2 +- subtask: Grasp the apple with the right gripper + subtask_index: 3 +- subtask: Place the avocado into the basket with the right gripper + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: Grasp the banana with the right gripper + subtask_index: 6 +- subtask: Grasp the avocado with the right gripper + subtask_index: 7 +- subtask: Place the round bread into the basket with the left gripper + subtask_index: 8 +- subtask: 'null' + subtask_index: 9 +atomic_actions: +- grasp +- lift +- lower +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. 
+sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 199 + total_frames: 71385 + fps: 30 + total_tasks: 10 + total_videos: 597 + total_chunks: 1 + chunks_size: 1000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 1.01 GB +frame_num: 71385 +dataset_size: 1.01 GB +data_structure: "Agilex_Cobot_Magic_storage_fruit_basket_qced_hardlink/\n|-- annotations\n\ + | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n|\ + \ |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ + | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ + \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ + | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ + \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n\ + | |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| \ + \ |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n\ + | `-- episode_000011.parquet\n| `-- ... 
(187 more entries)\n|-- meta\n\ + | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ + \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_head_rgb\n\ + \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:198 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 26 + names: 
+ - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 
+ eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_storage_fruit_bowl.yaml b/dataset_info/Agilex_Cobot_Magic_storage_fruit_bowl.yaml index 
be78f21ddd5476ae84bac4da053049f6b1759aaf..63c8897117d88b0cfe7ece7935fb9068fae6c379 100644 --- a/dataset_info/Agilex_Cobot_Magic_storage_fruit_bowl.yaml +++ b/dataset_info/Agilex_Cobot_Magic_storage_fruit_bowl.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -63,36 +63,51 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the three fruits on the table into a bowl. +task_instruction: +- put the three fruits on the table into a bowl. 
sub_tasks: -- Grasp the pomegranate with left gripper -- Place the pomegranate in the blue bowl with left gripper -- Grasp the pomegranate with right gripper -- Place the green lemon in the blue bowl with right gripper -- Grasp the green lemon with left gripper -- Place the pomegranate in the blue bowl with right gripper -- Place the mango in the blue bowl with right gripper -- Grasp the mango with right gripper -- End -- Place the mango in the blue bowl with left gripper -- Grasp the green lemon with right gripper -- Place the green lemon in the blue bowl with left gripper -- Grasp the mango with left gripper -- 'null' +- subtask: Grasp the pomegranate with left gripper + subtask_index: 0 +- subtask: Place the pomegranate in the blue bowl with left gripper + subtask_index: 1 +- subtask: Grasp the pomegranate with right gripper + subtask_index: 2 +- subtask: Place the green lemon in the blue bowl with right gripper + subtask_index: 3 +- subtask: Grasp the green lemon with left gripper + subtask_index: 4 +- subtask: Place the pomegranate in the blue bowl with right gripper + subtask_index: 5 +- subtask: Place the mango in the blue bowl with right gripper + subtask_index: 6 +- subtask: Grasp the mango with right gripper + subtask_index: 7 +- subtask: End + subtask_index: 8 +- subtask: Place the mango in the blue bowl with left gripper + subtask_index: 9 +- subtask: Grasp the green lemon with right gripper + subtask_index: 10 +- subtask: Place the green lemon in the blue bowl with left gripper + subtask_index: 11 +- subtask: Grasp the mango with left gripper + subtask_index: 12 +- subtask: 'null' + subtask_index: 13 atomic_actions: - grasp - lift - lower -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -100,13 +115,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -114,8 +126,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 100 total_frames: 78670 fps: 30 @@ -200,11 +211,9 @@ data_structure: 'Agilex_Cobot_Magic_storage_fruit_bowl_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:99 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -477,7 +486,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -485,7 +494,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. 
support_info: For technical support, please open an issue on our GitHub repository. @@ -512,205 +520,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_storage_fruit_bowl - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the three fruits on the table into a bowl. 
- sub_tasks: - - subtask: Grasp the pomegranate with left gripper - subtask_index: 0 - - subtask: Place the pomegranate in the blue bowl with left gripper - subtask_index: 1 - - subtask: Grasp the pomegranate with right gripper - subtask_index: 2 - - subtask: Place the green lemon in the blue bowl with right gripper - subtask_index: 3 - - subtask: Grasp the green lemon with left gripper - subtask_index: 4 - - subtask: Place the pomegranate in the blue bowl with right gripper - subtask_index: 5 - - subtask: Place the mango in the blue bowl with right gripper - subtask_index: 6 - - subtask: Grasp the mango with right gripper - subtask_index: 7 - - subtask: End - subtask_index: 8 - - subtask: Place the mango in the blue bowl with left gripper - subtask_index: 9 - - subtask: Grasp the green lemon with right gripper - subtask_index: 10 - - subtask: Place the green lemon in the blue bowl with left gripper - subtask_index: 11 - - subtask: Grasp the mango with left gripper - subtask_index: 12 - - subtask: 'null' - subtask_index: 13 - atomic_actions: - - grasp - - lift - - lower - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 78670 - dataset_size: 740.83 MB - data_structure: 'Agilex_Cobot_Magic_storage_fruit_bowl_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (88 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_storage_lemon_mango.yaml b/dataset_info/Agilex_Cobot_Magic_storage_lemon_mango.yaml index 97fff2881fc71f94fa662ce4723f20932fec3fec..2711ad74df603d389de529a95688ae8a32f1ec6c 100644 --- a/dataset_info/Agilex_Cobot_Magic_storage_lemon_mango.yaml +++ b/dataset_info/Agilex_Cobot_Magic_storage_lemon_mango.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -75,33 +75,43 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: Randomly grab green lemon or mangoes from the table and put them - in a basket. +task_instruction: +- Randomly grab green lemon or mangoes from the table and put them in a basket. 
sub_tasks: -- Place the mango into the basket with the right gripper -- Grasp the mango with the right gripper -- Grasp the Lemon with the right gripper -- End -- Grasp the mango with the left gripper -- Place the Lemon into the basket with the left gripper -- Place the mango into the basket with the left gripper -- Grasp the Lemon with the left gripper -- Place the Lemon into the basket with the right gripper -- 'null' +- subtask: Place the mango into the basket with the right gripper + subtask_index: 0 +- subtask: Grasp the mango with the right gripper + subtask_index: 1 +- subtask: Grasp the Lemon with the right gripper + subtask_index: 2 +- subtask: End + subtask_index: 3 +- subtask: Grasp the mango with the left gripper + subtask_index: 4 +- subtask: Place the Lemon into the basket with the left gripper + subtask_index: 5 +- subtask: Place the mango into the basket with the left gripper + subtask_index: 6 +- subtask: Grasp the Lemon with the left gripper + subtask_index: 7 +- subtask: Place the Lemon into the basket with the right gripper + subtask_index: 8 +- subtask: 'null' + subtask_index: 9 atomic_actions: - grasp - lift - lower -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -109,13 +119,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -123,8 +130,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 100 total_frames: 28424 fps: 30 @@ -225,11 +231,9 @@ data_structure: 'Agilex_Cobot_Magic_storage_lemon_mango_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:99 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -502,7 +506,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -510,7 +514,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. 
support_info: For technical support, please open an issue on our GitHub repository. @@ -537,213 +540,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_storage_lemon_mango - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: office_workspace - level2: office - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - Randomly grab green lemon or mangoes from the table and put them in a basket. 
- sub_tasks: - - subtask: Place the mango into the basket with the right gripper - subtask_index: 0 - - subtask: Grasp the mango with the right gripper - subtask_index: 1 - - subtask: Grasp the Lemon with the right gripper - subtask_index: 2 - - subtask: End - subtask_index: 3 - - subtask: Grasp the mango with the left gripper - subtask_index: 4 - - subtask: Place the Lemon into the basket with the left gripper - subtask_index: 5 - - subtask: Place the mango into the basket with the left gripper - subtask_index: 6 - - subtask: Grasp the Lemon with the left gripper - subtask_index: 7 - - subtask: Place the Lemon into the basket with the right gripper - subtask_index: 8 - - subtask: 'null' - subtask_index: 9 - atomic_actions: - - grasp - - lift - - lower - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 28424 - dataset_size: 362.05 MB - data_structure: 'Agilex_Cobot_Magic_storage_lemon_mango_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- backup - - | |-- data - - | | `-- chunk-000 - - | `-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - 
| |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (88 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_storage_object.yaml b/dataset_info/Agilex_Cobot_Magic_storage_object.yaml index 5117ca725023e559f9cffe48099ac145ad94d5a0..2b745a4661de3feb9c082d49d0068d1ea36f6cd5 100644 --- a/dataset_info/Agilex_Cobot_Magic_storage_object.yaml +++ b/dataset_info/Agilex_Cobot_Magic_storage_object.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: white_table_cloths level1: laboratory_supplies level2: white_table_cloths @@ -363,164 +363,308 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up an item with a gripper and place it in a random container - on the table. +task_instruction: +- pick up an item with a gripper and place it in a random container on the table. 
sub_tasks: -- Grasp the pink towel with the right gripper -- Place the XX into the purple pot with the left gripper -- Grasp the yellow lemon with the right gripper -- Grasp the AD milk with the left gripper -- Place the plush banana into the pink bowl with the left gripper -- Place the beer into the purple pot with the right gripper -- Place the green lemon into the pink bowl with the right gripper -- Grasp the red bull with the right gripper -- Grasp the beer with the right gripper -- Place the hollow ring bread into the red pot with the left gripper -- Grasp the banana with the left gripper -- Place the yellow lemon into the pink bowl with the left gripper -- Place the yellow lemon into the blue bowl with the right gripper -- Grasp the pink laundry detergent with the left gripper -- Place the eggplant into the purple pot with the right gripper -- Grasp the apple with the right gripper -- Place the peach into the red pot with the left gripper -- Place the red bull into the blue bowl with the left gripper -- Place the blue garbage bag into the pen container with the right gripper -- Grasp the shampoo with the right gripper -- Place the plush banana into the pink bowl with the right gripper -- Place the banana into the red pot with the left gripper -- Place the apple into the red pot with the left gripper -- Place the round chewing gum into the pen container with the left gripper -- Place the beer into the pink pot with the left gripper -- Place the yellow cake into the pink bowl with the left gripper -- Place the tin into the red pot with the right gripper -- Place the blue garbage bag into the purple pot with the left gripper -- Place the croissant into the purple pot with the right gripper -- Place the shampoo into the blue bowl with the left gripper -- Grasp the yellow lemon with the left gripper -- Place the eyeglass case into the purple pot with the right gripper -- Grasp the white blackboard erasure with the left gripper -- Place the yogurt into the pink 
bowl with the right gripper -- Place the blue garbage bag into the pink pot with the right gripper -- Place the green lemon into the blue bowl with the left gripper -- Place the apple into the pink pot with the left gripper -- Grasp the eyeglass case with the right gripper -- Place the yellow cake into the blue bowl with the left gripper -- Place the red bull into the pen container with the right gripper -- Place the croissant into the pink bowl with the left gripper -- Grasp the shower sphere with the left gripper -- Grasp the yogurt with the right gripper -- Grasp the croissant with the left gripper -- Grasp the tin with the left gripper -- Grasp the long bread with the right gripper -- Grasp the hard facial cleanser with the left gripper -- Place the mango into the pink bowl with the left gripper -- Grasp the plush banana with the left gripper -- Place the pink laundry detergent into the pen container with the left gripper -- Place the soft facial cleanser into the pink bowl with the right gripper -- Grasp the peach with the right gripper -- Grasp the mango with the left gripper -- Place the round chewing gum into the pink pot with the left gripper -- Place the white blackboard erasure into the pink bowl with the left gripper -- Place the peach into the pink bowl with the left gripper -- Grasp the pear with the left gripper -- Place the tin into the pink pot with the left gripper -- Place the cleaning agent into the pen container with the left gripper -- Place the shampoo into the blue bowl with the right gripper -- Grasp the hollow ring bread with the right gripper -- Place the shower sphere into the pink bowl with the right gripper -- Place the pear into the purple pot with the left gripper -- Place the round chewing gum into the pen container with the right gripper -- Grasp the peach with the left gripper -- Grasp the long bread with the left gripper -- Grasp the eggplant with the right gripper -- Place the blue garbage bag into the pink bowl with the right 
gripper -- Place the croissant into the red pot with the left gripper -- Grasp the green lemon with the right gripper -- Place the beer into the blue bowl with the left gripper -- Place the shampoo into the purple pot with the right gripper -- Grasp the shampoo with the left gripper -- Place the pink towel into the red pot with the left gripper -- Place the peach into the purple pot with the right gripper -- End -- Place the mint candy into the pink bowl with the right gripper -- Place the eggplant into the pen container with the right gripper -- Grasp the beer with the left gripper -- Place the AD milk into the pen container with the left gripper -- Place the banana into the pink bowl with the left gripper -- Grasp the tin with the right gripper -- Place the tin into the pink pot with the right gripper -- Place the AD milk into the pink pot with the left gripper -- Place the shower sphere into the purple pot with the right gripper -- Place the long bread into the red pot with the left gripper -- Grasp the yellow cake with the right gripper -- Grasp the croissant with the right gripper -- Grasp the red bull with the left gripper -- Place the peach into the pink bowl with the right gripper -- Place the tin into the pen container with the right gripper -- Place the long bread into the purple pot with the right gripper -- Place the orange into the pink pot with the left gripper -- Place the yellow cake into the blue bowl with the right gripper -- Grasp the pink towel with the left gripper -- Place the croissant into the pink bowl with the right gripper -- Place the hard facial cleanser into the pink bowl with the left gripper -- Place the round chewing gum into the red pot with the right gripper -- Grasp the blue garbage bag with the right gripper -- Grasp the tin with the right gripper -- Grasp the orange with the left gripper -- Grasp the blue garbage bag with the left gripper -- Place the shower sphere into the pink bowl with the left gripper -- Place the yogurt 
into the red pot with the right gripper -- Place the peach into the pink pot with the right gripper -- Grasp the hollow ring bread with the left gripper -- Grasp the apple with the left gripper -- Place the tin into the red pot with the left gripper -- Place the beer into the red pot with the left gripper -- Place the blue garbage bag into the pink bowl with the left gripper -- Place the mango into the purple pot with the right gripper -- Place the tin into the blue bowl with the left gripper -- Place the apple into the blue bowl with the left gripper -- Grasp the shower sphere with the right gripper -- Place the red bull into the pink bowl with the left gripper -- Grasp the yellow cake with the left gripper -- Grasp the round chewing gum with the right gripper -- Grasp the cleaning agent with the left gripper -- Place the coke into the pink pot with the left gripper -- Place the pink towelinto the pink bowl with the right gripper -- Grasp the round chewing gum with the left gripper -- Place the green lemon into the purple pot with the left gripper -- Place the hollow ring bread into the purple pot with the right gripper -- Place the peach into the purple pot with the left gripper -- Place the yellow cake into the pen container with the right gripper -- Place the apple into the pink pot with the right gripper -- Place the red bull into the pink pot with the right gripper -- Place the tin into the purple pot with the left gripper -- Place the yellow lemon into the pink bowl with the right gripper -- Grasp the mango with the right gripper -- Place the banana into the red pot with the right gripper -- Grasp the mint candy with the right gripper -- Place the apple into the pink bowl with the left gripper -- Grasp the soft facial cleanser with the right gripper -- Grasp the green lemon with the left gripper -- Place the red bull into the red pot with the left gripper -- Grasp the coke with the left gripper -- Place the cleaning agent into the pink bowl with the left 
gripper -- Grasp the banana with the right gripper -- Grasp the plush banana with the right gripper -- 'null' +- subtask: Grasp the pink towel with the right gripper + subtask_index: 0 +- subtask: Place the XX into the purple pot with the left gripper + subtask_index: 1 +- subtask: Grasp the yellow lemon with the right gripper + subtask_index: 2 +- subtask: Grasp the AD milk with the left gripper + subtask_index: 3 +- subtask: Place the plush banana into the pink bowl with the left gripper + subtask_index: 4 +- subtask: Place the beer into the purple pot with the right gripper + subtask_index: 5 +- subtask: Place the green lemon into the pink bowl with the right gripper + subtask_index: 6 +- subtask: Grasp the red bull with the right gripper + subtask_index: 7 +- subtask: Grasp the beer with the right gripper + subtask_index: 8 +- subtask: Place the hollow ring bread into the red pot with the left gripper + subtask_index: 9 +- subtask: Grasp the banana with the left gripper + subtask_index: 10 +- subtask: Place the yellow lemon into the pink bowl with the left gripper + subtask_index: 11 +- subtask: Place the yellow lemon into the blue bowl with the right gripper + subtask_index: 12 +- subtask: Grasp the pink laundry detergent with the left gripper + subtask_index: 13 +- subtask: Place the eggplant into the purple pot with the right gripper + subtask_index: 14 +- subtask: Grasp the apple with the right gripper + subtask_index: 15 +- subtask: Place the peach into the red pot with the left gripper + subtask_index: 16 +- subtask: Place the red bull into the blue bowl with the left gripper + subtask_index: 17 +- subtask: Place the blue garbage bag into the pen container with the right gripper + subtask_index: 18 +- subtask: Grasp the shampoo with the right gripper + subtask_index: 19 +- subtask: Place the plush banana into the pink bowl with the right gripper + subtask_index: 20 +- subtask: Place the banana into the red pot with the left gripper + subtask_index: 21 +- 
subtask: Place the apple into the red pot with the left gripper + subtask_index: 22 +- subtask: Place the round chewing gum into the pen container with the left gripper + subtask_index: 23 +- subtask: Place the beer into the pink pot with the left gripper + subtask_index: 24 +- subtask: Place the yellow cake into the pink bowl with the left gripper + subtask_index: 25 +- subtask: Place the tin into the red pot with the right gripper + subtask_index: 26 +- subtask: Place the blue garbage bag into the purple pot with the left gripper + subtask_index: 27 +- subtask: Place the croissant into the purple pot with the right gripper + subtask_index: 28 +- subtask: Place the shampoo into the blue bowl with the left gripper + subtask_index: 29 +- subtask: Grasp the yellow lemon with the left gripper + subtask_index: 30 +- subtask: Place the eyeglass case into the purple pot with the right gripper + subtask_index: 31 +- subtask: Grasp the white blackboard erasure with the left gripper + subtask_index: 32 +- subtask: Place the yogurt into the pink bowl with the right gripper + subtask_index: 33 +- subtask: Place the blue garbage bag into the pink pot with the right gripper + subtask_index: 34 +- subtask: Place the green lemon into the blue bowl with the left gripper + subtask_index: 35 +- subtask: Place the apple into the pink pot with the left gripper + subtask_index: 36 +- subtask: Grasp the eyeglass case with the right gripper + subtask_index: 37 +- subtask: Place the yellow cake into the blue bowl with the left gripper + subtask_index: 38 +- subtask: Place the red bull into the pen container with the right gripper + subtask_index: 39 +- subtask: Place the croissant into the pink bowl with the left gripper + subtask_index: 40 +- subtask: Grasp the shower sphere with the left gripper + subtask_index: 41 +- subtask: Grasp the yogurt with the right gripper + subtask_index: 42 +- subtask: Grasp the croissant with the left gripper + subtask_index: 43 +- subtask: Grasp the tin 
with the left gripper + subtask_index: 44 +- subtask: Grasp the long bread with the right gripper + subtask_index: 45 +- subtask: Grasp the hard facial cleanser with the left gripper + subtask_index: 46 +- subtask: Place the mango into the pink bowl with the left gripper + subtask_index: 47 +- subtask: Grasp the plush banana with the left gripper + subtask_index: 48 +- subtask: Place the pink laundry detergent into the pen container with the left + gripper + subtask_index: 49 +- subtask: Place the soft facial cleanser into the pink bowl with the right gripper + subtask_index: 50 +- subtask: Grasp the peach with the right gripper + subtask_index: 51 +- subtask: Grasp the mango with the left gripper + subtask_index: 52 +- subtask: Place the round chewing gum into the pink pot with the left gripper + subtask_index: 53 +- subtask: Place the white blackboard erasure into the pink bowl with the left gripper + subtask_index: 54 +- subtask: Place the peach into the pink bowl with the left gripper + subtask_index: 55 +- subtask: Grasp the pear with the left gripper + subtask_index: 56 +- subtask: Place the tin into the pink pot with the left gripper + subtask_index: 57 +- subtask: Place the cleaning agent into the pen container with the left gripper + subtask_index: 58 +- subtask: Place the shampoo into the blue bowl with the right gripper + subtask_index: 59 +- subtask: Grasp the hollow ring bread with the right gripper + subtask_index: 60 +- subtask: Place the shower sphere into the pink bowl with the right gripper + subtask_index: 61 +- subtask: Place the pear into the purple pot with the left gripper + subtask_index: 62 +- subtask: Place the round chewing gum into the pen container with the right gripper + subtask_index: 63 +- subtask: Grasp the peach with the left gripper + subtask_index: 64 +- subtask: Grasp the long bread with the left gripper + subtask_index: 65 +- subtask: Grasp the eggplant with the right gripper + subtask_index: 66 +- subtask: Place the blue 
garbage bag into the pink bowl with the right gripper + subtask_index: 67 +- subtask: Place the croissant into the red pot with the left gripper + subtask_index: 68 +- subtask: Grasp the green lemon with the right gripper + subtask_index: 69 +- subtask: Place the beer into the blue bowl with the left gripper + subtask_index: 70 +- subtask: Place the shampoo into the purple pot with the right gripper + subtask_index: 71 +- subtask: Grasp the shampoo with the left gripper + subtask_index: 72 +- subtask: Place the pink towel into the red pot with the left gripper + subtask_index: 73 +- subtask: Place the peach into the purple pot with the right gripper + subtask_index: 74 +- subtask: End + subtask_index: 75 +- subtask: Place the mint candy into the pink bowl with the right gripper + subtask_index: 76 +- subtask: Place the eggplant into the pen container with the right gripper + subtask_index: 77 +- subtask: Grasp the beer with the left gripper + subtask_index: 78 +- subtask: Place the AD milk into the pen container with the left gripper + subtask_index: 79 +- subtask: Place the banana into the pink bowl with the left gripper + subtask_index: 80 +- subtask: 'Grasp the tin with the right gripper + + ' + subtask_index: 81 +- subtask: Place the tin into the pink pot with the right gripper + subtask_index: 82 +- subtask: Place the AD milk into the pink pot with the left gripper + subtask_index: 83 +- subtask: Place the shower sphere into the purple pot with the right gripper + subtask_index: 84 +- subtask: Place the long bread into the red pot with the left gripper + subtask_index: 85 +- subtask: Grasp the yellow cake with the right gripper + subtask_index: 86 +- subtask: Grasp the croissant with the right gripper + subtask_index: 87 +- subtask: Grasp the red bull with the left gripper + subtask_index: 88 +- subtask: Place the peach into the pink bowl with the right gripper + subtask_index: 89 +- subtask: Place the tin into the pen container with the right gripper + 
subtask_index: 90 +- subtask: Place the long bread into the purple pot with the right gripper + subtask_index: 91 +- subtask: Place the orange into the pink pot with the left gripper + subtask_index: 92 +- subtask: Place the yellow cake into the blue bowl with the right gripper + subtask_index: 93 +- subtask: Grasp the pink towel with the left gripper + subtask_index: 94 +- subtask: Place the croissant into the pink bowl with the right gripper + subtask_index: 95 +- subtask: Place the hard facial cleanser into the pink bowl with the left gripper + subtask_index: 96 +- subtask: Place the round chewing gum into the red pot with the right gripper + subtask_index: 97 +- subtask: Grasp the blue garbage bag with the right gripper + subtask_index: 98 +- subtask: Grasp the tin with the right gripper + subtask_index: 99 +- subtask: Grasp the orange with the left gripper + subtask_index: 100 +- subtask: Grasp the blue garbage bag with the left gripper + subtask_index: 101 +- subtask: Place the shower sphere into the pink bowl with the left gripper + subtask_index: 102 +- subtask: Place the yogurt into the red pot with the right gripper + subtask_index: 103 +- subtask: Place the peach into the pink pot with the right gripper + subtask_index: 104 +- subtask: Grasp the hollow ring bread with the left gripper + subtask_index: 105 +- subtask: Grasp the apple with the left gripper + subtask_index: 106 +- subtask: Place the tin into the red pot with the left gripper + subtask_index: 107 +- subtask: Place the beer into the red pot with the left gripper + subtask_index: 108 +- subtask: Place the blue garbage bag into the pink bowl with the left gripper + subtask_index: 109 +- subtask: Place the mango into the purple pot with the right gripper + subtask_index: 110 +- subtask: Place the tin into the blue bowl with the left gripper + subtask_index: 111 +- subtask: Place the apple into the blue bowl with the left gripper + subtask_index: 112 +- subtask: Grasp the shower sphere with the 
right gripper + subtask_index: 113 +- subtask: Place the red bull into the pink bowl with the left gripper + subtask_index: 114 +- subtask: Grasp the yellow cake with the left gripper + subtask_index: 115 +- subtask: Grasp the round chewing gum with the right gripper + subtask_index: 116 +- subtask: Grasp the cleaning agent with the left gripper + subtask_index: 117 +- subtask: Place the coke into the pink pot with the left gripper + subtask_index: 118 +- subtask: Place the pink towelinto the pink bowl with the right gripper + subtask_index: 119 +- subtask: Grasp the round chewing gum with the left gripper + subtask_index: 120 +- subtask: Place the green lemon into the purple pot with the left gripper + subtask_index: 121 +- subtask: Place the hollow ring bread into the purple pot with the right gripper + subtask_index: 122 +- subtask: Place the peach into the purple pot with the left gripper + subtask_index: 123 +- subtask: Place the yellow cake into the pen container with the right gripper + subtask_index: 124 +- subtask: Place the apple into the pink pot with the right gripper + subtask_index: 125 +- subtask: Place the red bull into the pink pot with the right gripper + subtask_index: 126 +- subtask: Place the tin into the purple pot with the left gripper + subtask_index: 127 +- subtask: Place the yellow lemon into the pink bowl with the right gripper + subtask_index: 128 +- subtask: Grasp the mango with the right gripper + subtask_index: 129 +- subtask: Place the banana into the red pot with the right gripper + subtask_index: 130 +- subtask: Grasp the mint candy with the right gripper + subtask_index: 131 +- subtask: Place the apple into the pink bowl with the left gripper + subtask_index: 132 +- subtask: Grasp the soft facial cleanser with the right gripper + subtask_index: 133 +- subtask: Grasp the green lemon with the left gripper + subtask_index: 134 +- subtask: Place the red bull into the red pot with the left gripper + subtask_index: 135 +- subtask: Grasp 
the coke with the left gripper + subtask_index: 136 +- subtask: Place the cleaning agent into the pink bowl with the left gripper + subtask_index: 137 +- subtask: Grasp the banana with the right gripper + subtask_index: 138 +- subtask: Grasp the plush banana with the right gripper + subtask_index: 139 +- subtask: 'null' + subtask_index: 140 atomic_actions: - grasp - lift - lower -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -528,13 +672,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -542,8 +683,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 99 total_frames: 49237 fps: 30 @@ -644,11 +784,9 @@ data_structure: 'Agilex_Cobot_Magic_storage_object_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:98 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -921,7 +1059,7 @@ features: &id012 dtype: 
float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -929,7 +1067,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -956,478 +1093,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_storage_object - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up an item with a gripper and place it in a random container on the table. 
- sub_tasks: - - subtask: Grasp the pink towel with the right gripper - subtask_index: 0 - - subtask: Place the XX into the purple pot with the left gripper - subtask_index: 1 - - subtask: Grasp the yellow lemon with the right gripper - subtask_index: 2 - - subtask: Grasp the AD milk with the left gripper - subtask_index: 3 - - subtask: Place the plush banana into the pink bowl with the left gripper - subtask_index: 4 - - subtask: Place the beer into the purple pot with the right gripper - subtask_index: 5 - - subtask: Place the green lemon into the pink bowl with the right gripper - subtask_index: 6 - - subtask: Grasp the red bull with the right gripper - subtask_index: 7 - - subtask: Grasp the beer with the right gripper - subtask_index: 8 - - subtask: Place the hollow ring bread into the red pot with the left gripper - subtask_index: 9 - - subtask: Grasp the banana with the left gripper - subtask_index: 10 - - subtask: Place the yellow lemon into the pink bowl with the left gripper - subtask_index: 11 - - subtask: Place the yellow lemon into the blue bowl with the right gripper - subtask_index: 12 - - subtask: Grasp the pink laundry detergent with the left gripper - subtask_index: 13 - - subtask: Place the eggplant into the purple pot with the right gripper - subtask_index: 14 - - subtask: Grasp the apple with the right gripper - subtask_index: 15 - - subtask: Place the peach into the red pot with the left gripper - subtask_index: 16 - - subtask: Place the red bull into the blue bowl with the left gripper - subtask_index: 17 - - subtask: Place the blue garbage bag into the pen container with the right gripper - subtask_index: 18 - - subtask: Grasp the shampoo with the right gripper - subtask_index: 19 - - subtask: Place the plush banana into the pink bowl with the right gripper - subtask_index: 20 - - subtask: Place the banana into the red pot with the left gripper - subtask_index: 21 - - subtask: Place the apple into the red pot with the left gripper - 
subtask_index: 22 - - subtask: Place the round chewing gum into the pen container with the left gripper - subtask_index: 23 - - subtask: Place the beer into the pink pot with the left gripper - subtask_index: 24 - - subtask: Place the yellow cake into the pink bowl with the left gripper - subtask_index: 25 - - subtask: Place the tin into the red pot with the right gripper - subtask_index: 26 - - subtask: Place the blue garbage bag into the purple pot with the left gripper - subtask_index: 27 - - subtask: Place the croissant into the purple pot with the right gripper - subtask_index: 28 - - subtask: Place the shampoo into the blue bowl with the left gripper - subtask_index: 29 - - subtask: Grasp the yellow lemon with the left gripper - subtask_index: 30 - - subtask: Place the eyeglass case into the purple pot with the right gripper - subtask_index: 31 - - subtask: Grasp the white blackboard erasure with the left gripper - subtask_index: 32 - - subtask: Place the yogurt into the pink bowl with the right gripper - subtask_index: 33 - - subtask: Place the blue garbage bag into the pink pot with the right gripper - subtask_index: 34 - - subtask: Place the green lemon into the blue bowl with the left gripper - subtask_index: 35 - - subtask: Place the apple into the pink pot with the left gripper - subtask_index: 36 - - subtask: Grasp the eyeglass case with the right gripper - subtask_index: 37 - - subtask: Place the yellow cake into the blue bowl with the left gripper - subtask_index: 38 - - subtask: Place the red bull into the pen container with the right gripper - subtask_index: 39 - - subtask: Place the croissant into the pink bowl with the left gripper - subtask_index: 40 - - subtask: Grasp the shower sphere with the left gripper - subtask_index: 41 - - subtask: Grasp the yogurt with the right gripper - subtask_index: 42 - - subtask: Grasp the croissant with the left gripper - subtask_index: 43 - - subtask: Grasp the tin with the left gripper - subtask_index: 44 - - 
subtask: Grasp the long bread with the right gripper - subtask_index: 45 - - subtask: Grasp the hard facial cleanser with the left gripper - subtask_index: 46 - - subtask: Place the mango into the pink bowl with the left gripper - subtask_index: 47 - - subtask: Grasp the plush banana with the left gripper - subtask_index: 48 - - subtask: Place the pink laundry detergent into the pen container with the left - gripper - subtask_index: 49 - - subtask: Place the soft facial cleanser into the pink bowl with the right gripper - subtask_index: 50 - - subtask: Grasp the peach with the right gripper - subtask_index: 51 - - subtask: Grasp the mango with the left gripper - subtask_index: 52 - - subtask: Place the round chewing gum into the pink pot with the left gripper - subtask_index: 53 - - subtask: Place the white blackboard erasure into the pink bowl with the left gripper - subtask_index: 54 - - subtask: Place the peach into the pink bowl with the left gripper - subtask_index: 55 - - subtask: Grasp the pear with the left gripper - subtask_index: 56 - - subtask: Place the tin into the pink pot with the left gripper - subtask_index: 57 - - subtask: Place the cleaning agent into the pen container with the left gripper - subtask_index: 58 - - subtask: Place the shampoo into the blue bowl with the right gripper - subtask_index: 59 - - subtask: Grasp the hollow ring bread with the right gripper - subtask_index: 60 - - subtask: Place the shower sphere into the pink bowl with the right gripper - subtask_index: 61 - - subtask: Place the pear into the purple pot with the left gripper - subtask_index: 62 - - subtask: Place the round chewing gum into the pen container with the right gripper - subtask_index: 63 - - subtask: Grasp the peach with the left gripper - subtask_index: 64 - - subtask: Grasp the long bread with the left gripper - subtask_index: 65 - - subtask: Grasp the eggplant with the right gripper - subtask_index: 66 - - subtask: Place the blue garbage bag into the pink 
bowl with the right gripper - subtask_index: 67 - - subtask: Place the croissant into the red pot with the left gripper - subtask_index: 68 - - subtask: Grasp the green lemon with the right gripper - subtask_index: 69 - - subtask: Place the beer into the blue bowl with the left gripper - subtask_index: 70 - - subtask: Place the shampoo into the purple pot with the right gripper - subtask_index: 71 - - subtask: Grasp the shampoo with the left gripper - subtask_index: 72 - - subtask: Place the pink towel into the red pot with the left gripper - subtask_index: 73 - - subtask: Place the peach into the purple pot with the right gripper - subtask_index: 74 - - subtask: End - subtask_index: 75 - - subtask: Place the mint candy into the pink bowl with the right gripper - subtask_index: 76 - - subtask: Place the eggplant into the pen container with the right gripper - subtask_index: 77 - - subtask: Grasp the beer with the left gripper - subtask_index: 78 - - subtask: Place the AD milk into the pen container with the left gripper - subtask_index: 79 - - subtask: Place the banana into the pink bowl with the left gripper - subtask_index: 80 - - subtask: 'Grasp the tin with the right gripper - - ' - subtask_index: 81 - - subtask: Place the tin into the pink pot with the right gripper - subtask_index: 82 - - subtask: Place the AD milk into the pink pot with the left gripper - subtask_index: 83 - - subtask: Place the shower sphere into the purple pot with the right gripper - subtask_index: 84 - - subtask: Place the long bread into the red pot with the left gripper - subtask_index: 85 - - subtask: Grasp the yellow cake with the right gripper - subtask_index: 86 - - subtask: Grasp the croissant with the right gripper - subtask_index: 87 - - subtask: Grasp the red bull with the left gripper - subtask_index: 88 - - subtask: Place the peach into the pink bowl with the right gripper - subtask_index: 89 - - subtask: Place the tin into the pen container with the right gripper - 
subtask_index: 90 - - subtask: Place the long bread into the purple pot with the right gripper - subtask_index: 91 - - subtask: Place the orange into the pink pot with the left gripper - subtask_index: 92 - - subtask: Place the yellow cake into the blue bowl with the right gripper - subtask_index: 93 - - subtask: Grasp the pink towel with the left gripper - subtask_index: 94 - - subtask: Place the croissant into the pink bowl with the right gripper - subtask_index: 95 - - subtask: Place the hard facial cleanser into the pink bowl with the left gripper - subtask_index: 96 - - subtask: Place the round chewing gum into the red pot with the right gripper - subtask_index: 97 - - subtask: Grasp the blue garbage bag with the right gripper - subtask_index: 98 - - subtask: Grasp the tin with the right gripper - subtask_index: 99 - - subtask: Grasp the orange with the left gripper - subtask_index: 100 - - subtask: Grasp the blue garbage bag with the left gripper - subtask_index: 101 - - subtask: Place the shower sphere into the pink bowl with the left gripper - subtask_index: 102 - - subtask: Place the yogurt into the red pot with the right gripper - subtask_index: 103 - - subtask: Place the peach into the pink pot with the right gripper - subtask_index: 104 - - subtask: Grasp the hollow ring bread with the left gripper - subtask_index: 105 - - subtask: Grasp the apple with the left gripper - subtask_index: 106 - - subtask: Place the tin into the red pot with the left gripper - subtask_index: 107 - - subtask: Place the beer into the red pot with the left gripper - subtask_index: 108 - - subtask: Place the blue garbage bag into the pink bowl with the left gripper - subtask_index: 109 - - subtask: Place the mango into the purple pot with the right gripper - subtask_index: 110 - - subtask: Place the tin into the blue bowl with the left gripper - subtask_index: 111 - - subtask: Place the apple into the blue bowl with the left gripper - subtask_index: 112 - - subtask: Grasp the 
shower sphere with the right gripper - subtask_index: 113 - - subtask: Place the red bull into the pink bowl with the left gripper - subtask_index: 114 - - subtask: Grasp the yellow cake with the left gripper - subtask_index: 115 - - subtask: Grasp the round chewing gum with the right gripper - subtask_index: 116 - - subtask: Grasp the cleaning agent with the left gripper - subtask_index: 117 - - subtask: Place the coke into the pink pot with the left gripper - subtask_index: 118 - - subtask: Place the pink towelinto the pink bowl with the right gripper - subtask_index: 119 - - subtask: Grasp the round chewing gum with the left gripper - subtask_index: 120 - - subtask: Place the green lemon into the purple pot with the left gripper - subtask_index: 121 - - subtask: Place the hollow ring bread into the purple pot with the right gripper - subtask_index: 122 - - subtask: Place the peach into the purple pot with the left gripper - subtask_index: 123 - - subtask: Place the yellow cake into the pen container with the right gripper - subtask_index: 124 - - subtask: Place the apple into the pink pot with the right gripper - subtask_index: 125 - - subtask: Place the red bull into the pink pot with the right gripper - subtask_index: 126 - - subtask: Place the tin into the purple pot with the left gripper - subtask_index: 127 - - subtask: Place the yellow lemon into the pink bowl with the right gripper - subtask_index: 128 - - subtask: Grasp the mango with the right gripper - subtask_index: 129 - - subtask: Place the banana into the red pot with the right gripper - subtask_index: 130 - - subtask: Grasp the mint candy with the right gripper - subtask_index: 131 - - subtask: Place the apple into the pink bowl with the left gripper - subtask_index: 132 - - subtask: Grasp the soft facial cleanser with the right gripper - subtask_index: 133 - - subtask: Grasp the green lemon with the left gripper - subtask_index: 134 - - subtask: Place the red bull into the red pot with the left 
gripper - subtask_index: 135 - - subtask: Grasp the coke with the left gripper - subtask_index: 136 - - subtask: Place the cleaning agent into the pink bowl with the left gripper - subtask_index: 137 - - subtask: Grasp the banana with the right gripper - subtask_index: 138 - - subtask: Grasp the plush banana with the right gripper - subtask_index: 139 - - subtask: 'null' - subtask_index: 140 - atomic_actions: - - grasp - - lift - - lower - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 49237 - dataset_size: 855.52 MB - data_structure: 'Agilex_Cobot_Magic_storage_object_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- backup - - | |-- data - - | | `-- chunk-000 - - | `-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(87 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang 
Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_storage_object_basket.yaml b/dataset_info/Agilex_Cobot_Magic_storage_object_basket.yaml new file mode 100644 index 0000000000000000000000000000000000000000..24b37c339ce146795852cc4e93f8f5d623f21797 --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_storage_object_basket.yaml @@ -0,0 +1,463 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. 
+extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_storage_object_basket +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: household + level2: living_room + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: brown_basket + level1: home_storage + level2: brown_basket + level3: null + level4: null + level5: null +- object_name: banana + level1: food + level2: banana + level3: null + level4: null + level5: null +- object_name: bread + level1: food + level2: bread + level3: null + level4: null + level5: null +- object_name: apple + level1: food + level2: apple + level3: null + level4: null + level5: null +- object_name: avocado + level1: food + level2: avocado + level3: null + level4: null + level5: null +- object_name: glass_cup + level1: kitchen_supplies + level2: glass_cup + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- the right gripper storage the items on the table into the basket. 
+sub_tasks: +- subtask: Place the XX into the basket with the left gripper + subtask_index: 0 +- subtask: Grasp the XX with the right gripper + subtask_index: 1 +- subtask: Grasp the XX with the left gripper + subtask_index: 2 +- subtask: End + subtask_index: 3 +- subtask: Place the XX into the basket with the right gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 +atomic_actions: +- grasp +- lift +- lower +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 100 + total_frames: 38216 + fps: 30 + total_tasks: 6 + total_videos: 300 + total_chunks: 1 + chunks_size: 1000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 585.85 MB +frame_num: 38216 +dataset_size: 585.85 MB +data_structure: "Agilex_Cobot_Magic_storage_object_basket_qced_hardlink/\n|-- annotations\n\ + | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n|\ + \ |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ + | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ + \ 
subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ + | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ + \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n\ + | |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| \ + \ |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n\ + | `-- episode_000011.parquet\n| `-- ... (88 more entries)\n|-- meta\n\ + | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ + \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_head_rgb\n\ + \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:99 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad 
+ - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - 
right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. 
+support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_storage_object_closest.yaml 
b/dataset_info/Agilex_Cobot_Magic_storage_object_closest.yaml index 080ee11f57410da23067ed3be55c5b0c142bb854..7a12569bd0154ee76f46fdf2028b682c5a7c2bbe 100644 --- a/dataset_info/Agilex_Cobot_Magic_storage_object_closest.yaml +++ b/dataset_info/Agilex_Cobot_Magic_storage_object_closest.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -75,40 +75,59 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: Place the object closest to the toothpaste in the basket. +task_instruction: +- Place the object closest to the toothpaste in the basket. 
sub_tasks: -- Grasp the shampoo with left gripper -- Grasp the soft facial cleanser with right gripper -- Place the pink laundry detergent in the basket with right gripper -- Place the cleaning agent in the basket with left gripper -- Place the shampoo in the basket with right gripper -- Grasp the shampoo with right gripper -- Place the pink laundry detergent in the basket with left gripper -- Place the soft facial cleanser in the basket with right gripper -- Grasp the pink laundry detergent with right gripper -- Grasp the soft facial cleanser with left gripper -- Place the cleaning agent in the basket with right gripper -- Place the soft facial cleanser in the basket with left gripper -- Grasp the pink laundry detergent with left gripper -- Place the shampoo in the basket with left gripper -- End -- Grasp the cleaning agent with right gripper -- Grasp the cleaning agent with left gripper -- 'null' +- subtask: Grasp the shampoo with left gripper + subtask_index: 0 +- subtask: Grasp the soft facial cleanser with right gripper + subtask_index: 1 +- subtask: Place the pink laundry detergent in the basket with right gripper + subtask_index: 2 +- subtask: Place the cleaning agent in the basket with left gripper + subtask_index: 3 +- subtask: Place the shampoo in the basket with right gripper + subtask_index: 4 +- subtask: Grasp the shampoo with right gripper + subtask_index: 5 +- subtask: Place the pink laundry detergent in the basket with left gripper + subtask_index: 6 +- subtask: Place the soft facial cleanser in the basket with right gripper + subtask_index: 7 +- subtask: Grasp the pink laundry detergent with right gripper + subtask_index: 8 +- subtask: Grasp the soft facial cleanser with left gripper + subtask_index: 9 +- subtask: Place the cleaning agent in the basket with right gripper + subtask_index: 10 +- subtask: Place the soft facial cleanser in the basket with left gripper + subtask_index: 11 +- subtask: Grasp the pink laundry detergent with left gripper + 
subtask_index: 12 +- subtask: Place the shampoo in the basket with left gripper + subtask_index: 13 +- subtask: End + subtask_index: 14 +- subtask: Grasp the cleaning agent with right gripper + subtask_index: 15 +- subtask: Grasp the cleaning agent with left gripper + subtask_index: 16 +- subtask: 'null' + subtask_index: 17 atomic_actions: - grasp - lift - lower -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -116,13 +135,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -130,8 +146,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 100 total_frames: 29779 fps: 30 @@ -232,11 +247,9 @@ data_structure: 'Agilex_Cobot_Magic_storage_object_closest_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:99 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -509,7 +522,7 @@ features: &id012 dtype: float32 
shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -517,7 +530,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -544,229 +556,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_storage_object_closest - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: bathroom - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - Place the object closest to the toothpaste in the basket. 
- sub_tasks: - - subtask: Grasp the shampoo with left gripper - subtask_index: 0 - - subtask: Grasp the soft facial cleanser with right gripper - subtask_index: 1 - - subtask: Place the pink laundry detergent in the basket with right gripper - subtask_index: 2 - - subtask: Place the cleaning agent in the basket with left gripper - subtask_index: 3 - - subtask: Place the shampoo in the basket with right gripper - subtask_index: 4 - - subtask: Grasp the shampoo with right gripper - subtask_index: 5 - - subtask: Place the pink laundry detergent in the basket with left gripper - subtask_index: 6 - - subtask: Place the soft facial cleanser in the basket with right gripper - subtask_index: 7 - - subtask: Grasp the pink laundry detergent with right gripper - subtask_index: 8 - - subtask: Grasp the soft facial cleanser with left gripper - subtask_index: 9 - - subtask: Place the cleaning agent in the basket with right gripper - subtask_index: 10 - - subtask: Place the soft facial cleanser in the basket with left gripper - subtask_index: 11 - - subtask: Grasp the pink laundry detergent with left gripper - subtask_index: 12 - - subtask: Place the shampoo in the basket with left gripper - subtask_index: 13 - - subtask: End - subtask_index: 14 - - subtask: Grasp the cleaning agent with right gripper - subtask_index: 15 - - subtask: Grasp the cleaning agent with left gripper - subtask_index: 16 - - subtask: 'null' - subtask_index: 17 - atomic_actions: - - grasp - - lift - - lower - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 29779 - dataset_size: 506.53 MB - data_structure: 'Agilex_Cobot_Magic_storage_object_closest_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- backup - - | |-- data - - | | `-- chunk-000 - - | `-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (88 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. 
- homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_storage_object_closest_apple.yaml b/dataset_info/Agilex_Cobot_Magic_storage_object_closest_apple.yaml index 0d0a71ecdf566f220223b7009e0b123e94630b00..dc0e21fa7b5487509037ae89917c6828b26bf493 100644 --- a/dataset_info/Agilex_Cobot_Magic_storage_object_closest_apple.yaml +++ b/dataset_info/Agilex_Cobot_Magic_storage_object_closest_apple.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -75,41 +75,59 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. 
-task_result: 'null' -task_instruction: use a picker to grab the item closest to the apple and place it - in the basket. +task_instruction: +- use a picker to grab the item closest to the apple and place it in the basket. sub_tasks: -- Grasp the Rubik's Cube with the left gripper -- Grasp the chalkboard eraser with the right gripper -- Place the chalkboard eraser into the basket with the right gripper -- Place the shower sphere into the basket with the left gripper -- Place the mango into the basket with the right gripper -- Place the Rubik's Cube into the basket with the left gripper -- Place the shower sphere into the basket with the right gripper -- Grasp the mango with the right gripper -- Grasp the Rubik's Cube with the right gripper -- End -- Place the Rubik's Cube into the basket with the right gripper -- Grasp the mango with the left gripper -- Place the mango into the basket with the left gripper -- Grasp the shower sphere with the right gripper -- Place the chalkboard eraser into the basket with the left gripper -- Grasp the shower sphere with the left gripper -- Grasp the chalkboard eraser with the left gripper -- 'null' +- subtask: Grasp the Rubik's Cube with the left gripper + subtask_index: 0 +- subtask: Grasp the chalkboard eraser with the right gripper + subtask_index: 1 +- subtask: Place the chalkboard eraser into the basket with the right gripper + subtask_index: 2 +- subtask: Place the shower sphere into the basket with the left gripper + subtask_index: 3 +- subtask: Place the mango into the basket with the right gripper + subtask_index: 4 +- subtask: Place the Rubik's Cube into the basket with the left gripper + subtask_index: 5 +- subtask: Place the shower sphere into the basket with the right gripper + subtask_index: 6 +- subtask: Grasp the mango with the right gripper + subtask_index: 7 +- subtask: Grasp the Rubik's Cube with the right gripper + subtask_index: 8 +- subtask: End + subtask_index: 9 +- subtask: Place the Rubik's Cube into the 
basket with the right gripper + subtask_index: 10 +- subtask: Grasp the mango with the left gripper + subtask_index: 11 +- subtask: Place the mango into the basket with the left gripper + subtask_index: 12 +- subtask: Grasp the shower sphere with the right gripper + subtask_index: 13 +- subtask: Place the chalkboard eraser into the basket with the left gripper + subtask_index: 14 +- subtask: Grasp the shower sphere with the left gripper + subtask_index: 15 +- subtask: Grasp the chalkboard eraser with the left gripper + subtask_index: 16 +- subtask: 'null' + subtask_index: 17 atomic_actions: - grasp - lift - lower -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -117,13 +135,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -131,8 +146,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 49 total_frames: 13231 fps: 30 @@ -233,11 +247,9 @@ data_structure: 'Agilex_Cobot_Magic_storage_object_closest_apple_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:48 -data_path: 
data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -510,7 +522,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -518,7 +530,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -545,229 +556,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. 
- extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_storage_object_closest_apple - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: office_workspace - level2: office - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use a picker to grab the item closest to the apple and place it in the basket. - sub_tasks: - - subtask: Grasp the Rubik's Cube with the left gripper - subtask_index: 0 - - subtask: Grasp the chalkboard eraser with the right gripper - subtask_index: 1 - - subtask: Place the chalkboard eraser into the basket with the right gripper - subtask_index: 2 - - subtask: Place the shower sphere into the basket with the left gripper - subtask_index: 3 - - subtask: Place the mango into the basket with the right gripper - subtask_index: 4 - - subtask: Place the Rubik's Cube into the basket with the left gripper - subtask_index: 5 - - subtask: Place the shower sphere into the basket with the right gripper - subtask_index: 6 - - subtask: Grasp the mango with the right gripper - subtask_index: 7 - - subtask: Grasp the Rubik's Cube with the right gripper - subtask_index: 8 - - subtask: End - subtask_index: 9 - - subtask: Place the Rubik's Cube into the basket with the right gripper - subtask_index: 10 - - subtask: Grasp the mango with the left gripper - subtask_index: 11 - - subtask: Place the mango into the basket with the left gripper - subtask_index: 12 - - subtask: Grasp the shower sphere with the right gripper - subtask_index: 13 - - subtask: Place the chalkboard eraser into the basket with the left gripper - subtask_index: 14 - - subtask: Grasp the shower sphere with the left gripper - subtask_index: 15 - - subtask: Grasp the chalkboard eraser 
with the left gripper - subtask_index: 16 - - subtask: 'null' - subtask_index: 17 - atomic_actions: - - grasp - - lift - - lower - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 13231 - dataset_size: 189.68 MB - data_structure: 'Agilex_Cobot_Magic_storage_object_closest_apple_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- backup - - | |-- data - - | | `-- chunk-000 - - | `-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(37 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang 
Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_storage_object_closest_cube.yaml b/dataset_info/Agilex_Cobot_Magic_storage_object_closest_cube.yaml new file mode 100644 index 0000000000000000000000000000000000000000..4c3eae648c6a3de64c1fe66ebc200686ebf1f97e --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_storage_object_closest_cube.yaml @@ -0,0 +1,483 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. 
+extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_storage_object_closest_cube +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: office_workspace + level2: office + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: brown_basket + level1: home_storage + level2: brown_basket + level3: null + level4: null + level5: null +- object_name: mango + level1: food + level2: mango + level3: null + level4: null + level5: null +- object_name: apple + level1: food + level2: apple + level3: null + level4: null + level5: null +- object_name: rubik's_cube + level1: toys + level2: rubik's_cube + level3: null + level4: null + level5: null +- object_name: whiteboard_erasers + level1: stationery + level2: whiteboard_erasers + level3: null + level4: null + level5: null +- object_name: bathing_in_flowers + level1: daily_necessities + level2: bathing_in_flowers + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- use a picker to grab the item closest to the cube and place it in the basket. 
+sub_tasks: +- subtask: Place the apple into the basket with the right gripper + subtask_index: 0 +- subtask: Place the blackboard erasure into the basket with the left gripper + subtask_index: 1 +- subtask: Place the mango into the basket with the right gripper + subtask_index: 2 +- subtask: Grasp the mango with the right gripper + subtask_index: 3 +- subtask: Grasp the blackboard erasure with the right gripper + subtask_index: 4 +- subtask: Grasp the Shower puff with the left gripper + subtask_index: 5 +- subtask: Place the Shower puff into the basket with the right gripper + subtask_index: 6 +- subtask: End + subtask_index: 7 +- subtask: Place the Shower puff into the basket with the left gripper + subtask_index: 8 +- subtask: Grasp the mango with the left gripper + subtask_index: 9 +- subtask: Place the mango into the basket with the left gripper + subtask_index: 10 +- subtask: Grasp the Shower puff with the right gripper + subtask_index: 11 +- subtask: Grasp the apple with the right gripper + subtask_index: 12 +- subtask: Grasp the blackboard erasure with the left gripper + subtask_index: 13 +- subtask: Place the blackboard erasure into the basket with the right gripper + subtask_index: 14 +- subtask: 'null' + subtask_index: 15 +atomic_actions: +- grasp +- lift +- lower +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. 
+sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 49 + total_frames: 12420 + fps: 30 + total_tasks: 16 + total_videos: 147 + total_chunks: 1 + chunks_size: 1000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 154.87 MB +frame_num: 12420 +dataset_size: 154.87 MB +data_structure: "Agilex_Cobot_Magic_storage_object_closest_cube_qced_hardlink/\n|--\ + \ annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ + | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ + | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ + \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ + | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ + \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n\ + | |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| \ + \ |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n\ + | `-- episode_000011.parquet\n| `-- ... 
(37 more entries)\n|-- meta\n\ + | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ + \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_head_rgb\n\ + \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:48 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 26 + names: + 
- left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + 
eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_storage_object_left.yaml b/dataset_info/Agilex_Cobot_Magic_storage_object_left.yaml index 
eebda2b0e1c42805a8b7e644b368c1de4baa46a0..0319c368f0e7f934225913bcc753b9b03f35812b 100644 --- a/dataset_info/Agilex_Cobot_Magic_storage_object_left.yaml +++ b/dataset_info/Agilex_Cobot_Magic_storage_object_left.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -75,36 +75,49 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use the left gripper to grab items from the table and place them - in the basket. +task_instruction: +- use the left gripper to grab items from the table and place them in the basket. 
sub_tasks: -- Grasp the Rubik's Cube with the left gripper -- Place the Rubik's Cube into the basket with the left gripper -- Grasp the apple rubber puff with the left gripper -- Grasp the mango with the right gripper -- Grasp the Shower puff with the left gripper -- End -- Place the apple rubber puff into the basket with the left gripper -- Place the Shower puff into the basket with the left gripper -- Grasp the mango with the left gripper -- Place the mango into the basket with the left gripper -- Place the chalkboard eraser into the basket with the left gripper -- Grasp the chalkboard eraser with the left gripper -- 'null' +- subtask: Grasp the Rubik's Cube with the left gripper + subtask_index: 0 +- subtask: Place the Rubik's Cube into the basket with the left gripper + subtask_index: 1 +- subtask: Grasp the apple rubber puff with the left gripper + subtask_index: 2 +- subtask: Grasp the mango with the right gripper + subtask_index: 3 +- subtask: Grasp the Shower puff with the left gripper + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: Place the apple rubber puff into the basket with the left gripper + subtask_index: 6 +- subtask: Place the Shower puff into the basket with the left gripper + subtask_index: 7 +- subtask: Grasp the mango with the left gripper + subtask_index: 8 +- subtask: Place the mango into the basket with the left gripper + subtask_index: 9 +- subtask: Place the chalkboard eraser into the basket with the left gripper + subtask_index: 10 +- subtask: Grasp the chalkboard eraser with the left gripper + subtask_index: 11 +- subtask: 'null' + subtask_index: 12 atomic_actions: - grasp - lift - lower -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -112,13 +125,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -126,8 +136,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 100 total_frames: 26742 fps: 30 @@ -228,11 +237,9 @@ data_structure: 'Agilex_Cobot_Magic_storage_object_left_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:99 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -505,7 +512,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -513,7 +520,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. 
support_info: For technical support, please open an issue on our GitHub repository. @@ -540,219 +546,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_storage_object_left - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: office_workspace - level2: office - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use the left gripper to grab items from the table and place them in the basket. 
- sub_tasks: - - subtask: Grasp the Rubik's Cube with the left gripper - subtask_index: 0 - - subtask: Place the Rubik's Cube into the basket with the left gripper - subtask_index: 1 - - subtask: Grasp the apple rubber puff with the left gripper - subtask_index: 2 - - subtask: Grasp the mango with the right gripper - subtask_index: 3 - - subtask: Grasp the Shower puff with the left gripper - subtask_index: 4 - - subtask: End - subtask_index: 5 - - subtask: Place the apple rubber puff into the basket with the left gripper - subtask_index: 6 - - subtask: Place the Shower puff into the basket with the left gripper - subtask_index: 7 - - subtask: Grasp the mango with the left gripper - subtask_index: 8 - - subtask: Place the mango into the basket with the left gripper - subtask_index: 9 - - subtask: Place the chalkboard eraser into the basket with the left gripper - subtask_index: 10 - - subtask: Grasp the chalkboard eraser with the left gripper - subtask_index: 11 - - subtask: 'null' - subtask_index: 12 - atomic_actions: - - grasp - - lift - - lower - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 26742 - dataset_size: 360.15 MB - data_structure: 'Agilex_Cobot_Magic_storage_object_left_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- backup - - | |-- data - - | | `-- chunk-000 - - | `-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (88 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. 
- homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_storage_object_red_tablecloth.yaml b/dataset_info/Agilex_Cobot_Magic_storage_object_red_tablecloth.yaml index b2e8fa1dfbd1c6cadb01dcda8bff155a91fad452..05c45e62e8cc4e0c3938e24a34bea651514f6a26 100644 --- a/dataset_info/Agilex_Cobot_Magic_storage_object_red_tablecloth.yaml +++ b/dataset_info/Agilex_Cobot_Magic_storage_object_red_tablecloth.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. 
-objects: &id006 +objects: - object_name: red_table_cloths level1: laboratory_supplies level2: white_table_cloths @@ -363,45 +363,67 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up an item with a gripper and place it in a random container - on the desktop. +task_instruction: +- pick up an item with a gripper and place it in a random container on the desktop. sub_tasks: -- Place the XX into the blue bowl with the left gripper -- Place the XX into the purple pot with the right gripper -- Place the XX into the pink pot with the right gripper -- Grasp the XX with the right gripper -- Place the XX into the purple pot with the left gripper -- End -- Place the XX into the pink pot with the left gripper -- Place the XX into the cyan plate with the right gripper -- Place the XX into the cyan plate with the left gripper -- Place the XX into the red pot with the left gripper -- Place the XX into the pen container with the right gripper -- Grasp the XX with the left gripper -- Place the XX into the blue bowl with the right gripper -- Place the XX into the pink bowl with the left gripper -- Place the XX into the red pot with the right gripper -- Place the XX into the pink bowl with the right gripper -- Place the XX into the white plate with the left gripper -- Place the XX into the white plate with the right gripper -- Place the XX into the blue plate with the left gripper -- Place the XX into the pen container with the left gripper -- Place the XX into the blue plate with the right gripper -- 'null' +- subtask: Place the XX into the blue bowl with the left gripper + subtask_index: 0 +- subtask: Place the XX into the purple pot with the right gripper + subtask_index: 1 +- subtask: Place the XX into the pink pot with the right gripper + subtask_index: 2 +- subtask: Grasp the XX with the right gripper + subtask_index: 3 +- subtask: 
Place the XX into the purple pot with the left gripper + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: Place the XX into the pink pot with the left gripper + subtask_index: 6 +- subtask: Place the XX into the cyan plate with the right gripper + subtask_index: 7 +- subtask: Place the XX into the cyan plate with the left gripper + subtask_index: 8 +- subtask: Place the XX into the red pot with the left gripper + subtask_index: 9 +- subtask: Place the XX into the pen container with the right gripper + subtask_index: 10 +- subtask: Grasp the XX with the left gripper + subtask_index: 11 +- subtask: Place the XX into the blue bowl with the right gripper + subtask_index: 12 +- subtask: Place the XX into the pink bowl with the left gripper + subtask_index: 13 +- subtask: Place the XX into the red pot with the right gripper + subtask_index: 14 +- subtask: Place the XX into the pink bowl with the right gripper + subtask_index: 15 +- subtask: Place the XX into the white plate with the left gripper + subtask_index: 16 +- subtask: Place the XX into the white plate with the right gripper + subtask_index: 17 +- subtask: Place the XX into the blue plate with the left gripper + subtask_index: 18 +- subtask: Place the XX into the pen container with the left gripper + subtask_index: 19 +- subtask: Place the XX into the blue plate with the right gripper + subtask_index: 20 +- subtask: 'null' + subtask_index: 21 atomic_actions: - grasp - lift - lower -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -409,13 +431,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -423,8 +442,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 200 total_frames: 99000 fps: 30 @@ -525,11 +543,9 @@ data_structure: 'Agilex_Cobot_Magic_storage_object_red_tablecloth_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:199 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -802,7 +818,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -810,7 +826,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact 
us. support_info: For technical support, please open an issue on our GitHub repository. @@ -837,237 +852,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_storage_object_red_tablecloth - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up an item with a gripper and place it in a random container on the desktop. 
- sub_tasks: - - subtask: Place the XX into the blue bowl with the left gripper - subtask_index: 0 - - subtask: Place the XX into the purple pot with the right gripper - subtask_index: 1 - - subtask: Place the XX into the pink pot with the right gripper - subtask_index: 2 - - subtask: Grasp the XX with the right gripper - subtask_index: 3 - - subtask: Place the XX into the purple pot with the left gripper - subtask_index: 4 - - subtask: End - subtask_index: 5 - - subtask: Place the XX into the pink pot with the left gripper - subtask_index: 6 - - subtask: Place the XX into the cyan plate with the right gripper - subtask_index: 7 - - subtask: Place the XX into the cyan plate with the left gripper - subtask_index: 8 - - subtask: Place the XX into the red pot with the left gripper - subtask_index: 9 - - subtask: Place the XX into the pen container with the right gripper - subtask_index: 10 - - subtask: Grasp the XX with the left gripper - subtask_index: 11 - - subtask: Place the XX into the blue bowl with the right gripper - subtask_index: 12 - - subtask: Place the XX into the pink bowl with the left gripper - subtask_index: 13 - - subtask: Place the XX into the red pot with the right gripper - subtask_index: 14 - - subtask: Place the XX into the pink bowl with the right gripper - subtask_index: 15 - - subtask: Place the XX into the white plate with the left gripper - subtask_index: 16 - - subtask: Place the XX into the white plate with the right gripper - subtask_index: 17 - - subtask: Place the XX into the blue plate with the left gripper - subtask_index: 18 - - subtask: Place the XX into the pen container with the left gripper - subtask_index: 19 - - subtask: Place the XX into the blue plate with the right gripper - subtask_index: 20 - - subtask: 'null' - subtask_index: 21 - atomic_actions: - - grasp - - lift - - lower - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot 
provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 99000 - dataset_size: 2.88 GB - data_structure: 'Agilex_Cobot_Magic_storage_object_red_tablecloth_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- backup - - | |-- data - - | | `-- chunk-000 - - | `-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (188 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. 
- homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_storage_orange_basket_left.yaml b/dataset_info/Agilex_Cobot_Magic_storage_orange_basket_left.yaml index 545860d1feefb105cb900ae80d345203da617e59..cfbba9f565edb73acd422d97f9224b57e42fa316 100644 --- a/dataset_info/Agilex_Cobot_Magic_storage_orange_basket_left.yaml +++ b/dataset_info/Agilex_Cobot_Magic_storage_orange_basket_left.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -51,27 +51,33 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. 
-task_result: 'null' -task_instruction: left gripper grabs oranges and puts them in the basket. +task_instruction: +- left gripper grabs oranges and puts them in the basket. sub_tasks: -- End -- Place the orange in the basket with left gripper -- Abnormal -- Grasp the orange with left gripper -- 'null' +- subtask: End + subtask_index: 0 +- subtask: Place the orange in the basket with left gripper + subtask_index: 1 +- subtask: Abnormal + subtask_index: 2 +- subtask: Grasp the orange with left gripper + subtask_index: 3 +- subtask: 'null' + subtask_index: 4 atomic_actions: - grasp - lift - lower -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -79,13 +85,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -93,8 +96,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 99 total_frames: 40181 fps: 30 @@ -195,11 +197,9 @@ data_structure: 'Agilex_Cobot_Magic_storage_orange_basket_left_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:98 -data_path: 
data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -472,7 +472,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -480,7 +480,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -507,203 +506,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. 
- extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_storage_orange_basket_left - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - left gripper grabs oranges and puts them in the basket. - sub_tasks: - - subtask: End - subtask_index: 0 - - subtask: Place the orange in the basket with left gripper - subtask_index: 1 - - subtask: Abnormal - subtask_index: 2 - - subtask: Grasp the orange with left gripper - subtask_index: 3 - - subtask: 'null' - subtask_index: 4 - atomic_actions: - - grasp - - lift - - lower - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 40181 - dataset_size: 488.80 MB - data_structure: 'Agilex_Cobot_Magic_storage_orange_basket_left_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- backup - - | |-- data - - | | `-- chunk-000 - - | `-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (87 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. 
- homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_storage_orange_basket_right.yaml b/dataset_info/Agilex_Cobot_Magic_storage_orange_basket_right.yaml index fe2ba493129f42f8e7d722c18825b5144078b61a..b24318d27848ee1e708a9579aa49228b3baa1195 100644 --- a/dataset_info/Agilex_Cobot_Magic_storage_orange_basket_right.yaml +++ b/dataset_info/Agilex_Cobot_Magic_storage_orange_basket_right.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -51,27 +51,33 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. 
-task_result: 'null' -task_instruction: the right gripper grabs oranges and puts them into the basket. +task_instruction: +- the right gripper grabs oranges and puts them into the basket. sub_tasks: -- Place the orange in the basket with right gripper -- End -- Abnormal -- Grasp the orange with right gripper -- 'null' +- subtask: Place the orange in the basket with right gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Abnormal + subtask_index: 2 +- subtask: Grasp the orange with right gripper + subtask_index: 3 +- subtask: 'null' + subtask_index: 4 atomic_actions: - grasp - lift - lower -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -79,13 +85,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -93,8 +96,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 98 total_frames: 22531 fps: 30 @@ -195,11 +197,9 @@ data_structure: 'Agilex_Cobot_Magic_storage_orange_basket_right_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:97 -data_path: 
data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -472,7 +472,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -480,7 +480,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -507,203 +506,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. 
- extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_storage_orange_basket_right - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - the right gripper grabs oranges and puts them into the basket. - sub_tasks: - - subtask: Place the orange in the basket with right gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Abnormal - subtask_index: 2 - - subtask: Grasp the orange with right gripper - subtask_index: 3 - - subtask: 'null' - subtask_index: 4 - atomic_actions: - - grasp - - lift - - lower - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 22531 - dataset_size: 233.17 MB - data_structure: 'Agilex_Cobot_Magic_storage_orange_basket_right_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- backup - - | |-- data - - | | `-- chunk-000 - - | `-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (86 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. 
- homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_storage_orange_white_bag.yaml b/dataset_info/Agilex_Cobot_Magic_storage_orange_white_bag.yaml index 1b9bf828773aa053645638f5841fb1151c84aa71..5277f6a912ec27c56c8be053e164d8111e51ea83 100644 --- a/dataset_info/Agilex_Cobot_Magic_storage_orange_white_bag.yaml +++ b/dataset_info/Agilex_Cobot_Magic_storage_orange_white_bag.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -51,30 +51,39 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. 
-task_result: 'null' -task_instruction: open the white linen bag and put the orange in the bag. +task_instruction: +- open the white linen bag and put the orange in the bag. sub_tasks: -- Grasp the orange with right gripper -- Grasp the orange with left gripper -- End -- Lift the handbag with left gripper -- Place the orange in the handbag with right gripper -- Place the orange in the basket with right gripper -- Put down the handbag with left gripper -- 'null' +- subtask: Grasp the orange with right gripper + subtask_index: 0 +- subtask: Grasp the orange with left gripper + subtask_index: 1 +- subtask: End + subtask_index: 2 +- subtask: Lift the handbag with left gripper + subtask_index: 3 +- subtask: Place the orange in the handbag with right gripper + subtask_index: 4 +- subtask: Place the orange in the basket with right gripper + subtask_index: 5 +- subtask: Put down the handbag with left gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - lift - lower -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -82,13 +91,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -96,8 +102,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 100 total_frames: 43104 fps: 30 @@ -182,11 +187,9 @@ data_structure: 'Agilex_Cobot_Magic_storage_orange_white_bag_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:99 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -459,7 +462,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -467,7 +470,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. 
support_info: For technical support, please open an issue on our GitHub repository. @@ -494,193 +496,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_storage_orange_white_bag - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - open the white linen bag and put the orange in the bag. 
- sub_tasks: - - subtask: Grasp the orange with right gripper - subtask_index: 0 - - subtask: Grasp the orange with left gripper - subtask_index: 1 - - subtask: End - subtask_index: 2 - - subtask: Lift the handbag with left gripper - subtask_index: 3 - - subtask: Place the orange in the handbag with right gripper - subtask_index: 4 - - subtask: Place the orange in the basket with right gripper - subtask_index: 5 - - subtask: Put down the handbag with left gripper - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - lift - - lower - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 43104 - dataset_size: 406.02 MB - data_structure: 'Agilex_Cobot_Magic_storage_orange_white_bag_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(88 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang 
Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_storage_peach_brown_bag.yaml b/dataset_info/Agilex_Cobot_Magic_storage_peach_brown_bag.yaml index b1416c62dc363aa66e716dbfd20be09432c8e859..690f6e3174c97b3b68e97bc5e8adf579cfadf296 100644 --- a/dataset_info/Agilex_Cobot_Magic_storage_peach_brown_bag.yaml +++ b/dataset_info/Agilex_Cobot_Magic_storage_peach_brown_bag.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -51,28 +51,35 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: open the brown linen bag and put the peach in the bag. +task_instruction: +- open the brown linen bag and put the peach in the bag. sub_tasks: -- Put down the handbag with left grippe -- End -- Place the peach in the handbag with right gripper -- Lift the handbag with left gripper -- Grasp the peach with right gripper -- 'null' +- subtask: Put down the handbag with left grippe + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Place the peach in the handbag with right gripper + subtask_index: 2 +- subtask: Lift the handbag with left gripper + subtask_index: 3 +- subtask: Grasp the peach with right gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - lift - lower -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -80,13 +87,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -94,8 +98,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 100 total_frames: 49252 fps: 30 @@ -180,11 +183,9 @@ data_structure: 'Agilex_Cobot_Magic_storage_peach_brown_bag_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:99 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -457,7 +458,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -465,7 +466,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. 
support_info: For technical support, please open an issue on our GitHub repository. @@ -492,189 +492,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_storage_peach_brown_bag - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - open the brown linen bag and put the peach in the bag. - sub_tasks: - - subtask: Put down the handbag with left grippe - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Place the peach in the handbag with right gripper - subtask_index: 2 - - subtask: Lift the handbag with left gripper - subtask_index: 3 - - subtask: Grasp the peach with right gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - lift - - lower - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 49252 - dataset_size: 568.64 MB - data_structure: 'Agilex_Cobot_Magic_storage_peach_brown_bag_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (88 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_storage_peach_left.yaml b/dataset_info/Agilex_Cobot_Magic_storage_peach_left.yaml index 4bc47eacabd6b534d43fd60235e28192794d97c6..e5efc67d63e8c59dd5497ed8c147ed8f1e484c21 100644 --- a/dataset_info/Agilex_Cobot_Magic_storage_peach_left.yaml +++ b/dataset_info/Agilex_Cobot_Magic_storage_peach_left.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -51,27 +51,33 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the peach in the basket with your left . +task_instruction: +- put the peach in the basket with your left . 
sub_tasks: -- End -- Place the peach in the basket with left gripper -- Grasp the peach with left gripper -- Abnormal -- 'null' +- subtask: End + subtask_index: 0 +- subtask: Place the peach in the basket with left gripper + subtask_index: 1 +- subtask: Grasp the peach with left gripper + subtask_index: 2 +- subtask: Abnormal + subtask_index: 3 +- subtask: 'null' + subtask_index: 4 atomic_actions: - grasp - lift - lower -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -79,13 +85,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -93,8 +96,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 99 total_frames: 21979 fps: 30 @@ -195,11 +197,9 @@ data_structure: 'Agilex_Cobot_Magic_storage_peach_left_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:98 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ 
-472,7 +472,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -480,7 +480,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -507,203 +506,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_storage_peach_left - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the peach in the basket with your left . 
- sub_tasks: - - subtask: End - subtask_index: 0 - - subtask: Place the peach in the basket with left gripper - subtask_index: 1 - - subtask: Grasp the peach with left gripper - subtask_index: 2 - - subtask: Abnormal - subtask_index: 3 - - subtask: 'null' - subtask_index: 4 - atomic_actions: - - grasp - - lift - - lower - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 21979 - dataset_size: 264.71 MB - data_structure: 'Agilex_Cobot_Magic_storage_peach_left_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- backup - - | |-- data - - | | `-- chunk-000 - - | `-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(87 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang 
Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_storage_peach_right.yaml b/dataset_info/Agilex_Cobot_Magic_storage_peach_right.yaml index 732be1eda8c2e71ad470fe00c0f0ba33e71add72..dd836e05259cf5847532d1d647e4bb792f21a5c5 100644 --- a/dataset_info/Agilex_Cobot_Magic_storage_peach_right.yaml +++ b/dataset_info/Agilex_Cobot_Magic_storage_peach_right.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -51,27 +51,33 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the peach in the basket with right arm. +task_instruction: +- put the peach in the basket with right arm. sub_tasks: -- Grasp the peach with right gripper -- End -- Place the peach in the basket with right gripper -- Abnormal -- 'null' +- subtask: Grasp the peach with right gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Place the peach in the basket with right gripper + subtask_index: 2 +- subtask: Abnormal + subtask_index: 3 +- subtask: 'null' + subtask_index: 4 atomic_actions: - grasp - lift - lower -robot_name: Agilex_Cobot_Magic +robot_name: +- Agilex_Cobot_Magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -79,13 +85,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -93,8 +96,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 99 total_frames: 25876 fps: 30 @@ -195,11 +197,9 @@ data_structure: 'Agilex_Cobot_Magic_storage_peach_right_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:98 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -472,7 +472,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -480,7 +480,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. 
support_info: For technical support, please open an issue on our GitHub repository. @@ -507,203 +506,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Agilex_Cobot_Magic_storage_peach_right - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the peach in the basket with right arm. - sub_tasks: - - subtask: Grasp the peach with right gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Place the peach in the basket with right gripper - subtask_index: 2 - - subtask: Abnormal - subtask_index: 3 - - subtask: 'null' - subtask_index: 4 - atomic_actions: - - grasp - - lift - - lower - robot_name: - - Agilex_Cobot_Magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 25876 - dataset_size: 277.18 MB - data_structure: 'Agilex_Cobot_Magic_storage_peach_right_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- backup - - | |-- data - - | | `-- chunk-000 - - | `-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (87 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. 
- homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Agilex_Cobot_Magic_storage_peach_white_bag.yaml b/dataset_info/Agilex_Cobot_Magic_storage_peach_white_bag.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e10223e0b7ca725c3a7a67d2c5f8b880c7f34dc5 --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_storage_peach_white_bag.yaml @@ -0,0 +1,499 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_storage_peach_white_bag +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: household + level2: living_room + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. 
+objects: +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: brown_basket + level1: home_storage + level2: brown_basket + level3: null + level4: null + level5: null +- object_name: white_canvas_bags + level1: daily_necessities + level2: white_canvas_bags + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- open the white linen bag and put the peach in the bag. +sub_tasks: +- subtask: Lift the handbag with right gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Place the peach in the handbag with right gripper + subtask_index: 2 +- subtask: Lift the handbag with left gripper + subtask_index: 3 +- subtask: Grasp the peach with right gripper + subtask_index: 4 +- subtask: Put down the handbag with left gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 +atomic_actions: +- grasp +- lift +- lower +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. 
+sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 100 + total_frames: 42094 + fps: 30 + total_tasks: 7 + total_videos: 300 + total_chunks: 1 + chunks_size: 1000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 392.58 MB +frame_num: 42094 +dataset_size: 392.58 MB +data_structure: 'Agilex_Cobot_Magic_storage_peach_white_bag_qced_hardlink/ + + |-- annotations + + | |-- eef_acc_mag_annotation.jsonl + + | |-- eef_direction_annotation.jsonl + + | |-- eef_velocity_annotation.jsonl + + | |-- gripper_activity_annotation.jsonl + + | |-- gripper_mode_annotation.jsonl + + | |-- scene_annotations.jsonl + + | `-- subtask_annotations.jsonl + + |-- data + + | `-- chunk-000 + + | |-- episode_000000.parquet + + | |-- episode_000001.parquet + + | |-- episode_000002.parquet + + | |-- episode_000003.parquet + + | |-- episode_000004.parquet + + | |-- episode_000005.parquet + + | |-- episode_000006.parquet + + | |-- episode_000007.parquet + + | |-- episode_000008.parquet + + | |-- episode_000009.parquet + + | |-- episode_000010.parquet + + | `-- episode_000011.parquet + + | `-- ... 
(88 more entries) + + |-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- videos + + | `-- chunk-000 + + | |-- observation.images.cam_head_rgb + + | |-- observation.images.cam_left_wrist_rgb + + | `-- observation.images.cam_right_wrist_rgb + + |-- info.yaml + + `-- README.md' +splits: + train: 0:99 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - 
right_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + 
names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_storage_towel.yaml b/dataset_info/Agilex_Cobot_Magic_storage_towel.yaml new file mode 100644 index 
0000000000000000000000000000000000000000..3bbbb2ffb1e4d5ce1258d7a855bca16d5a3a7ecc --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_storage_towel.yaml @@ -0,0 +1,450 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_storage_towel +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: office_workspace + level2: office + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: basket + level1: home_storage + level2: basket + level3: null + level4: null + level5: null +- object_name: towel + level1: daily_necessities + level2: towel + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- pick up the folded towels and put them in the basket. 
+sub_tasks: +- subtask: 'Right hand: grab the folded purple towel' + subtask_index: 0 +- subtask: 'Right hand: grab the folded brown towel' + subtask_index: 1 +- subtask: 'Right hand: grab the folded grey towel' + subtask_index: 2 +- subtask: 'Right hand: place the blue towel in right basket' + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: 'Right hand: place the grey towel in right basket' + subtask_index: 5 +- subtask: 'Right hand: place the purple towel in right basket' + subtask_index: 6 +- subtask: 'Right hand: place the brown towel in right basket' + subtask_index: 7 +- subtask: 'Right hand: grab the folded blue towel' + subtask_index: 8 +- subtask: 'null' + subtask_index: 9 +atomic_actions: +- grasp +- fold +- lift +- lower +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_right_wrist_rgb +- cam_left_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=h264, pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=h264, + pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=h264, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 1176 + total_frames: 512832 + fps: 30 + total_tasks: 10 + total_videos: 3528 + total_chunks: 1 + chunks_size: 10000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 34.54 GB +frame_num: 512832 +dataset_size: 34.54 GB +data_structure: 
"Agilex_Cobot_Magic_storage_towel_qced_hardlink/\n|-- annotations\n\ + | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n|\ + \ |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ + | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ + \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ + | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ + \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n\ + | |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| \ + \ |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n\ + | `-- episode_000011.parquet\n| `-- ... (1164 more entries)\n|-- meta\n\ + | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ + \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_head_rgb\n\ + \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:1175 + val: 960:1080 + test: 1080:1201 +features: + action: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - left_gripper_open + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + - right_gripper_open + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - 
left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - left_gripper_open + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + - right_gripper_open + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.fps: 30.0 + video.height: 480 + video.width: 640 + video.channels: 3 + video.codec: h264 + video.pix_fmt: yuv420p + video.is_depth_map: false + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.fps: 30.0 + video.height: 480 + video.width: 640 + video.channels: 3 + video.codec: h264 + video.pix_fmt: yuv420p + video.is_depth_map: false + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.fps: 30.0 + video.height: 480 + video.width: 640 + video.channels: 3 + video.codec: h264 + video.pix_fmt: yuv420p + video.is_depth_map: false + has_audio: false + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - 
left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based 
on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet 
+video_path: videos/chunk-{id}/observation.images.cam_head_rgb/episode_{id}.mp{id} +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_sweep_coffee_beans.yaml b/dataset_info/Agilex_Cobot_Magic_sweep_coffee_beans.yaml new file mode 100644 index 0000000000000000000000000000000000000000..46b367c57034aac7462fb8789ec99ed78a6989fa --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_sweep_coffee_beans.yaml @@ -0,0 +1,460 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_sweep_coffee_beans +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: household + level2: kitchen + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. 
+objects: +- object_name: table + level1: furniture + level2: table + level3: null + level4: null + level5: null +- object_name: storage_rack + level1: home_storage + level2: storage_rack + level3: null + level4: null + level5: null +- object_name: coffee_beans + level1: food + level2: coffee_beans + level3: null + level4: null + level5: null +- object_name: dustpan + level1: cleaning_supplies + level2: dustpan + level3: null + level4: null + level5: null +- object_name: small_broom + level1: cleaning_supplies + level2: small_broom + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- use the right gripper to pick up the brush on the shelf, and the left gripper to + hold the dustpan on the table. Use the brush to sweep the coffee beans on the table + into the dustpan. +sub_tasks: +- subtask: Abnormal + subtask_index: 0 +- subtask: Left hand:place the dustpan on the table + subtask_index: 1 +- subtask: Right hand:sweep the coffee beans into the dustpan + subtask_index: 2 +- subtask: 'Left hand: grab the dustpan' + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: Right hand:place the broom on the rack + subtask_index: 5 +- subtask: 'Left hand: move the dustpan to the left side of the coffee beans' + subtask_index: 6 +- subtask: Right hand:grab the broom + subtask_index: 7 +- subtask: 'null' + subtask_index: 8 +atomic_actions: +- grasp +- pick +- place +- sweep +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. 
+sensor_list: +- cam_head_rgb +- cam_right_wrist_rgb +- cam_left_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=h264, pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=h264, + pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=h264, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 359 + total_frames: 331666 + fps: 30 + total_tasks: 9 + total_videos: 1077 + total_chunks: 1 + chunks_size: 10000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 13.24 GB +frame_num: 331666 +dataset_size: 13.24 GB +data_structure: "Agilex_Cobot_Magic_sweep_coffee_bean_qced_hardlink/\n|-- annotations\n\ + | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n|\ + \ |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ + | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ + \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ + | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ + \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n\ + | |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| \ + \ |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n\ + | `-- episode_000011.parquet\n| `-- ... 
(347 more entries)\n|-- meta\n\ + | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ + \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_head_rgb\n\ + \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:358 +features: + action: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - left_gripper_open + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + - right_gripper_open + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - left_gripper_open + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + - right_gripper_open + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.fps: 30.0 + video.height: 480 + video.width: 640 + video.channels: 3 + video.codec: h264 + video.pix_fmt: 
yuv420p + video.is_depth_map: false + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.fps: 30.0 + video.height: 480 + video.width: 640 + video.channels: 3 + video.codec: h264 + video.pix_fmt: yuv420p + video.is_depth_map: false + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.fps: 30.0 + video.height: 480 + video.width: 640 + video.channels: 3 + video.codec: h264 + video.pix_fmt: yuv420p + video.is_depth_map: false + has_audio: false + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + 
shape: + - 2 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_right_wrist_rgb/episode_{id}.mp{id} +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Airbot_MMK2_click_pen.yaml b/dataset_info/Airbot_MMK2_click_pen.yaml index 88cf2e2bf0a985acec1c2243d6696726b4c2bf80..d9264e7b9e5e64c5dc750d79fba3bcd2554f3edc 100644 --- 
a/dataset_info/Airbot_MMK2_click_pen.yaml +++ b/dataset_info/Airbot_MMK2_click_pen.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: furniture level2: table @@ -51,32 +51,39 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the pen with your hand, press the pen switch and then place - it on the table. +task_instruction: +- pick up the pen with your hand, press the pen switch and then place it on the table. 
sub_tasks: -- End -- Lift the pen with the right gripper -- Grasp the pen with the right gripper -- Place the pen on the table with the right gripper -- Abnormal -- Press the pen switch with the right gripper -- 'null' +- subtask: End + subtask_index: 0 +- subtask: Lift the pen with the right gripper + subtask_index: 1 +- subtask: Grasp the pen with the right gripper + subtask_index: 2 +- subtask: Place the pen on the table with the right gripper + subtask_index: 3 +- subtask: Abnormal + subtask_index: 4 +- subtask: Press the pen switch with the right gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - garsp - pick - place - pressbutton -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -85,13 +92,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -99,8 +103,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 100 total_frames: 30984 fps: 30 @@ -127,11 +130,9 @@ 
data_structure: "Airbot_MMK2_click_pen_qced_hardlink/\n|-- annotations\n| |-- | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:99 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -401,7 +402,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -409,7 +410,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -436,136 +436,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. 
- extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_click_pen - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: education - level2: school - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the pen with your hand, press the pen switch and then place it on the - table. - sub_tasks: - - subtask: End - subtask_index: 0 - - subtask: Lift the pen with the right gripper - subtask_index: 1 - - subtask: Grasp the pen with the right gripper - subtask_index: 2 - - subtask: Place the pen on the table with the right gripper - subtask_index: 3 - - subtask: Abnormal - subtask_index: 4 - - subtask: Press the pen switch with the right gripper - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - garsp - - pick - - place - - pressbutton - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 30984 - dataset_size: 1.12 GB - data_structure: "Airbot_MMK2_click_pen_qced_hardlink/\n|-- annotations\n| |--\ - \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ - \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n|\ - \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... (88 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_close_door_left.yaml b/dataset_info/Airbot_MMK2_close_door_left.yaml index f476c81c0461846f290e8d67b8727a5827edca88..55f83b6fb225b5f031becaeb3313c1058100174f 100644 --- a/dataset_info/Airbot_MMK2_close_door_left.yaml +++ b/dataset_info/Airbot_MMK2_close_door_left.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: cabinet level1: home_storage level2: cabinet @@ -39,25 +39,30 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: close the cabinet door with your left hand. +task_instruction: +- close the cabinet door with your left hand. 
sub_tasks: -- Touch the door with the left gripper -- End -- Close the cupboard door with the left gripper -- 'null' +- subtask: Touch the door with the left gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Close the cupboard door with the left gripper + subtask_index: 2 +- subtask: 'null' + subtask_index: 3 atomic_actions: - push -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -66,13 +71,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -80,8 +82,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 5322 fps: 30 @@ -109,11 +110,9 @@ data_structure: "Airbot_MMK2_close_door_left_qced_hardlink/\n|-- annotations\n| \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: 
train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -383,7 +382,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -391,7 +390,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -418,126 +416,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. 
- extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_close_door_left - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: bedroom - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - close the cabinet door with your left hand. - sub_tasks: - - subtask: Touch the door with the left gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Close the cupboard door with the left gripper - subtask_index: 2 - - subtask: 'null' - subtask_index: 3 - atomic_actions: - - push - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 5322 - dataset_size: 161.77 MB - data_structure: "Airbot_MMK2_close_door_left_qced_hardlink/\n|-- annotations\n|\ - \ |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... (38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_close_door_right.yaml b/dataset_info/Airbot_MMK2_close_door_right.yaml index 0c72e2eae1c9f96f9394199505621e956d701264..c9df6bd1a9fe5fbed7b02091e55904cbb33e93d2 100644 --- a/dataset_info/Airbot_MMK2_close_door_right.yaml +++ b/dataset_info/Airbot_MMK2_close_door_right.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: cabinet level1: home_storage level2: cabinet @@ -39,25 +39,30 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: close the cabinet door with your right hand. +task_instruction: +- close the cabinet door with your right hand. 
sub_tasks: -- End -- Close the cupboard door with the right gripper -- Touch the door with the right gripper -- 'null' +- subtask: End + subtask_index: 0 +- subtask: Close the cupboard door with the right gripper + subtask_index: 1 +- subtask: Touch the door with the right gripper + subtask_index: 2 +- subtask: 'null' + subtask_index: 3 atomic_actions: - push -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -66,13 +71,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -80,8 +82,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 47 total_frames: 5437 fps: 30 @@ -109,11 +110,9 @@ data_structure: "Airbot_MMK2_close_door_right_qced_hardlink/\n|-- annotations\n| \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: 
train: 0:46 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -383,7 +382,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -391,7 +390,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -418,126 +416,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. 
- extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_close_door_right - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: bedroom - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - close the cabinet door with your right hand. - sub_tasks: - - subtask: End - subtask_index: 0 - - subtask: Close the cupboard door with the right gripper - subtask_index: 1 - - subtask: Touch the door with the right gripper - subtask_index: 2 - - subtask: 'null' - subtask_index: 3 - atomic_actions: - - push - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 5437 - dataset_size: 148.62 MB - data_structure: "Airbot_MMK2_close_door_right_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... (35 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_close_doors.yaml b/dataset_info/Airbot_MMK2_close_doors.yaml index 17e87a7d808828080acc135eacd077b825d34775..e032140723bbad82a21f8fd1990f2d789864f51e 100644 --- a/dataset_info/Airbot_MMK2_close_doors.yaml +++ b/dataset_info/Airbot_MMK2_close_doors.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: cabinet level1: home_storage level2: cabinet @@ -39,27 +39,34 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: close the door by hand. +task_instruction: +- close the door by hand. 
sub_tasks: -- End -- Touch the right cabinet door with the right gripper -- Close the door with the right gripper -- Touch the left cabinet door with the left gripper -- Close the door with the left gripper -- 'null' +- subtask: End + subtask_index: 0 +- subtask: Touch the right cabinet door with the right gripper + subtask_index: 1 +- subtask: Close the door with the right gripper + subtask_index: 2 +- subtask: Touch the left cabinet door with the left gripper + subtask_index: 3 +- subtask: Close the door with the left gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - push -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -68,13 +75,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -82,8 +86,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 8141 fps: 30 @@ -110,11 +113,9 @@ data_structure: "Airbot_MMK2_close_doors_qced_hardlink/\n|-- 
annotations\n| |- | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -384,7 +385,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -392,7 +393,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -419,130 +419,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. 
- extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_close_doors - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: studroom - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - close the door by hand. - sub_tasks: - - subtask: End - subtask_index: 0 - - subtask: Touch the right cabinet door with the right gripper - subtask_index: 1 - - subtask: Close the door with the right gripper - subtask_index: 2 - - subtask: Touch the left cabinet door with the left gripper - subtask_index: 3 - - subtask: Close the door with the left gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - push - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 8141 - dataset_size: 280.57 MB - data_structure: "Airbot_MMK2_close_doors_qced_hardlink/\n|-- annotations\n| |--\ - \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ - \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n|\ - \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... (38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_close_drawer.yaml b/dataset_info/Airbot_MMK2_close_drawer.yaml index 26d9e9b3cb5c719193e60cdc7b18ec8c07c18dff..edce872bef240daac5d072571e5c76de4c8e8052 100644 --- a/dataset_info/Airbot_MMK2_close_drawer.yaml +++ b/dataset_info/Airbot_MMK2_close_drawer.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: layer_transparent_drawer level1: storage_utensils level2: layer_transparent_drawer @@ -39,24 +39,28 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: close the top drawer by hand. +task_instruction: +- close the top drawer by hand. 
sub_tasks: -- End -- Close the top drawer with the right hand -- 'null' +- subtask: End + subtask_index: 0 +- subtask: Close the top drawer with the right hand + subtask_index: 1 +- subtask: 'null' + subtask_index: 2 atomic_actions: - push -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -65,13 +69,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -79,8 +80,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 7377 fps: 30 @@ -107,11 +107,9 @@ data_structure: "Airbot_MMK2_close_drawer_qced_hardlink/\n|-- annotations\n| | | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -381,7 +379,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -389,7 +387,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -416,124 +413,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_close_drawer - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. 
- objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - close the top drawer by hand. - sub_tasks: - - subtask: End - subtask_index: 0 - - subtask: Close the top drawer with the right hand - subtask_index: 1 - - subtask: 'null' - subtask_index: 2 - atomic_actions: - - push - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 7377 - dataset_size: 217.36 MB - data_structure: "Airbot_MMK2_close_drawer_qced_hardlink/\n|-- annotations\n| |--\ - \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ - \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n|\ - \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_close_lid.yaml b/dataset_info/Airbot_MMK2_close_lid.yaml index 78cbb695be4c8e0109bc7bc7ff969a0217e1b656..c957d53912b8d7f3488033991ef24f4dbe10f0f0 100644 --- a/dataset_info/Airbot_MMK2_close_lid.yaml +++ b/dataset_info/Airbot_MMK2_close_lid.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: any_storage_box level1: storage utensils level2: any_storage_box @@ -39,28 +39,36 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: close the box lid by hand. +task_instruction: +- close the box lid by hand. sub_tasks: -- Touch the box lid with the left gripper -- Close the box lid with the left gripper -- End -- Touch the box lid with the right gripper -- Abnormal -- Close the box lid with the right gripper -- 'null' +- subtask: Touch the box lid with the left gripper + subtask_index: 0 +- subtask: Close the box lid with the left gripper + subtask_index: 1 +- subtask: End + subtask_index: 2 +- subtask: Touch the box lid with the right gripper + subtask_index: 3 +- subtask: Abnormal + subtask_index: 4 +- subtask: Close the box lid with the right gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - close -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -69,13 +77,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -83,8 +88,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 239 total_frames: 28930 fps: 30 @@ -111,11 +115,9 @@ data_structure: "Airbot_MMK2_close_lid_qced_hardlink/\n|-- annotations\n| |-- | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:238 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -385,7 +387,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -393,7 +395,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -420,132 +421,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_close_lid - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: study_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - close the box lid by hand. 
- sub_tasks: - - subtask: Touch the box lid with the left gripper - subtask_index: 0 - - subtask: Close the box lid with the left gripper - subtask_index: 1 - - subtask: End - subtask_index: 2 - - subtask: Touch the box lid with the right gripper - subtask_index: 3 - - subtask: Abnormal - subtask_index: 4 - - subtask: Close the box lid with the right gripper - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - close - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 28930 - dataset_size: 965.25 MB - data_structure: "Airbot_MMK2_close_lid_qced_hardlink/\n|-- annotations\n| |--\ - \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ - \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n|\ - \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(227 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_cover_lid.yaml b/dataset_info/Airbot_MMK2_cover_lid.yaml index e3ea3e4aceaebd57c75e7984dcd48d5318340568..a1cd8f7bbec5cc4e754c0442b0a39787c5ec3198 100644 --- a/dataset_info/Airbot_MMK2_cover_lid.yaml +++ b/dataset_info/Airbot_MMK2_cover_lid.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: lid level1: laboratory_supplies level2: lid @@ -45,28 +45,35 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the lid with both hands and cover the box. +task_instruction: +- pick up the lid with both hands and cover the box. sub_tasks: -- Place the lid on the box with the left gripper -- End -- Grasp the lid with the right gripper -- Place the lid on the box with the right gripper -- Grasp the lid with the left gripper -- 'null' +- subtask: Place the lid on the box with the left gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Grasp the lid with the right gripper + subtask_index: 2 +- subtask: Place the lid on the box with the right gripper + subtask_index: 3 +- subtask: Grasp the lid with the left gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - lift - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -75,13 +82,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -89,8 +93,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 16213 fps: 30 @@ -117,11 +120,9 @@ data_structure: "Airbot_MMK2_cover_lid_qced_hardlink/\n|-- annotations\n| |-- | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -391,7 +392,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -399,7 +400,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -426,131 +426,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_cover_lid - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the lid with both hands and cover the box. 
- sub_tasks: - - subtask: Place the lid on the box with the left gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Grasp the lid with the right gripper - subtask_index: 2 - - subtask: Place the lid on the box with the right gripper - subtask_index: 3 - - subtask: Grasp the lid with the left gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - lift - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 16213 - dataset_size: 569.80 MB - data_structure: "Airbot_MMK2_cover_lid_qced_hardlink/\n|-- annotations\n| |--\ - \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ - \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n|\ - \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_cut_scallion.yaml b/dataset_info/Airbot_MMK2_cut_scallion.yaml index 7582b66235fb8cc8a5d14b12ac951b5fa6c7e55a..e97e23be44ee8d6c55c6bc3c9a36c9d846b821c9 100644 --- a/dataset_info/Airbot_MMK2_cut_scallion.yaml +++ b/dataset_info/Airbot_MMK2_cut_scallion.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: kitchen_knife level1: kitchen_supplies level2: kitchen_knife @@ -45,30 +45,37 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the kitchen knife with your hand and cut the vegetables. +task_instruction: +- pick up the kitchen knife with your hand and cut the vegetables. sub_tasks: -- Grasp the kitchen knife with the right gripper -- Place the kitchen knife back on the knife holder with the right gripper -- End -- Cut scallions with the right gripper -- Press the scallion with the left gripper -- 'null' +- subtask: Grasp the kitchen knife with the right gripper + subtask_index: 0 +- subtask: Place the kitchen knife back on the knife holder with the right gripper + subtask_index: 1 +- subtask: End + subtask_index: 2 +- subtask: Cut scallions with the right gripper + subtask_index: 3 +- subtask: Press the scallion with the left gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place - cut -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -77,13 +84,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -91,8 +95,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 97 total_frames: 33460 fps: 30 @@ -119,11 +122,9 @@ data_structure: "Airbot_MMK2_cut_scallion_qced_hardlink/\n|-- annotations\n| | | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:96 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -393,7 +394,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -401,7 +402,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -428,133 +428,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_cut_scallion - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the kitchen knife with your hand and cut the vegetables. 
- sub_tasks: - - subtask: Grasp the kitchen knife with the right gripper - subtask_index: 0 - - subtask: Place the kitchen knife back on the knife holder with the right gripper - subtask_index: 1 - - subtask: End - subtask_index: 2 - - subtask: Cut scallions with the right gripper - subtask_index: 3 - - subtask: Press the scallion with the left gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - - cut - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 33460 - dataset_size: 1.17 GB - data_structure: "Airbot_MMK2_cut_scallion_qced_hardlink/\n|-- annotations\n| |--\ - \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ - \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n|\ - \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(85 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_dial_number.yaml b/dataset_info/Airbot_MMK2_dial_number.yaml index 90a3cee03eef2839a34f818dd472b5baa9235895..cf15331b23f430b0f08f00919c80afb87ed0ff54 100644 --- a/dataset_info/Airbot_MMK2_dial_number.yaml +++ b/dataset_info/Airbot_MMK2_dial_number.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: mobile_phone level1: appliances level2: mobile_phone @@ -39,30 +39,37 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the phone button and then put it down. +task_instruction: +- pick up the phone button and then put it down. sub_tasks: -- Dial the number with the left gripper -- End -- Lift the phone with the right gripper -- Grasp the phone with the right gripper -- Place the phone on the table with the right gripper -- 'null' +- subtask: Dial the number with the left gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Lift the phone with the right gripper + subtask_index: 2 +- subtask: Grasp the phone with the right gripper + subtask_index: 3 +- subtask: Place the phone on the table with the right gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place - press -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -71,13 +78,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -85,8 +89,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 98 total_frames: 41296 fps: 30 @@ -113,11 +116,9 @@ data_structure: "Airbot_MMK2_dial_number_qced_hardlink/\n|-- annotations\n| |- | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:97 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -387,7 +388,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -395,7 +396,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -422,133 +422,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_dial_number - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: bedroom - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the phone button and then put it down. 
- sub_tasks: - - subtask: Dial the number with the left gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Lift the phone with the right gripper - subtask_index: 2 - - subtask: Grasp the phone with the right gripper - subtask_index: 3 - - subtask: Place the phone on the table with the right gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - - press - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 41296 - dataset_size: 1.39 GB - data_structure: "Airbot_MMK2_dial_number_qced_hardlink/\n|-- annotations\n| |--\ - \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ - \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n|\ - \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(86 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_doodled_line.yaml b/dataset_info/Airbot_MMK2_doodled_line.yaml index de0b796eeb17e40c55c2cd68c1e9d1ec8eb3ca76..9a03592584486ed9c94136eb08b70c5463a4f942 100644 --- a/dataset_info/Airbot_MMK2_doodled_line.yaml +++ b/dataset_info/Airbot_MMK2_doodled_line.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: ballpoint_pen level1: stationery level2: ballpoint_pen @@ -45,33 +45,42 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: Pick up the ballpoint pen and leave your handwriting on the paper. +task_instruction: +- Pick up the ballpoint pen and leave your handwriting on the paper. sub_tasks: -- Close the pen switch with the right gripper -- End -- Lift the pen with the right gripper -- Grasp the pen with the right gripper -- Place the pen on the table with the right gripper -- Write on paper with a pen with right gripper -- Open the pen switch with the right gripper -- 'null' +- subtask: Close the pen switch with the right gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Lift the pen with the right gripper + subtask_index: 2 +- subtask: Grasp the pen with the right gripper + subtask_index: 3 +- subtask: Place the pen on the table with the right gripper + subtask_index: 4 +- subtask: Write on paper with a pen with right gripper + subtask_index: 5 +- subtask: Open the pen switch with the right gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - pick - place - pressbutton - write -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -80,13 +89,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -94,8 +100,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 99 total_frames: 57693 fps: 30 @@ -122,11 +127,9 @@ data_structure: "Airbot_MMK2_doodled_line_qced_hardlink/\n|-- annotations\n| | | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:98 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -396,7 +399,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -404,7 +407,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -431,138 +433,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_doodled_line - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: other - level2: laboratory - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - Pick up the ballpoint pen and leave your handwriting on the paper. 
- sub_tasks: - - subtask: Close the pen switch with the right gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Lift the pen with the right gripper - subtask_index: 2 - - subtask: Grasp the pen with the right gripper - subtask_index: 3 - - subtask: Place the pen on the table with the right gripper - subtask_index: 4 - - subtask: Write on paper with a pen with right gripper - subtask_index: 5 - - subtask: Open the pen switch with the right gripper - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - pick - - place - - pressbutton - - write - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 57693 - dataset_size: 2.09 GB - data_structure: "Airbot_MMK2_doodled_line_qced_hardlink/\n|-- annotations\n| |--\ - \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ - \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n|\ - \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(87 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_move_apple_orange_pomegranate.yaml b/dataset_info/Airbot_MMK2_move_apple_orange_pomegranate.yaml index 764227de2eb299981defe3114bb27743bb9c980c..d23458e6af41b9ea4e5edff87db3a2c4a4414198 100644 --- a/dataset_info/Airbot_MMK2_move_apple_orange_pomegranate.yaml +++ b/dataset_info/Airbot_MMK2_move_apple_orange_pomegranate.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: apple level1: fruits level2: apple @@ -51,32 +51,42 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: arrange apples, oranges, and pomegranates neatly. +task_instruction: +- arrange apples, oranges, and pomegranates neatly. sub_tasks: -- place the orange on the right side of the table with the right gripper -- Grasp the orange with the right gripper -- place the pomegranate on the middle side of the table with the right gripper -- End -- place the apple on the left side of the table with the left gripper -- Grasp the apple with the left gripper -- Abnormal -- Grasp the pomegranate with the right gripper -- 'null' +- subtask: place the orange on the right side of the table with the right gripper + subtask_index: 0 +- subtask: Grasp the orange with the right gripper + subtask_index: 1 +- subtask: place the pomegranate on the middle side of the table with the right gripper + subtask_index: 2 +- subtask: End + subtask_index: 3 +- subtask: place the apple on the left side of the table with the left gripper + subtask_index: 4 +- subtask: Grasp the apple with the left gripper + subtask_index: 5 +- subtask: Abnormal + subtask_index: 6 +- subtask: Grasp the pomegranate with the right gripper + subtask_index: 7 +- subtask: 'null' + subtask_index: 8 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -85,13 +95,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -99,8 +106,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 59 total_frames: 30023 fps: 30 @@ -128,11 +134,9 @@ data_structure: "Airbot_MMK2_move_apple_orange_pomegranate_qced_hardlink/\n|-- a \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:58 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -402,7 +406,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -410,7 +414,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -437,139 +440,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_move_apple_orange_pomegranate - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - arrange apples, oranges, and pomegranates neatly. 
- sub_tasks: - - subtask: place the orange on the right side of the table with the right gripper - subtask_index: 0 - - subtask: Grasp the orange with the right gripper - subtask_index: 1 - - subtask: place the pomegranate on the middle side of the table with the right - gripper - subtask_index: 2 - - subtask: End - subtask_index: 3 - - subtask: place the apple on the left side of the table with the left gripper - subtask_index: 4 - - subtask: Grasp the apple with the left gripper - subtask_index: 5 - - subtask: Abnormal - subtask_index: 6 - - subtask: Grasp the pomegranate with the right gripper - subtask_index: 7 - - subtask: 'null' - subtask_index: 8 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 30023 - dataset_size: 1.09 GB - data_structure: "Airbot_MMK2_move_apple_orange_pomegranate_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(47 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_move_block.yaml b/dataset_info/Airbot_MMK2_move_block.yaml index 0c4231048591ed19d9bdbe9f7a879af57eab0e95..45d23ddc624d7027c3404f47b33e9ef7ffe3a081 100644 --- a/dataset_info/Airbot_MMK2_move_block.yaml +++ b/dataset_info/Airbot_MMK2_move_block.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: square_building_blocks level1: toys level2: square_building_blocks @@ -45,29 +45,36 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the square blocks into the circular toy. +task_instruction: +- put the square blocks into the circular toy. sub_tasks: -- Place the yellow block in the blue circle with the right gripper -- Grasp the yellow block with the left gripper -- Grasp the yellow block with the rightt gripper -- Place the yellow block in the blue circle with the left gripper -- End -- 'null' +- subtask: Place the yellow block in the blue circle with the right gripper + subtask_index: 0 +- subtask: Grasp the yellow block with the left gripper + subtask_index: 1 +- subtask: Grasp the yellow block with the rightt gripper + subtask_index: 2 +- subtask: Place the yellow block in the blue circle with the left gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -76,13 +83,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -90,8 +94,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 7264 fps: 30 @@ -118,11 +121,9 @@ data_structure: "Airbot_MMK2_move_block_qced_hardlink/\n|-- annotations\n| |-- | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -392,7 +393,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -400,7 +401,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -427,132 +427,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_move_block - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: bedroom - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the square blocks into the circular toy. 
- sub_tasks: - - subtask: Place the yellow block in the blue circle with the right gripper - subtask_index: 0 - - subtask: Grasp the yellow block with the left gripper - subtask_index: 1 - - subtask: Grasp the yellow block with the rightt gripper - subtask_index: 2 - - subtask: Place the yellow block in the blue circle with the left gripper - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 7264 - dataset_size: 300.47 MB - data_structure: "Airbot_MMK2_move_block_qced_hardlink/\n|-- annotations\n| |--\ - \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ - \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n|\ - \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_move_block_both_hands.yaml b/dataset_info/Airbot_MMK2_move_block_both_hands.yaml index 6478fc3fa4a0e6240245cbcb6e935e05fd40d663..bae034fbf1a66ba026c19533ea7d734e8824e702 100644 --- a/dataset_info/Airbot_MMK2_move_block_both_hands.yaml +++ b/dataset_info/Airbot_MMK2_move_block_both_hands.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: square_building_blocks level1: toys level2: square_building_blocks @@ -39,30 +39,37 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the building blocks with both hands simultaneously and place - them on the blocks. +task_instruction: +- pick up the building blocks with both hands simultaneously and place them on the + blocks. sub_tasks: -- Place the blue block on top of the red block with the left gripper -- End -- Grasp the green block with the right gripper -- Grasp the blue block with the left gripper -- Place the green block on top of the yellow block with the right gripper -- 'null' +- subtask: Place the blue block on top of the red block with the left gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Grasp the green block with the right gripper + subtask_index: 2 +- subtask: Grasp the blue block with the left gripper + subtask_index: 3 +- subtask: Place the green block on top of the yellow block with the right gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -71,13 +78,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -85,8 +89,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 54 total_frames: 8888 fps: 30 @@ -114,11 +117,9 @@ data_structure: "Airbot_MMK2_move_block_both_hands_qced_hardlink/\n|-- annotatio \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:53 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -388,7 +389,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -396,7 +397,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -423,133 +423,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_move_block_both_hands - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the building blocks with both hands simultaneously and place them on the - blocks. 
- sub_tasks: - - subtask: Place the blue block on top of the red block with the left gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Grasp the green block with the right gripper - subtask_index: 2 - - subtask: Grasp the blue block with the left gripper - subtask_index: 3 - - subtask: Place the green block on top of the yellow block with the right gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 8888 - dataset_size: 388.10 MB - data_structure: "Airbot_MMK2_move_block_both_hands_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(42 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_move_block_gold_bar_models.yaml b/dataset_info/Airbot_MMK2_move_block_gold_bar_models.yaml index 09e51de39d14be65962e2c00d807139d1cf9f6c1..7eb96f91d4ae5c3a466b28d923c9e5fa9559922a 100644 --- a/dataset_info/Airbot_MMK2_move_block_gold_bar_models.yaml +++ b/dataset_info/Airbot_MMK2_move_block_gold_bar_models.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: eyeglass_case level1: laboratory_supplies level2: eyeglass_case @@ -45,32 +45,41 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: place the glasses case in the middle of the table with your left - hand and put the toy gold bar on the glasses case with your right hand. +task_instruction: +- place the glasses case in the middle of the table with your left hand and put the + toy gold bar on the glasses case with your right hand. sub_tasks: -- Place the gold bar on the glasses case with the right gripper -- Grasp the glasses case with the left gripper -- Abnormal -- Static -- Place the glasses case on the middle of the table with the left gripper -- End -- Grasp the gold bar the right gripper -- 'null' +- subtask: Place the gold bar on the glasses case with the right gripper + subtask_index: 0 +- subtask: Grasp the glasses case with the left gripper + subtask_index: 1 +- subtask: Abnormal + subtask_index: 2 +- subtask: Static + subtask_index: 3 +- subtask: Place the glasses case on the middle of the table with the left gripper + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: Grasp the gold bar the right gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -79,13 +88,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -93,8 +99,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 10703 fps: 30 @@ -122,11 +127,9 @@ data_structure: "Airbot_MMK2_move_block_gold_bar_models_qced_hardlink/\n|-- anno \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -396,7 +399,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -404,7 +407,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -431,137 +433,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_move_block_gold_bar_models - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - place the glasses case in the middle of the table with your left hand and put - the toy gold bar on the glasses case with your right hand. 
- sub_tasks: - - subtask: Place the gold bar on the glasses case with the right gripper - subtask_index: 0 - - subtask: Grasp the glasses case with the left gripper - subtask_index: 1 - - subtask: Abnormal - subtask_index: 2 - - subtask: Static - subtask_index: 3 - - subtask: Place the glasses case on the middle of the table with the left gripper - subtask_index: 4 - - subtask: End - subtask_index: 5 - - subtask: Grasp the gold bar the right gripper - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 10703 - dataset_size: 434.03 MB - data_structure: "Airbot_MMK2_move_block_gold_bar_models_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_move_block_twice.yaml b/dataset_info/Airbot_MMK2_move_block_twice.yaml index 92f49c5e064f75c9edffdb5bc570135b229fce16..e104ea2d4457c0a315170d1cd8f1390ffcecb6ba 100644 --- a/dataset_info/Airbot_MMK2_move_block_twice.yaml +++ b/dataset_info/Airbot_MMK2_move_block_twice.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: rubik's_cube level1: toys level2: rubik's_cube @@ -45,30 +45,37 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: place the building blocks on the Rubik's Cube with your left hand - and take them down with your right hand. +task_instruction: +- place the building blocks on the Rubik's Cube with your left hand and take them + down with your right hand. sub_tasks: -- Grasp the yellow build block with the left gripper -- End -- Place the yellow build block on the Rubik's Cube with the left gripper -- Place the yellow build block on the table with the right gripper -- Grasp the yellow build block with the right gripper -- 'null' +- subtask: Grasp the yellow build block with the left gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Place the yellow build block on the Rubik's Cube with the left gripper + subtask_index: 2 +- subtask: Place the yellow build block on the table with the right gripper + subtask_index: 3 +- subtask: Grasp the yellow build block with the right gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -77,13 +84,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -91,8 +95,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 57 total_frames: 14643 fps: 30 @@ -120,11 +123,9 @@ data_structure: "Airbot_MMK2_move_block_twice_qced_hardlink/\n|-- annotations\n| \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:56 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -394,7 +395,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -402,7 +403,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -429,133 +429,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_move_block_twice - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - place the building blocks on the Rubik's Cube with your left hand and take them - down with your right hand. 
- sub_tasks: - - subtask: Grasp the yellow build block with the left gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Place the yellow build block on the Rubik's Cube with the left gripper - subtask_index: 2 - - subtask: Place the yellow build block on the table with the right gripper - subtask_index: 3 - - subtask: Grasp the yellow build block with the right gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 14643 - dataset_size: 666.57 MB - data_structure: "Airbot_MMK2_move_block_twice_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(45 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_move_block_wet_wipes.yaml b/dataset_info/Airbot_MMK2_move_block_wet_wipes.yaml index 156d9a7ae70f65f0f941fc6269cff9d5671247b0..3414f0caf9ae3fe3e1cf3f149c62fb9cd4476d05 100644 --- a/dataset_info/Airbot_MMK2_move_block_wet_wipes.yaml +++ b/dataset_info/Airbot_MMK2_move_block_wet_wipes.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: white_tray level1: kitchen_supplies level2: white_tray @@ -51,31 +51,39 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the wet wipes and building blocks into the white tray respectively - with your left and right hands. +task_instruction: +- put the wet wipes and building blocks into the white tray respectively with your + left and right hands. sub_tasks: -- Place the arched build blocks on the white board with the right gripper -- End -- Grasp the wet wipes with the left gripper -- Grasp the arched build blocks with the right gripper -- Place the wet wipes on the white board with the left gripper -- Abnormal -- 'null' +- subtask: Place the arched build blocks on the white board with the right gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Grasp the wet wipes with the left gripper + subtask_index: 2 +- subtask: Grasp the arched build blocks with the right gripper + subtask_index: 3 +- subtask: Place the wet wipes on the white board with the left gripper + subtask_index: 4 +- subtask: Abnormal + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -84,13 +92,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -98,8 +103,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 60 total_frames: 15271 fps: 30 @@ -186,11 +190,9 @@ data_structure: 'Airbot_MMK2_move_block_wet_wipes_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:59 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -460,7 +462,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -468,7 +470,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: 
robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -495,194 +496,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_move_block_wet_wipes - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the wet wipes and building blocks into the white tray respectively with your - left and right hands. 
- sub_tasks: - - subtask: Place the arched build blocks on the white board with the right gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Grasp the wet wipes with the left gripper - subtask_index: 2 - - subtask: Grasp the arched build blocks with the right gripper - subtask_index: 3 - - subtask: Place the wet wipes on the white board with the left gripper - subtask_index: 4 - - subtask: Abnormal - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 15271 - dataset_size: 479.18 MB - data_structure: 'Airbot_MMK2_move_block_wet_wipes_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(48 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_front_rgb - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ 
Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_move_book_front.yaml b/dataset_info/Airbot_MMK2_move_book_front.yaml index 091980ef6e2c94c3588d17e08425ce19724271c0..43711cc79dec1e61f1d44fcc681e42da23bc7355 100644 --- a/dataset_info/Airbot_MMK2_move_book_front.yaml +++ b/dataset_info/Airbot_MMK2_move_book_front.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: book level1: stationery level2: book @@ -39,30 +39,37 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the book with your left hand and hand it to your right hand, - then place it on top of another book. +task_instruction: +- pick up the book with your left hand and hand it to your right hand, then place + it on top of another book. sub_tasks: -- Deliver the yellow book from left gripper to right gripper -- Place the yellow book on the white book with the right gripper -- Grasp the yellow book with the left gripper -- Static -- End -- 'null' +- subtask: Deliver the yellow book from left gripper to right gripper + subtask_index: 0 +- subtask: Place the yellow book on the white book with the right gripper + subtask_index: 1 +- subtask: Grasp the yellow book with the left gripper + subtask_index: 2 +- subtask: Static + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -71,13 +78,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -85,8 +89,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 49 total_frames: 10519 fps: 30 @@ -173,11 +176,9 @@ data_structure: 'Airbot_MMK2_move_book_front_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:48 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -447,7 +448,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -455,7 +456,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn 
contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -482,192 +482,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_move_book_front - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: study_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the book with your left hand and hand it to your right hand, then place - it on top of another book. 
- sub_tasks: - - subtask: Deliver the yellow book from left gripper to right gripper - subtask_index: 0 - - subtask: Place the yellow book on the white book with the right gripper - subtask_index: 1 - - subtask: Grasp the yellow book with the left gripper - subtask_index: 2 - - subtask: Static - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 10519 - dataset_size: 396.69 MB - data_structure: 'Airbot_MMK2_move_book_front_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(37 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_front_rgb - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ 
Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_move_book_right_side.yaml b/dataset_info/Airbot_MMK2_move_book_right_side.yaml index fab38d8ea83c2ee7fb21408e3ab274f61384036d..ae0874c223996d2409e7af5897fb26213e095f6f 100644 --- a/dataset_info/Airbot_MMK2_move_book_right_side.yaml +++ b/dataset_info/Airbot_MMK2_move_book_right_side.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: book level1: stationery level2: book @@ -39,29 +39,35 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the book with your left hand, hand it to your right hand - and then put it down. +task_instruction: +- pick up the book with your left hand, hand it to your right hand and then put it + down. sub_tasks: -- Pass the yellow book to right gripper -- End -- Grasp the yellow book with left gripper -- Place the yellow book in the center of table with right gripper -- 'null' +- subtask: Pass the yellow book to right gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Grasp the yellow book with left gripper + subtask_index: 2 +- subtask: Place the yellow book in the center of table with right gripper + subtask_index: 3 +- subtask: 'null' + subtask_index: 4 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -70,13 +76,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -84,8 +87,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 46 total_frames: 7183 fps: 30 @@ -113,11 +115,9 @@ data_structure: "Airbot_MMK2_move_book_right_side_qced_hardlink/\n|-- annotation \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:45 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -387,7 +387,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -395,7 +395,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -422,131 +421,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_move_book_right_side - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: study_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the book with your left hand, hand it to your right hand and then put - it down. 
- sub_tasks: - - subtask: Pass the yellow book to right gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Grasp the yellow book with left gripper - subtask_index: 2 - - subtask: Place the yellow book in the center of table with right gripper - subtask_index: 3 - - subtask: 'null' - subtask_index: 4 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 7183 - dataset_size: 225.02 MB - data_structure: "Airbot_MMK2_move_book_right_side_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(34 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_move_bottle_tape_measure.yaml b/dataset_info/Airbot_MMK2_move_bottle_tape_measure.yaml index 470bfb68c72f2d7ecd28f34468a36a004fd88e1c..2dd62f7fd039dcb9048a804c040310fc4472a198 100644 --- a/dataset_info/Airbot_MMK2_move_bottle_tape_measure.yaml +++ b/dataset_info/Airbot_MMK2_move_bottle_tape_measure.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: white_basket level1: home_storage level2: white_basket @@ -51,31 +51,39 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: take the water bottle and the ruler off the white box respectively - with your left and right hands. +task_instruction: +- take the water bottle and the ruler off the white box respectively with your left + and right hands. sub_tasks: -- Grasp the tape measure with the right gripper -- Abnormal -- Grasp the mineral water with the left gripper -- Place the tape measure on the table with the right gripper -- Place the mineral water on the table with the left gripper -- End -- 'null' +- subtask: Grasp the tape measure with the right gripper + subtask_index: 0 +- subtask: Abnormal + subtask_index: 1 +- subtask: Grasp the mineral water with the left gripper + subtask_index: 2 +- subtask: Place the tape measure on the table with the right gripper + subtask_index: 3 +- subtask: Place the mineral water on the table with the left gripper + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -84,13 +92,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -98,8 +103,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 44 total_frames: 7786 fps: 30 @@ -127,11 +131,9 @@ data_structure: "Airbot_MMK2_move_bottle_tape_measure_qced_hardlink/\n|-- annota \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:43 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -401,7 +403,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -409,7 +411,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -436,135 +437,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_move_bottle_tape_measure - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - take the water bottle and the ruler off the white box respectively with your left - and right hands. 
- sub_tasks: - - subtask: Grasp the tape measure with the right gripper - subtask_index: 0 - - subtask: Abnormal - subtask_index: 1 - - subtask: Grasp the mineral water with the left gripper - subtask_index: 2 - - subtask: Place the tape measure on the table with the right gripper - subtask_index: 3 - - subtask: Place the mineral water on the table with the left gripper - subtask_index: 4 - - subtask: End - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 7786 - dataset_size: 235.21 MB - data_structure: "Airbot_MMK2_move_bottle_tape_measure_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(32 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_move_cake.yaml b/dataset_info/Airbot_MMK2_move_cake.yaml index 90ba5c6089b7da2b4a5619a149eb0e079e11226b..4a256a4940b20f1f58e5e33e66ce6a54631ce5e1 100644 --- a/dataset_info/Airbot_MMK2_move_cake.yaml +++ b/dataset_info/Airbot_MMK2_move_cake.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: shelf level1: home_storage level2: shelf @@ -45,35 +45,48 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: place two cakes on the shelf with each hand respectively. +task_instruction: +- place two cakes on the shelf with each hand respectively. sub_tasks: -- Place the ice cream into the plate with the right gripper -- Place the cake onto the block toy with the right gripper -- Grasp the cake with the left gripper -- Place the cake on the yellow cube block with the left gripper -- Place the cake on the blue cube block with the right gripper -- Place the cake onto the block toy with the left gripper -- Grasp the cake with the right gripper -- Static -- Grasp the cake from the table and with the right gripper -- Grasp the cake from the table and with the left gripper -- End -- 'null' +- subtask: Place the ice cream into the plate with the right gripper + subtask_index: 0 +- subtask: Place the cake onto the block toy with the right gripper + subtask_index: 1 +- subtask: Grasp the cake with the left gripper + subtask_index: 2 +- subtask: Place the cake on the yellow cube block with the left gripper + subtask_index: 3 +- subtask: Place the cake on the blue cube block with the right gripper + subtask_index: 4 +- subtask: Place the cake onto the block toy with the left gripper + subtask_index: 5 +- subtask: Grasp the cake with the right gripper + subtask_index: 6 +- subtask: Static + subtask_index: 7 +- subtask: Grasp the cake from the table and with the right gripper + subtask_index: 8 +- 
subtask: Grasp the cake from the table and with the left gripper + subtask_index: 9 +- subtask: End + subtask_index: 10 +- subtask: 'null' + subtask_index: 11 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -82,13 +95,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -96,8 +106,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 94 total_frames: 22873 fps: 30 @@ -184,11 +193,9 @@ data_structure: 'Airbot_MMK2_move_cake_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:93 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -458,7 +465,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing 
Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -466,7 +473,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -493,203 +499,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_move_cake - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - place two cakes on the shelf with each hand respectively. 
- sub_tasks: - - subtask: Place the ice cream into the plate with the right gripper - subtask_index: 0 - - subtask: Place the cake onto the block toy with the right gripper - subtask_index: 1 - - subtask: Grasp the cake with the left gripper - subtask_index: 2 - - subtask: Place the cake on the yellow cube block with the left gripper - subtask_index: 3 - - subtask: Place the cake on the blue cube block with the right gripper - subtask_index: 4 - - subtask: Place the cake onto the block toy with the left gripper - subtask_index: 5 - - subtask: Grasp the cake with the right gripper - subtask_index: 6 - - subtask: Static - subtask_index: 7 - - subtask: Grasp the cake from the table and with the right gripper - subtask_index: 8 - - subtask: Grasp the cake from the table and with the left gripper - subtask_index: 9 - - subtask: End - subtask_index: 10 - - subtask: 'null' - subtask_index: 11 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 22873 - dataset_size: 1009.39 MB - data_structure: 'Airbot_MMK2_move_cake_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (82 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_front_rgb - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_move_cake_tape_measure.yaml b/dataset_info/Airbot_MMK2_move_cake_tape_measure.yaml index ebe5d3fdb1f9b6ad23f67481b5970aa160940c7a..d652236d012a2c28eed2ba0274142f0768be0963 100644 --- a/dataset_info/Airbot_MMK2_move_cake_tape_measure.yaml +++ b/dataset_info/Airbot_MMK2_move_cake_tape_measure.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: cake level1: bread level2: cake @@ -51,30 +51,37 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the cake and tape measure with both hands and place them - on the cardboard box. +task_instruction: +- pick up the cake and tape measure with both hands and place them on the cardboard + box. 
sub_tasks: -- Grasp the tape measure with the right gripper -- Place the bagged waffle on the carton with the left gripper -- Grasp the bagged waffle with the left gripper -- Place the tape measure on the carton with the right gripper -- End -- 'null' +- subtask: Grasp the tape measure with the right gripper + subtask_index: 0 +- subtask: Place the bagged waffle on the carton with the left gripper + subtask_index: 1 +- subtask: Grasp the bagged waffle with the left gripper + subtask_index: 2 +- subtask: Place the tape measure on the carton with the right gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -83,13 +90,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -97,8 +101,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 49 total_frames: 9381 fps: 30 @@ 
-126,11 +129,9 @@ data_structure: "Airbot_MMK2_move_cake_tape_measure_qced_hardlink/\n|-- annotati \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:48 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -400,7 +401,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -408,7 +409,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -435,133 +435,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. 
- extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_move_cake_tape_measure - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: other - level2: courier_station - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the cake and tape measure with both hands and place them on the cardboard - box. - sub_tasks: - - subtask: Grasp the tape measure with the right gripper - subtask_index: 0 - - subtask: Place the bagged waffle on the carton with the left gripper - subtask_index: 1 - - subtask: Grasp the bagged waffle with the left gripper - subtask_index: 2 - - subtask: Place the tape measure on the carton with the right gripper - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 9381 - dataset_size: 362.05 MB - data_structure: "Airbot_MMK2_move_cake_tape_measure_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... (37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_move_cup_paper_box.yaml b/dataset_info/Airbot_MMK2_move_cup_paper_box.yaml index c7b3231d7c019daecff1c437aa6348bef6c4b006..0242c0576f4f6686d5307b1bb7f29214e1d0391b 100644 --- a/dataset_info/Airbot_MMK2_move_cup_paper_box.yaml +++ b/dataset_info/Airbot_MMK2_move_cup_paper_box.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: paper_boxes level1: home_storage level2: paper_boxes @@ -51,31 +51,39 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: place the paper box on the paper box with your left hand and put - the coffee cup on the plate with your right hand. +task_instruction: +- place the paper box on the paper box with your left hand and put the coffee cup + on the plate with your right hand. 
sub_tasks: -- Grasp the mouse box with the left gripper -- Place the mouse box on the calculator box with the left gripper -- End -- Grasp the coffee cup with the right gripper -- Place the coffee cup on the white plate with the right gripper -- Place the coffee cup on the pink plate with the right gripper -- 'null' +- subtask: Grasp the mouse box with the left gripper + subtask_index: 0 +- subtask: Place the mouse box on the calculator box with the left gripper + subtask_index: 1 +- subtask: End + subtask_index: 2 +- subtask: Grasp the coffee cup with the right gripper + subtask_index: 3 +- subtask: Place the coffee cup on the white plate with the right gripper + subtask_index: 4 +- subtask: Place the coffee cup on the pink plate with the right gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -84,13 +92,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -98,8 +103,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 93 total_frames: 25588 fps: 30 @@ -127,11 +131,9 @@ data_structure: "Airbot_MMK2_move_cup_paper_box_qced_hardlink/\n|-- annotations\ \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:92 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -401,7 +403,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -409,7 +411,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -436,135 +437,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_move_cup_paper_box - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - place the paper box on the paper box with your left hand and put the coffee cup - on the plate with your right hand. 
- sub_tasks: - - subtask: Grasp the mouse box with the left gripper - subtask_index: 0 - - subtask: Place the mouse box on the calculator box with the left gripper - subtask_index: 1 - - subtask: End - subtask_index: 2 - - subtask: Grasp the coffee cup with the right gripper - subtask_index: 3 - - subtask: Place the coffee cup on the white plate with the right gripper - subtask_index: 4 - - subtask: Place the coffee cup on the pink plate with the right gripper - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 25588 - dataset_size: 929.19 MB - data_structure: "Airbot_MMK2_move_cup_paper_box_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(81 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_move_fake_food.yaml b/dataset_info/Airbot_MMK2_move_fake_food.yaml index 1f10b52ea65dc405b38d97282814025181ec2cb2..af701676939d3406972404909754b69cd70ca974 100644 --- a/dataset_info/Airbot_MMK2_move_fake_food.yaml +++ b/dataset_info/Airbot_MMK2_move_fake_food.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: early_education_toys level1: toys level2: early_education_toys @@ -39,28 +39,34 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the toy food on the toy table with your right hand. +task_instruction: +- put the toy food on the toy table with your right hand. sub_tasks: -- End -- Grasp the small bowl of canned food with right gripper -- Place the small bowl of canned food on the table with the right gripper -- Abnormal -- 'null' +- subtask: End + subtask_index: 0 +- subtask: Grasp the small bowl of canned food with right gripper + subtask_index: 1 +- subtask: Place the small bowl of canned food on the table with the right gripper + subtask_index: 2 +- subtask: Abnormal + subtask_index: 3 +- subtask: 'null' + subtask_index: 4 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -69,13 +75,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -83,8 +86,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 7610 fps: 30 @@ -111,11 +113,9 @@ data_structure: "Airbot_MMK2_move_fake_food_qced_hardlink/\n|-- annotations\n| | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -385,7 +385,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -393,7 +393,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -420,130 +419,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_move_fake_food - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the toy food on the toy table with your right hand. 
- sub_tasks: - - subtask: End - subtask_index: 0 - - subtask: Grasp the small bowl of canned food with right gripper - subtask_index: 1 - - subtask: Place the small bowl of canned food on the table with the right gripper - subtask_index: 2 - - subtask: Abnormal - subtask_index: 3 - - subtask: 'null' - subtask_index: 4 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 7610 - dataset_size: 313.62 MB - data_structure: "Airbot_MMK2_move_fake_food_qced_hardlink/\n|-- annotations\n| \ - \ |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_move_medicine_bottle.yaml b/dataset_info/Airbot_MMK2_move_medicine_bottle.yaml index 09e514cb2938ef130bb8241e826e23ebbefcb41d..a79b530a5a5a086807b137190570fb3045b2e028 100644 --- a/dataset_info/Airbot_MMK2_move_medicine_bottle.yaml +++ b/dataset_info/Airbot_MMK2_move_medicine_bottle.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: furniture level2: table @@ -51,32 +51,40 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: place the medicine bottles on the table onto the board with both - hands. +task_instruction: +- place the medicine bottles on the table onto the board with both hands. sub_tasks: -- Place the white bottle on the white board with the right gripper -- Abnormal -- Grasp the white bottle with the left gripper -- Static -- End -- Grasp the white bottle the right gripper -- Place the white bottle on the white board with the left gripper -- 'null' +- subtask: Place the white bottle on the white board with the right gripper + subtask_index: 0 +- subtask: Abnormal + subtask_index: 1 +- subtask: Grasp the white bottle with the left gripper + subtask_index: 2 +- subtask: Static + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: Grasp the white bottle the right gripper + subtask_index: 5 +- subtask: Place the white bottle on the white board with the left gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -85,13 +93,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -99,8 +104,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 16815 fps: 30 @@ -128,11 +132,9 @@ data_structure: "Airbot_MMK2_move_medicine_bottle_qced_hardlink/\n|-- annotation \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -402,7 +404,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -410,7 +412,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -437,136 +438,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_move_medicine_bottle - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: medical_healthcare - level2: pharmacy - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - place the medicine bottles on the table onto the board with both hands. 
- sub_tasks: - - subtask: Place the white bottle on the white board with the right gripper - subtask_index: 0 - - subtask: Abnormal - subtask_index: 1 - - subtask: Grasp the white bottle with the left gripper - subtask_index: 2 - - subtask: Static - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: Grasp the white bottle the right gripper - subtask_index: 5 - - subtask: Place the white bottle on the white board with the left gripper - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 16815 - dataset_size: 493.20 MB - data_structure: "Airbot_MMK2_move_medicine_bottle_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_move_pan.yaml b/dataset_info/Airbot_MMK2_move_pan.yaml index aaddf4217192994cdcefd0a7d53fc218f966ccc1..1cb693141c3c8dec1bb431c87d542eb0219b7181 100644 --- a/dataset_info/Airbot_MMK2_move_pan.yaml +++ b/dataset_info/Airbot_MMK2_move_pan.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: pan level1: cookware level2: pan @@ -45,31 +45,38 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: push the pan to the edge of the table with your left hand and place - it on the building block with your right hand. +task_instruction: +- push the pan to the edge of the table with your left hand and place it on the building + block with your right hand. sub_tasks: -- Grasp the frying pan with the right gripper -- Abnormal -- Place the frying pan on the red cube block with the right gripper -- Push the frying pan on left to right with the left gripper -- End -- 'null' +- subtask: Grasp the frying pan with the right gripper + subtask_index: 0 +- subtask: Abnormal + subtask_index: 1 +- subtask: Place the frying pan on the red cube block with the right gripper + subtask_index: 2 +- subtask: Push the frying pan on left to right with the left gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - push - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -78,13 +85,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -92,8 +96,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 48 total_frames: 14881 fps: 30 @@ -120,11 +123,9 @@ data_structure: "Airbot_MMK2_move_pan_qced_hardlink/\n|-- annotations\n| |-- e | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:47 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -394,7 +395,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -402,7 +403,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -429,134 +429,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_move_pan - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - push the pan to the edge of the table with your left hand and place it on the - building block with your right hand. 
- sub_tasks: - - subtask: Grasp the frying pan with the right gripper - subtask_index: 0 - - subtask: Abnormal - subtask_index: 1 - - subtask: Place the frying pan on the red cube block with the right gripper - subtask_index: 2 - - subtask: Push the frying pan on left to right with the left gripper - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - push - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 14881 - dataset_size: 764.06 MB - data_structure: "Airbot_MMK2_move_pan_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n\ - | |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n\ - | |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n\ - | |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n\ - | `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n\ - | |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| \ - \ |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |--\ - \ episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n\ - | |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| \ - \ `-- episode_000011.parquet\n| `-- ... 
(36 more entries)\n|-- meta\n\ - | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ - \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ - \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ - \ `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, 
Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_move_paper_box.yaml b/dataset_info/Airbot_MMK2_move_paper_box.yaml index 5da2e81d27eab15856defd3f81ddfadd6f5f3f18..7ae258fc7ba0996c1b3c3f69ed9eeb9be9d55179 100644 --- a/dataset_info/Airbot_MMK2_move_paper_box.yaml +++ b/dataset_info/Airbot_MMK2_move_paper_box.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: paper_box level1: home_storage level2: paper_box @@ -45,31 +45,40 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the cardboard box with both hands and place it on the lid. +task_instruction: +- pick up the cardboard box with both hands and place it on the lid. sub_tasks: -- End -- Place the phone case box on the white lid with the right gripper -- Grasp the mouse box with the left gripper -- Abnormal -- Static -- Grasp the phone case box with the right gripper -- Place the mouse box on the white lid with the left gripper -- 'null' +- subtask: End + subtask_index: 0 +- subtask: Place the phone case box on the white lid with the right gripper + subtask_index: 1 +- subtask: Grasp the mouse box with the left gripper + subtask_index: 2 +- subtask: Abnormal + subtask_index: 3 +- subtask: Static + subtask_index: 4 +- subtask: Grasp the phone case box with the right gripper + subtask_index: 5 +- subtask: Place the mouse box on the white lid with the left gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -78,13 +87,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -92,8 +98,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 47 total_frames: 5070 fps: 30 @@ -180,11 +185,9 @@ data_structure: 'Airbot_MMK2_move_paper_box_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:46 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -454,7 +457,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -462,7 +465,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn 
contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -489,195 +491,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_move_paper_box - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: study_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the cardboard box with both hands and place it on the lid. 
- sub_tasks: - - subtask: End - subtask_index: 0 - - subtask: Place the phone case box on the white lid with the right gripper - subtask_index: 1 - - subtask: Grasp the mouse box with the left gripper - subtask_index: 2 - - subtask: Abnormal - subtask_index: 3 - - subtask: Static - subtask_index: 4 - - subtask: Grasp the phone case box with the right gripper - subtask_index: 5 - - subtask: Place the mouse box on the white lid with the left gripper - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 5070 - dataset_size: 190.01 MB - data_structure: 'Airbot_MMK2_move_paper_box_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(35 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_front_rgb - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ 
Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_move_phone_twice.yaml b/dataset_info/Airbot_MMK2_move_phone_twice.yaml index 8f5ad3f0a19e305b40675f953b44455ae0b1153b..66f7b2e706837f1319b67316181ebb9f82d43473 100644 --- a/dataset_info/Airbot_MMK2_move_phone_twice.yaml +++ b/dataset_info/Airbot_MMK2_move_phone_twice.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: mobile_phone level1: appliances level2: mobile_phone @@ -45,32 +45,41 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: place the mobile phone on the box with your left hand and take it - off with your right hand. +task_instruction: +- place the mobile phone on the box with your left hand and take it off with your + right hand. sub_tasks: -- Grasp the telephone with the right gripper -- End -- Abnormal -- Place the telephone on the box with the left gripper -- Static -- Grasp the telephone with the left gripper -- Place the telephone on the table with the right gripper -- 'null' +- subtask: Grasp the telephone with the right gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Abnormal + subtask_index: 2 +- subtask: Place the telephone on the box with the left gripper + subtask_index: 3 +- subtask: Static + subtask_index: 4 +- subtask: Grasp the telephone with the left gripper + subtask_index: 5 +- subtask: Place the telephone on the table with the right gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -79,13 +88,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -93,8 +99,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 9795 fps: 30 @@ -122,11 +127,9 @@ data_structure: "Airbot_MMK2_move_phone_twice_qced_hardlink/\n|-- annotations\n| \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -396,7 +399,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -404,7 +407,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -431,137 +433,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_move_phone_twice - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: bedroom - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - place the mobile phone on the box with your left hand and take it off with your - right hand. 
- sub_tasks: - - subtask: Grasp the telephone with the right gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Abnormal - subtask_index: 2 - - subtask: Place the telephone on the box with the left gripper - subtask_index: 3 - - subtask: Static - subtask_index: 4 - - subtask: Grasp the telephone with the left gripper - subtask_index: 5 - - subtask: Place the telephone on the table with the right gripper - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 9795 - dataset_size: 512.21 MB - data_structure: "Airbot_MMK2_move_phone_twice_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_move_sword_doll.yaml b/dataset_info/Airbot_MMK2_move_sword_doll.yaml index c74e1b91b38fa5422ac8ce3771c21d62b3279a81..59483686a4c6f931c927c93b0e66580ce14e8e3e 100644 --- a/dataset_info/Airbot_MMK2_move_sword_doll.yaml +++ b/dataset_info/Airbot_MMK2_move_sword_doll.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: white_basket level1: home_storage level2: white_basket @@ -45,30 +45,38 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the shark doll on the white box. +task_instruction: +- put the shark doll on the white box. sub_tasks: -- Place the shark dagger on the white basket with the right gripper -- Grasp the shark dagger with the left gripper -- Place the shark dagger on the white basket with the left gripper -- Abnormal -- Grasp the shark dagger with the right gripper -- End -- 'null' +- subtask: Place the shark dagger on the white basket with the right gripper + subtask_index: 0 +- subtask: Grasp the shark dagger with the left gripper + subtask_index: 1 +- subtask: Place the shark dagger on the white basket with the left gripper + subtask_index: 2 +- subtask: Abnormal + subtask_index: 3 +- subtask: Grasp the shark dagger with the right gripper + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - lift - lower -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -77,13 +85,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -91,8 +96,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 48 total_frames: 5383 fps: 30 @@ -120,11 +124,9 @@ data_structure: "Airbot_MMK2_move_sword_doll_qced_hardlink/\n|-- annotations\n| \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:47 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -394,7 +396,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -402,7 +404,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -429,134 +430,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_move_sword_doll - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: bedroom - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the shark doll on the white box. 
- sub_tasks: - - subtask: Place the shark dagger on the white basket with the right gripper - subtask_index: 0 - - subtask: Grasp the shark dagger with the left gripper - subtask_index: 1 - - subtask: Place the shark dagger on the white basket with the left gripper - subtask_index: 2 - - subtask: Abnormal - subtask_index: 3 - - subtask: Grasp the shark dagger with the right gripper - subtask_index: 4 - - subtask: End - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - lift - - lower - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 5383 - dataset_size: 226.25 MB - data_structure: "Airbot_MMK2_move_sword_doll_qced_hardlink/\n|-- annotations\n|\ - \ |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(36 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_move_tennis_racket_ball.yaml b/dataset_info/Airbot_MMK2_move_tennis_racket_ball.yaml index b0c9e36c9ae630ca896e290248378d78104d8bff..6c8fb7377bd22f345a66724ab39e2fb777e25d7d 100644 --- a/dataset_info/Airbot_MMK2_move_tennis_racket_ball.yaml +++ b/dataset_info/Airbot_MMK2_move_tennis_racket_ball.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: tennis_racket level1: toys level2: tennis_racket @@ -45,32 +45,41 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the racket with your left hand, place the ball on the racket - with your right hand, and then put down the racket. +task_instruction: +- pick up the racket with your left hand, place the ball on the racket with your right + hand, and then put down the racket. sub_tasks: -- Lift the racket with the left gripper -- Abnormal -- Place the ball on the racket with the right gripper -- Place the racket on the table with the left gripper -- End -- Grasp the racket with the left gripper -- Grasp the ball with the right gripper -- 'null' +- subtask: Lift the racket with the left gripper + subtask_index: 0 +- subtask: Abnormal + subtask_index: 1 +- subtask: Place the ball on the racket with the right gripper + subtask_index: 2 +- subtask: Place the racket on the table with the left gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: Grasp the racket with the left gripper + subtask_index: 5 +- subtask: Grasp the ball with the right gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -79,13 +88,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -93,8 +99,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 48 total_frames: 10833 fps: 30 @@ -122,11 +127,9 @@ data_structure: "Airbot_MMK2_move_tennis_racket_ball_qced_hardlink/\n|-- annotat \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:47 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -396,7 +399,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -404,7 +407,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -431,137 +433,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_move_tennis_racket_ball - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: other - level2: leisure_plaza - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the racket with your left hand, place the ball on the racket with your - right hand, and then put down the racket. 
- sub_tasks: - - subtask: Lift the racket with the left gripper - subtask_index: 0 - - subtask: Abnormal - subtask_index: 1 - - subtask: Place the ball on the racket with the right gripper - subtask_index: 2 - - subtask: Place the racket on the table with the left gripper - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: Grasp the racket with the left gripper - subtask_index: 5 - - subtask: Grasp the ball with the right gripper - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 10833 - dataset_size: 455.12 MB - data_structure: "Airbot_MMK2_move_tennis_racket_ball_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(36 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_move_tub.yaml b/dataset_info/Airbot_MMK2_move_tub.yaml index 953a73867856ac0c7704239e17257156e7e67d77..ad15c3f6ffd89278be169801ee5dbbb2fcc2b8ea 100644 --- a/dataset_info/Airbot_MMK2_move_tub.yaml +++ b/dataset_info/Airbot_MMK2_move_tub.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: plate level1: kitchen_supplies level2: plate @@ -39,29 +39,34 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pass the plate to your right hand with your left hand and then put - it down. +task_instruction: +- pass the plate to your right hand with your left hand and then put it down. sub_tasks: -- Grasp the white box with left gripper -- Place the white box in the center of table with right gripper -- End -- Pass the white box to right gripper -- 'null' +- subtask: Grasp the white box with left gripper + subtask_index: 0 +- subtask: Place the white box in the center of table with right gripper + subtask_index: 1 +- subtask: End + subtask_index: 2 +- subtask: Pass the white box to right gripper + subtask_index: 3 +- subtask: 'null' + subtask_index: 4 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -70,13 +75,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -84,8 +86,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 37 total_frames: 11634 fps: 30 @@ -112,11 +113,9 @@ data_structure: "Airbot_MMK2_move_tub_qced_hardlink/\n|-- annotations\n| |-- e | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:36 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -386,7 +385,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -394,7 +393,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -421,130 +419,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_move_tub - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pass the plate to your right hand with your left hand and then put it down. 
- sub_tasks: - - subtask: Grasp the white box with left gripper - subtask_index: 0 - - subtask: Place the white box in the center of table with right gripper - subtask_index: 1 - - subtask: End - subtask_index: 2 - - subtask: Pass the white box to right gripper - subtask_index: 3 - - subtask: 'null' - subtask_index: 4 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 11634 - dataset_size: 340.31 MB - data_structure: "Airbot_MMK2_move_tub_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n\ - | |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n\ - | |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n\ - | |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n\ - | `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n\ - | |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| \ - \ |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |--\ - \ episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n\ - | |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| \ - \ `-- episode_000011.parquet\n| `-- ... 
(25 more entries)\n|-- meta\n\ - | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ - \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ - \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ - \ `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, 
Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_move_umbrella_tissues.yaml b/dataset_info/Airbot_MMK2_move_umbrella_tissues.yaml index 207e3cf4619cf5b0406e47621fed27edb8abdf04..76579d4b3350569abaae4d49a0302dbb8a736932 100644 --- a/dataset_info/Airbot_MMK2_move_umbrella_tissues.yaml +++ b/dataset_info/Airbot_MMK2_move_umbrella_tissues.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: furniture level2: table @@ -57,29 +57,36 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the umbrella and tissues on the lid. +task_instruction: +- put the umbrella and tissues on the lid. sub_tasks: -- Grasp the umbrella with the left gripper -- Grasp the tissue with the right gripper -- Place the tissue on the white lid with the right gripper -- Place the umbrella on the white lid with the left gripper -- End -- 'null' +- subtask: Grasp the umbrella with the left gripper + subtask_index: 0 +- subtask: Grasp the tissue with the right gripper + subtask_index: 1 +- subtask: Place the tissue on the white lid with the right gripper + subtask_index: 2 +- subtask: Place the umbrella on the white lid with the left gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - garsp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -88,13 +95,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -102,8 +106,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 49 total_frames: 8709 fps: 30 @@ -131,11 +134,9 @@ data_structure: "Airbot_MMK2_move_umbrella_tissues_qced_hardlink/\n|-- annotatio \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:48 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -405,7 +406,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -413,7 +414,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -440,132 +440,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_move_umbrella_tissues - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the umbrella and tissues on the lid. 
- sub_tasks: - - subtask: Grasp the umbrella with the left gripper - subtask_index: 0 - - subtask: Grasp the tissue with the right gripper - subtask_index: 1 - - subtask: Place the tissue on the white lid with the right gripper - subtask_index: 2 - - subtask: Place the umbrella on the white lid with the left gripper - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - garsp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 8709 - dataset_size: 327.64 MB - data_structure: "Airbot_MMK2_move_umbrella_tissues_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_open_door_left.yaml b/dataset_info/Airbot_MMK2_open_door_left.yaml index d49e6d6d2d9f8ddcd4005a72fa0fd135a47fd322..5d7b50f4ee2b1b7100575cb6230c4c1300665d56 100644 --- a/dataset_info/Airbot_MMK2_open_door_left.yaml +++ b/dataset_info/Airbot_MMK2_open_door_left.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -23,11 +23,14 @@ codebase_version: v2.1 dataset_name: Airbot_MMK2_open_door_left dataset_uuid: 00000000-0000-0000-0000-000000000000 scene_type: - level1: household - level2: study_room + level1: scene_level1 + level2: scene_level2 + level3: null + level4: null + level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: cabinet level1: furniture level2: cabinet @@ -36,25 +39,30 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: open the cabinet. +task_instruction: +- open the cabinet. sub_tasks: -- Touch the door with the left gripper -- Open the door with the left gripper -- End -- 'null' +- subtask: Touch the door with the left gripper + subtask_index: 0 +- subtask: Open the door with the left gripper + subtask_index: 1 +- subtask: End + subtask_index: 2 +- subtask: 'null' + subtask_index: 3 atomic_actions: - open -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -63,13 +71,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -77,8 +82,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 49 total_frames: 6418 fps: 30 @@ -105,11 +109,9 @@ data_structure: "Airbot_MMK2_open_door_left_qced_hardlink/\n|-- annotations\n| | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:48 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -379,7 +381,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -387,7 +389,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -414,126 +415,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_open_door_left - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: scene_level1 - level2: scene_level2 - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - open the cabinet. 
- sub_tasks: - - subtask: Touch the door with the left gripper - subtask_index: 0 - - subtask: Open the door with the left gripper - subtask_index: 1 - - subtask: End - subtask_index: 2 - - subtask: 'null' - subtask_index: 3 - atomic_actions: - - open - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 6418 - dataset_size: 188.62 MB - data_structure: "Airbot_MMK2_open_door_left_qced_hardlink/\n|-- annotations\n| \ - \ |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_open_door_right.yaml b/dataset_info/Airbot_MMK2_open_door_right.yaml index 69e6e16790a80d32f443238c141099084a825786..8e37a8d845015a083e4051dee83b1d19bcd39aa0 100644 --- a/dataset_info/Airbot_MMK2_open_door_right.yaml +++ b/dataset_info/Airbot_MMK2_open_door_right.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: cabinet level1: home_storage level2: cabinet @@ -39,27 +39,34 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: open the cabinet. +task_instruction: +- open the cabinet. sub_tasks: -- End -- Open the door with the right gripper -- Touch the door handle with the right gripper -- Touch the door with the right gripper -- Abnormal -- 'null' +- subtask: End + subtask_index: 0 +- subtask: Open the door with the right gripper + subtask_index: 1 +- subtask: Touch the door handle with the right gripper + subtask_index: 2 +- subtask: Touch the door with the right gripper + subtask_index: 3 +- subtask: Abnormal + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - open -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -68,13 +75,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -82,8 +86,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 98 total_frames: 31753 fps: 30 @@ -111,11 +114,9 @@ data_structure: "Airbot_MMK2_open_door_right_qced_hardlink/\n|-- annotations\n| \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:97 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -385,7 +386,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -393,7 +394,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -420,130 +420,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_open_door_right - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: other - level2: laboratory - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - open the cabinet. 
- sub_tasks: - - subtask: End - subtask_index: 0 - - subtask: Open the door with the right gripper - subtask_index: 1 - - subtask: Touch the door handle with the right gripper - subtask_index: 2 - - subtask: Touch the door with the right gripper - subtask_index: 3 - - subtask: Abnormal - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - open - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 31753 - dataset_size: 1.03 GB - data_structure: "Airbot_MMK2_open_door_right_qced_hardlink/\n|-- annotations\n|\ - \ |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(86 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_open_laptop.yaml b/dataset_info/Airbot_MMK2_open_laptop.yaml index 9568396b5fa5ed9c8a79b258714f6222780ea24f..13746dae90bd4b00abd968b0cb86f52a2f9478f4 100644 --- a/dataset_info/Airbot_MMK2_open_laptop.yaml +++ b/dataset_info/Airbot_MMK2_open_laptop.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: laptop level1: appliances level2: laptop @@ -39,30 +39,37 @@ objects: &id006 level5:operation_platform_height: 77.2 task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: press the laptop with your right hand and then open it with your - left hand. +task_instruction: +- press the laptop with your right hand and then open it with your left hand. sub_tasks: -- Press the laptop with the right gripper -- Open the laptop with the left gripper -- Static -- Release the laptop with the right gripper -- End -- Grasp the laptop with the left gripper -- 'null' +- subtask: Press the laptop with the right gripper + subtask_index: 0 +- subtask: Open the laptop with the left gripper + subtask_index: 1 +- subtask: Static + subtask_index: 2 +- subtask: Release the laptop with the right gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: Grasp the laptop with the left gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - open -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -71,13 +78,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -85,8 +89,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 11734 fps: 30 @@ -113,11 +116,9 @@ data_structure: "Airbot_MMK2_open_laptop_qced_hardlink/\n|-- annotations\n| |- | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -387,7 +388,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -395,7 +396,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -422,133 +422,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_open_laptop - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: office_workspace - level2: office - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - press the laptop with your right hand and then open it with your left hand. 
- sub_tasks: - - subtask: Press the laptop with the right gripper - subtask_index: 0 - - subtask: Open the laptop with the left gripper - subtask_index: 1 - - subtask: Static - subtask_index: 2 - - subtask: Release the laptop with the right gripper - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: Grasp the laptop with the left gripper - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - open - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 11734 - dataset_size: 432.71 MB - data_structure: "Airbot_MMK2_open_laptop_qced_hardlink/\n|-- annotations\n| |--\ - \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ - \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n|\ - \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_open_lid.yaml b/dataset_info/Airbot_MMK2_open_lid.yaml index 8599365b98e58e5fdcae3467abc09e839f598ffb..28c8918447fcbc0bd10bb6ce5b97a29341b369d7 100644 --- a/dataset_info/Airbot_MMK2_open_lid.yaml +++ b/dataset_info/Airbot_MMK2_open_lid.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: box level1: home_storage level2: box @@ -39,29 +39,38 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: open the lid of the box with both hands. +task_instruction: +- open the lid of the box with both hands. sub_tasks: -- Open the box lid with the right gripper -- Touch the box lid with the left gripper -- Press the box with the left gripper -- End -- Open the box lid with the left gripper -- Touch the box lid with the right gripper -- Abnormal -- 'null' +- subtask: Open the box lid with the right gripper + subtask_index: 0 +- subtask: Touch the box lid with the left gripper + subtask_index: 1 +- subtask: Press the box with the left gripper + subtask_index: 2 +- subtask: End + subtask_index: 3 +- subtask: Open the box lid with the left gripper + subtask_index: 4 +- subtask: Touch the box lid with the right gripper + subtask_index: 5 +- subtask: Abnormal + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - open -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -70,13 +79,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -84,8 +90,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 198 total_frames: 28684 fps: 30 @@ -112,11 +117,9 @@ data_structure: "Airbot_MMK2_open_lid_qced_hardlink/\n|-- annotations\n| |-- e | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:197 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -386,7 +389,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -394,7 +397,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -421,134 +423,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_open_lid - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: other - level2: laboratory - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - open the lid of the box with both hands. 
- sub_tasks: - - subtask: Open the box lid with the right gripper - subtask_index: 0 - - subtask: Touch the box lid with the left gripper - subtask_index: 1 - - subtask: Press the box with the left gripper - subtask_index: 2 - - subtask: End - subtask_index: 3 - - subtask: Open the box lid with the left gripper - subtask_index: 4 - - subtask: Touch the box lid with the right gripper - subtask_index: 5 - - subtask: Abnormal - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - open - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 28684 - dataset_size: 974.53 MB - data_structure: "Airbot_MMK2_open_lid_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n\ - | |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n\ - | |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n\ - | |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n\ - | `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n\ - | |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| \ - \ |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |--\ - \ episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n\ - | |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| \ - \ `-- episode_000011.parquet\n| `-- ... 
(186 more entries)\n|-- meta\n\ - | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ - \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ - \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ - \ `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, 
Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_organize_plate.yaml b/dataset_info/Airbot_MMK2_organize_plate.yaml index 6e6f82f8749757b5db2d3fc4657d7d6060adff57..e67757c3e43ec9a60e9dea0d47da720f74970037 100644 --- a/dataset_info/Airbot_MMK2_organize_plate.yaml +++ b/dataset_info/Airbot_MMK2_organize_plate.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: round_plate level1: kitchen_supplies level2: plates @@ -45,32 +45,40 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use both hands to place the stacked plates on the table onto the - shelf. +task_instruction: +- use both hands to place the stacked plates on the table onto the shelf. sub_tasks: -- Press the plate with the left gripper -- End -- Place the plate into the front mezzanine of the shelf with the right gripper -- Press the plate and push it to the right with the left gripper -- Abnormal -- Place the plate into the middle mezzanine of the shelf with the right gripper -- Grasp the plate with the right gripper -- 'null' +- subtask: Press the plate with the left gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Place the plate into the front mezzanine of the shelf with the right gripper + subtask_index: 2 +- subtask: Press the plate and push it to the right with the left gripper + subtask_index: 3 +- subtask: Abnormal + subtask_index: 4 +- subtask: Place the plate into the middle mezzanine of the shelf with the right gripper + subtask_index: 5 +- subtask: Grasp the plate with the right gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -79,13 +87,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -93,8 +98,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 595 total_frames: 338107 fps: 30 @@ -121,11 +125,9 @@ data_structure: "Airbot_MMK2_organize_plate_qced_hardlink/\n|-- annotations\n| | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:594 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -395,7 +397,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -403,7 +405,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -430,138 +431,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_organize_plate - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitichen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use both hands to place the stacked plates on the table onto the shelf. 
- sub_tasks: - - subtask: Press the plate with the left gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Place the plate into the front mezzanine of the shelf with the right - gripper - subtask_index: 2 - - subtask: Press the plate and push it to the right with the left gripper - subtask_index: 3 - - subtask: Abnormal - subtask_index: 4 - - subtask: Place the plate into the middle mezzanine of the shelf with the right - gripper - subtask_index: 5 - - subtask: Grasp the plate with the right gripper - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 338107 - dataset_size: 14.21 GB - data_structure: "Airbot_MMK2_organize_plate_qced_hardlink/\n|-- annotations\n| \ - \ |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(583 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 100K-1M diff --git a/dataset_info/Airbot_MMK2_pass_paper_box.yaml b/dataset_info/Airbot_MMK2_pass_paper_box.yaml index 3d93f825ec3900b822efb091ff33e1b336b2316a..3be9169c7910011f30bc4da53ca136f2e292b01b 100644 --- a/dataset_info/Airbot_MMK2_pass_paper_box.yaml +++ b/dataset_info/Airbot_MMK2_pass_paper_box.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: paper_box level1: laboratory_supplies level2: paper_box @@ -39,33 +39,43 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the book with your left hand, hand it to your right hand - and then put it down. +task_instruction: +- pick up the book with your left hand, hand it to your right hand and then put it + down. sub_tasks: -- Place the mouse box on the white lid with the left gripper -- Grasp the calculator case with left gripper -- Place the phone case box on the white lid with the right gripper -- Grasp the mouse box with the left gripper -- Grasp the phone case box with the right gripper -- Pass the calculator case to right gripper -- Place the calculator case in the center of table with right gripper -- End -- 'null' +- subtask: Place the mouse box on the white lid with the left gripper + subtask_index: 0 +- subtask: Grasp the calculator case with left gripper + subtask_index: 1 +- subtask: Place the phone case box on the white lid with the right gripper + subtask_index: 2 +- subtask: Grasp the mouse box with the left gripper + subtask_index: 3 +- subtask: Grasp the phone case box with the right gripper + subtask_index: 4 +- subtask: Pass the calculator case to right gripper + subtask_index: 5 +- subtask: Place the calculator case in the center of table with right gripper + subtask_index: 6 +- subtask: End + subtask_index: 7 +- subtask: 'null' + subtask_index: 8 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: 
five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -74,13 +84,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -88,8 +95,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 46 total_frames: 8948 fps: 30 @@ -176,11 +182,9 @@ data_structure: 'Airbot_MMK2_pass_paper_box_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:45 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -450,7 +454,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -458,7 +462,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ 
paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -485,198 +488,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_pass_paper_box - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: study_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the book with your left hand, hand it to your right hand and then put - it down. 
- sub_tasks: - - subtask: Place the mouse box on the white lid with the left gripper - subtask_index: 0 - - subtask: Grasp the calculator case with left gripper - subtask_index: 1 - - subtask: Place the phone case box on the white lid with the right gripper - subtask_index: 2 - - subtask: Grasp the mouse box with the left gripper - subtask_index: 3 - - subtask: Grasp the phone case box with the right gripper - subtask_index: 4 - - subtask: Pass the calculator case to right gripper - subtask_index: 5 - - subtask: Place the calculator case in the center of table with right gripper - subtask_index: 6 - - subtask: End - subtask_index: 7 - - subtask: 'null' - subtask_index: 8 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 8948 - dataset_size: 306.64 MB - data_structure: 'Airbot_MMK2_pass_paper_box_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - 
- | `-- ... (34 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_front_rgb - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin 
Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_pick_up_and_place_tub.yaml b/dataset_info/Airbot_MMK2_pick_up_and_place_tub.yaml index 201d260f48e486e7bddc0b4bc2d14092b38eb75e..9fbcdd5fd9a298c99170ee51a98859ad195f39d2 100644 --- a/dataset_info/Airbot_MMK2_pick_up_and_place_tub.yaml +++ b/dataset_info/Airbot_MMK2_pick_up_and_place_tub.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: washbasin level1: home_storage level2: washbasin @@ -39,32 +39,42 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the washbasin and put it down. +task_instruction: +- pick up the washbasin and put it down. sub_tasks: -- End -- Lift the basin with the left gripper -- Grasp the basin with the left gripper -- Grasp the basin with the right gripper -- Place basin on the table with the right gripper -- Place basin on the table with the left gripper -- Abnormal -- Lift the basin with the right gripper -- 'null' +- subtask: End + subtask_index: 0 +- subtask: Lift the basin with the left gripper + subtask_index: 1 +- subtask: Grasp the basin with the left gripper + subtask_index: 2 +- subtask: Grasp the basin with the right gripper + subtask_index: 3 +- subtask: Place basin on the table with the right gripper + subtask_index: 4 +- subtask: Place basin on the table with the left gripper + subtask_index: 5 +- subtask: Abnormal + subtask_index: 6 +- subtask: Lift the basin with the right gripper + subtask_index: 7 +- subtask: 'null' + subtask_index: 8 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -73,13 +83,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -87,8 +94,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 98 total_frames: 16632 fps: 30 @@ -116,11 +122,9 @@ data_structure: "Airbot_MMK2_pick_up_and_place_tub_qced_hardlink/\n|-- annotatio \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:97 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -390,7 +394,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -398,7 +402,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -425,138 +428,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_pick_up_and_place_tub - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: bathroom - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the washbasin and put it down. 
- sub_tasks: - - subtask: End - subtask_index: 0 - - subtask: Lift the basin with the left gripper - subtask_index: 1 - - subtask: Grasp the basin with the left gripper - subtask_index: 2 - - subtask: Grasp the basin with the right gripper - subtask_index: 3 - - subtask: Place basin on the table with the right gripper - subtask_index: 4 - - subtask: Place basin on the table with the left gripper - subtask_index: 5 - - subtask: Abnormal - subtask_index: 6 - - subtask: Lift the basin with the right gripper - subtask_index: 7 - - subtask: 'null' - subtask_index: 8 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 16632 - dataset_size: 607.45 MB - data_structure: "Airbot_MMK2_pick_up_and_place_tub_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(86 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_play_guitar.yaml b/dataset_info/Airbot_MMK2_play_guitar.yaml index 07100455a42ef0b9fdb850a35b53a5d23496bdbb..131d3820eca0decdc5780356f969233b2c072111 100644 --- a/dataset_info/Airbot_MMK2_play_guitar.yaml +++ b/dataset_info/Airbot_MMK2_play_guitar.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: furniture level2: table @@ -51,30 +51,37 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the guitar with your left hand, play it with your right - hand, and then put it down with your left hand. +task_instruction: +- pick up the guitar with your left hand, play it with your right hand, and then put + it down with your left hand. sub_tasks: -- Lift the ukulele with the left gripper -- End -- Play ukulele with the right gripper -- Grasp the ukulele with the left gripper -- Place the ukulele on the table with the left gripper -- 'null' +- subtask: Lift the ukulele with the left gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Play ukulele with the right gripper + subtask_index: 2 +- subtask: Grasp the ukulele with the left gripper + subtask_index: 3 +- subtask: Place the ukulele on the table with the left gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -83,13 +90,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -97,8 +101,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 94 total_frames: 23327 fps: 30 @@ -125,11 +128,9 @@ data_structure: "Airbot_MMK2_play_guitar_qced_hardlink/\n|-- annotations\n| |- | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:93 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -399,7 +400,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -407,7 +408,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -434,133 +434,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_play_guitar - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: education - level2: after-school_tutoring_center - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the guitar with your left hand, play it with your right hand, and then - put it down with your left hand. 
- sub_tasks: - - subtask: Lift the ukulele with the left gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Play ukulele with the right gripper - subtask_index: 2 - - subtask: Grasp the ukulele with the left gripper - subtask_index: 3 - - subtask: Place the ukulele on the table with the left gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 23327 - dataset_size: 944.70 MB - data_structure: "Airbot_MMK2_play_guitar_qced_hardlink/\n|-- annotations\n| |--\ - \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ - \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n|\ - \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(82 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_play_toy_piano.yaml b/dataset_info/Airbot_MMK2_play_toy_piano.yaml index 3060d4a900f1edfe4dd3f4f9bf8e8db3e087d29f..0dfdb65ba0a32b8906ca45f4292b8cddf6089a71 100644 --- a/dataset_info/Airbot_MMK2_play_toy_piano.yaml +++ b/dataset_info/Airbot_MMK2_play_toy_piano.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: furniture level2: table @@ -45,31 +45,38 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the piano in the middle with your left hand and play the - piano with your right hand. +task_instruction: +- pick up the piano in the middle with your left hand and play the piano with your + right hand. sub_tasks: -- Play the toy piano with the right gripper -- Place the toy piano on the table with the left gripper -- End -- Grasp the toy piano with the left gripper -- Lift the toy piano with the left gripper -- 'null' +- subtask: Play the toy piano with the right gripper + subtask_index: 0 +- subtask: Place the toy piano on the table with the left gripper + subtask_index: 1 +- subtask: End + subtask_index: 2 +- subtask: Grasp the toy piano with the left gripper + subtask_index: 3 +- subtask: Lift the toy piano with the left gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place - press -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -78,13 +85,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -92,8 +96,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 7683 fps: 30 @@ -120,11 +123,9 @@ data_structure: "Airbot_MMK2_play_toy_piano_qced_hardlink/\n|-- annotations\n| | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -394,7 +395,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -402,7 +403,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -429,134 +429,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_play_toy_piano - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the piano in the middle with your left hand and play the piano with your - right hand. 
- sub_tasks: - - subtask: Play the toy piano with the right gripper - subtask_index: 0 - - subtask: Place the toy piano on the table with the left gripper - subtask_index: 1 - - subtask: End - subtask_index: 2 - - subtask: Grasp the toy piano with the left gripper - subtask_index: 3 - - subtask: Lift the toy piano with the left gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - - press - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 7683 - dataset_size: 230.15 MB - data_structure: "Airbot_MMK2_play_toy_piano_qced_hardlink/\n|-- annotations\n| \ - \ |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_pour_BBs.yaml b/dataset_info/Airbot_MMK2_pour_BBs.yaml index 9a002c3a4196b191d9af77d57e1539dcca181f64..dca784a59eeaf54bf0b5150fb6f0740e39e8a5bc 100644 --- a/dataset_info/Airbot_MMK2_pour_BBs.yaml +++ b/dataset_info/Airbot_MMK2_pour_BBs.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -23,11 +23,14 @@ codebase_version: v2.1 dataset_name: Airbot_MMK2_pour_BBs dataset_uuid: 00000000-0000-0000-0000-000000000000 scene_type: - level1: other - level2: laboratory + level1: scene_level1 + level2: scene_level2 + level3: null + level4: null + level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: paper_cup level1: kitchen_supplies level2: paper_cup @@ -48,31 +51,39 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: grab the paper cup and pour the bullets inside into the bowl. +task_instruction: +- grab the paper cup and pour the bullets inside into the bowl. sub_tasks: -- Grasp the paper cup containing bullets with right gripper -- End -- Abnormal -- Static -- Pour the bullets from the paper cup into the bowl with right gripper -- Place the paper cup on the table with right gripper -- 'null' +- subtask: Grasp the paper cup containing bullets with right gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Abnormal + subtask_index: 2 +- subtask: Static + subtask_index: 3 +- subtask: Pour the bullets from the paper cup into the bowl with right gripper + subtask_index: 4 +- subtask: Place the paper cup on the table with right gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - pick - place - pour -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -81,13 +92,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -95,8 +103,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 19529 fps: 30 @@ -123,11 +130,9 @@ data_structure: "Airbot_MMK2_pour_BBs_qced_hardlink/\n|-- annotations\n| |-- e | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -397,7 +402,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -405,7 +410,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -432,135 +436,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_pour_BBs - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: scene_level1 - level2: scene_level2 - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - grab the paper cup and pour the bullets inside into the bowl. 
- sub_tasks: - - subtask: Grasp the paper cup containing bullets with right gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Abnormal - subtask_index: 2 - - subtask: Static - subtask_index: 3 - - subtask: Pour the bullets from the paper cup into the bowl with right gripper - subtask_index: 4 - - subtask: Place the paper cup on the table with right gripper - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - pick - - place - - pour - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 19529 - dataset_size: 726.21 MB - data_structure: "Airbot_MMK2_pour_BBs_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n\ - | |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n\ - | |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n\ - | |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n\ - | `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n\ - | |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| \ - \ |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |--\ - \ episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n\ - | |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| \ - \ `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n\ - | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ - \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ - \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ - \ `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, 
Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_prepare_tea.yaml b/dataset_info/Airbot_MMK2_prepare_tea.yaml index c2e5b74ba10a3daf4807431dfaab5b508093dc70..7a10919a50d48a01fdc8ba7ea037efaca31d6082 100644 --- a/dataset_info/Airbot_MMK2_prepare_tea.yaml +++ b/dataset_info/Airbot_MMK2_prepare_tea.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: small_teapot level1: teacus level2: small_teapot @@ -57,33 +57,42 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: with the right hand, take out the tea leaves from the tea canister - and put them into the inner pot of the teapot. with the left hand, put the inner - pot of the teapot into the teapot and then close the lid of the teapot. +task_instruction: +- with the right hand, take out the tea leaves from the tea canister and put them + into the inner pot of the teapot. with the left hand, put the inner pot of the teapot + into the teapot and then close the lid of the teapot. 
sub_tasks: -- Place the tea leaves into the tea strainer with the right gripper -- End -- Close the teapot lid with the left gripper -- Grasp the tea strainer with the left gripper -- Abnormal -- Place the tea strainer into the teapot with the left gripper -- Grasp the tea leaves with the right gripper -- 'null' +- subtask: Place the tea leaves into the tea strainer with the right gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Close the teapot lid with the left gripper + subtask_index: 2 +- subtask: Grasp the tea strainer with the left gripper + subtask_index: 3 +- subtask: Abnormal + subtask_index: 4 +- subtask: Place the tea strainer into the teapot with the left gripper + subtask_index: 5 +- subtask: Grasp the tea leaves with the right gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -92,13 +101,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -106,8 +112,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 89 total_frames: 70954 fps: 30 @@ -134,11 +139,9 @@ data_structure: "Airbot_MMK2_prepare_tea_qced_hardlink/\n|-- annotations\n| |- | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:88 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -408,7 +411,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -416,7 +419,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -443,138 +445,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_prepare_tea - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - with the right hand, take out the tea leaves from the tea canister and put them - into the inner pot of the teapot. with the left hand, put the inner pot of the - teapot into the teapot and then close the lid of the teapot. 
- sub_tasks: - - subtask: Place the tea leaves into the tea strainer with the right gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Close the teapot lid with the left gripper - subtask_index: 2 - - subtask: Grasp the tea strainer with the left gripper - subtask_index: 3 - - subtask: Abnormal - subtask_index: 4 - - subtask: Place the tea strainer into the teapot with the left gripper - subtask_index: 5 - - subtask: Grasp the tea leaves with the right gripper - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 70954 - dataset_size: 2.99 GB - data_structure: "Airbot_MMK2_prepare_tea_qced_hardlink/\n|-- annotations\n| |--\ - \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ - \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n|\ - \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(77 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_pull_plunger.yaml b/dataset_info/Airbot_MMK2_pull_plunger.yaml index 697e4654f8426d69e8f21641633d865eb7ac0e1b..d43383cce9a3f8271f2a0692913d8ce4fd83bec2 100644 --- a/dataset_info/Airbot_MMK2_pull_plunger.yaml +++ b/dataset_info/Airbot_MMK2_pull_plunger.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: furniture level2: table @@ -45,32 +45,39 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the syringe with your left hand and pull the piston with - your right hand. +task_instruction: +- pick up the syringe with your left hand and pull the piston with your right hand. sub_tasks: -- Lift the syringe with the left gripper -- Pull the piston with the right gripper -- End -- Grasp the syringe with the left gripper -- Grasp the piston with the right gripper -- Place the syringe on the table with the left gripper -- 'null' +- subtask: Lift the syringe with the left gripper + subtask_index: 0 +- subtask: Pull the piston with the right gripper + subtask_index: 1 +- subtask: End + subtask_index: 2 +- subtask: Grasp the syringe with the left gripper + subtask_index: 3 +- subtask: Grasp the piston with the right gripper + subtask_index: 4 +- subtask: Place the syringe on the table with the left gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - pick - place - pull -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -79,13 +86,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -93,8 +97,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 97 total_frames: 26077 fps: 30 @@ -121,11 +124,9 @@ data_structure: "Airbot_MMK2_pull_plunger_qced_hardlink/\n|-- annotations\n| | | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:96 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -395,7 +396,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -403,7 +404,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -430,135 +430,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_pull_plunger - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: medical_healthcare - level2: hospital - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the syringe with your left hand and pull the piston with your right hand. 
- sub_tasks: - - subtask: Lift the syringe with the left gripper - subtask_index: 0 - - subtask: Pull the piston with the right gripper - subtask_index: 1 - - subtask: End - subtask_index: 2 - - subtask: Grasp the syringe with the left gripper - subtask_index: 3 - - subtask: Grasp the piston with the right gripper - subtask_index: 4 - - subtask: Place the syringe on the table with the left gripper - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - pick - - place - - pull - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 26077 - dataset_size: 1002.56 MB - data_structure: "Airbot_MMK2_pull_plunger_qced_hardlink/\n|-- annotations\n| |--\ - \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ - \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n|\ - \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(85 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_pull_tissue.yaml b/dataset_info/Airbot_MMK2_pull_tissue.yaml index c3ba294dd6a1538f477a6a2b770bb585f8a88405..ce3a8b861a1d071f30c3d7d3202c0c2ae9da7719 100644 --- a/dataset_info/Airbot_MMK2_pull_tissue.yaml +++ b/dataset_info/Airbot_MMK2_pull_tissue.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: tissue_paper level1: kitchen_supplies level2: paper_towels @@ -45,29 +45,36 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: take a tissue paper out of tissue box by hand. +task_instruction: +- take a tissue paper out of tissue box by hand. sub_tasks: -- Place the tissue on the table with the right gripper -- End -- Pull out a piece of tissue with the right gripper -- Abnormal -- Grasp the a piece of tissue with the right gripper -- 'null' +- subtask: Place the tissue on the table with the right gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Pull out a piece of tissue with the right gripper + subtask_index: 2 +- subtask: Abnormal + subtask_index: 3 +- subtask: Grasp the a piece of tissue with the right gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -76,13 +83,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -90,8 +94,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 49 total_frames: 11531 fps: 30 @@ -118,11 +121,9 @@ data_structure: "Airbot_MMK2_pull_tissue_qced_hardlink/\n|-- annotations\n| |- | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:48 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -392,7 +393,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -400,7 +401,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -427,132 +427,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_pull_tissue - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - take a tissue paper out of tissue box by hand. 
- sub_tasks: - - subtask: Place the tissue on the table with the right gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Pull out a piece of tissue with the right gripper - subtask_index: 2 - - subtask: Abnormal - subtask_index: 3 - - subtask: Grasp the a piece of tissue with the right gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 11531 - dataset_size: 405.79 MB - data_structure: "Airbot_MMK2_pull_tissue_qced_hardlink/\n|-- annotations\n| |--\ - \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ - \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n|\ - \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_push_away_book.yaml b/dataset_info/Airbot_MMK2_push_away_book.yaml index 354bc9ceb0821916ece2b360179432e463e904a4..6e9cb78cd61e12281aa6abd739cd9764a4d42a8a 100644 --- a/dataset_info/Airbot_MMK2_push_away_book.yaml +++ b/dataset_info/Airbot_MMK2_push_away_book.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: book level1: stationery level2: book @@ -39,28 +39,34 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pull out a book by hand. +task_instruction: +- pull out a book by hand. sub_tasks: -- End -- Abnormal -- Lay the book down with the right gripper -- Hold the book with the right gripper -- 'null' +- subtask: End + subtask_index: 0 +- subtask: Abnormal + subtask_index: 1 +- subtask: Lay the book down with the right gripper + subtask_index: 2 +- subtask: Hold the book with the right gripper + subtask_index: 3 +- subtask: 'null' + subtask_index: 4 atomic_actions: - pinch - clip - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -69,13 +75,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -83,8 +86,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 244 total_frames: 36575 fps: 30 @@ -111,11 +113,9 @@ data_structure: "Airbot_MMK2_push_away_book_qced_hardlink/\n|-- annotations\n| | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:243 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -385,7 +385,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -393,7 +393,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -420,130 +419,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_push_away_book - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: study_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pull out a book by hand. 
- sub_tasks: - - subtask: End - subtask_index: 0 - - subtask: Abnormal - subtask_index: 1 - - subtask: Lay the book down with the right gripper - subtask_index: 2 - - subtask: Hold the book with the right gripper - subtask_index: 3 - - subtask: 'null' - subtask_index: 4 - atomic_actions: - - pinch - - clip - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 36575 - dataset_size: 1.40 GB - data_structure: "Airbot_MMK2_push_away_book_qced_hardlink/\n|-- annotations\n| \ - \ |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(232 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_push_plunger.yaml b/dataset_info/Airbot_MMK2_push_plunger.yaml index 947183879c352e268872e25ec299450086c775bf..aeb9b9e4dfd03cac4cdb9c65d14822315755b0e1 100644 --- a/dataset_info/Airbot_MMK2_push_plunger.yaml +++ b/dataset_info/Airbot_MMK2_push_plunger.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: furniture level2: table @@ -45,33 +45,42 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the syringe with your left hand, then push it back with - your right hand, and finally lower it with your left hand. +task_instruction: +- pick up the syringe with your left hand, then push it back with your right hand, + and finally lower it with your left hand. sub_tasks: -- Lift the syringe with the left gripper -- Push the piston with the right gripper -- End -- Grasp the syringe with the left gripper -- Grasp the piston with the right gripper -- Place the syringe on the table with the left gripper -- Abnormal -- 'null' +- subtask: Lift the syringe with the left gripper + subtask_index: 0 +- subtask: Push the piston with the right gripper + subtask_index: 1 +- subtask: End + subtask_index: 2 +- subtask: Grasp the syringe with the left gripper + subtask_index: 3 +- subtask: Grasp the piston with the right gripper + subtask_index: 4 +- subtask: Place the syringe on the table with the left gripper + subtask_index: 5 +- subtask: Abnormal + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - pick - place - push -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -80,13 +89,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -94,8 +100,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 147 total_frames: 47091 fps: 30 @@ -122,11 +127,9 @@ data_structure: "Airbot_MMK2_push_plunger_qced_hardlink/\n|-- annotations\n| | | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:146 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -396,7 +399,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -404,7 +407,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -431,138 +433,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_push_plunger - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: medical_healthcare - level2: hospital - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the syringe with your left hand, then push it back with your right hand, - and finally lower it with your left hand. 
- sub_tasks: - - subtask: Lift the syringe with the left gripper - subtask_index: 0 - - subtask: Push the piston with the right gripper - subtask_index: 1 - - subtask: End - subtask_index: 2 - - subtask: Grasp the syringe with the left gripper - subtask_index: 3 - - subtask: Grasp the piston with the right gripper - subtask_index: 4 - - subtask: Place the syringe on the table with the left gripper - subtask_index: 5 - - subtask: Abnormal - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - pick - - place - - push - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 47091 - dataset_size: 1.65 GB - data_structure: "Airbot_MMK2_push_plunger_qced_hardlink/\n|-- annotations\n| |--\ - \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ - \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n|\ - \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(135 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_push_toy_car.yaml b/dataset_info/Airbot_MMK2_push_toy_car.yaml index ef9e1c521625ac766e36d2d56df2583f0dd1f9c8..7ceeb6eca5905a1788127ad91e903cd62fb692ce 100644 --- a/dataset_info/Airbot_MMK2_push_toy_car.yaml +++ b/dataset_info/Airbot_MMK2_push_toy_car.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -45,26 +45,32 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: push the toy car by hand. +task_instruction: +- push the toy car by hand. sub_tasks: -- End -- Static -- Push the toy car from left to right with the left gripper -- Abnormal -- 'null' +- subtask: End + subtask_index: 0 +- subtask: Static + subtask_index: 1 +- subtask: Push the toy car from left to right with the left gripper + subtask_index: 2 +- subtask: Abnormal + subtask_index: 3 +- subtask: 'null' + subtask_index: 4 atomic_actions: - push -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -73,13 +79,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -87,8 +90,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 6897 fps: 30 @@ -115,11 +117,9 @@ data_structure: "Airbot_MMK2_push_toy_car_qced_hardlink/\n|-- annotations\n| | | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -389,7 +389,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -397,7 +397,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -424,128 +423,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_push_toy_car - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - push the toy car by hand. 
- sub_tasks: - - subtask: End - subtask_index: 0 - - subtask: Static - subtask_index: 1 - - subtask: Push the toy car from left to right with the left gripper - subtask_index: 2 - - subtask: Abnormal - subtask_index: 3 - - subtask: 'null' - subtask_index: 4 - atomic_actions: - - push - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 6897 - dataset_size: 175.81 MB - data_structure: "Airbot_MMK2_push_toy_car_qced_hardlink/\n|-- annotations\n| |--\ - \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ - \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n|\ - \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_remove_lid.yaml b/dataset_info/Airbot_MMK2_remove_lid.yaml index b93b169c40b2277ac942091d907fbbb7211ddf71..fe222d1290208a22a187b16cbe00b3bab6207932 100644 --- a/dataset_info/Airbot_MMK2_remove_lid.yaml +++ b/dataset_info/Airbot_MMK2_remove_lid.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -23,11 +23,14 @@ codebase_version: v2.1 dataset_name: Airbot_MMK2_remove_lid dataset_uuid: 00000000-0000-0000-0000-000000000000 scene_type: - level1: other - level2: laboratory + level1: scene_level1 + level2: scene_level2 + level3: null + level4: null + level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: lid level1: storage_utensils level2: lid @@ -48,28 +51,35 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the lid of the foam box. +task_instruction: +- pick up the lid of the foam box. sub_tasks: -- Place the lid of the foam box on the table with your left gripper -- End -- Grasp the lid of the foam box with your left gripper -- Grasp the lid of the foam box with your right gripper -- Place the lid of the foam box on the table with your right gripper -- 'null' +- subtask: Place the lid of the foam box on the table with your left gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Grasp the lid of the foam box with your left gripper + subtask_index: 2 +- subtask: Grasp the lid of the foam box with your right gripper + subtask_index: 3 +- subtask: Place the lid of the foam box on the table with your right gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - lift - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -78,13 +88,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -92,8 +99,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 47 total_frames: 11589 fps: 30 @@ -120,11 +126,9 @@ data_structure: "Airbot_MMK2_remove_lid_qced_hardlink/\n|-- annotations\n| |-- | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:46 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -394,7 +398,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -402,7 +406,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -429,131 +432,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_remove_lid - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: scene_level1 - level2: scene_level2 - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the lid of the foam box. 
- sub_tasks: - - subtask: Place the lid of the foam box on the table with your left gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Grasp the lid of the foam box with your left gripper - subtask_index: 2 - - subtask: Grasp the lid of the foam box with your right gripper - subtask_index: 3 - - subtask: Place the lid of the foam box on the table with your right gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - lift - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 11589 - dataset_size: 403.42 MB - data_structure: "Airbot_MMK2_remove_lid_qced_hardlink/\n|-- annotations\n| |--\ - \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ - \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n|\ - \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(35 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_remove_pen_cap.yaml b/dataset_info/Airbot_MMK2_remove_pen_cap.yaml index 56c792fff4d9bda36df17b233b3bb9b1730f409b..0628d60601a01b4228f55ce9bf59bd4387e712bd 100644 --- a/dataset_info/Airbot_MMK2_remove_pen_cap.yaml +++ b/dataset_info/Airbot_MMK2_remove_pen_cap.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: pen level1: stationery level2: pen @@ -39,30 +39,38 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the pen, remove the cap, and put it down. +task_instruction: +- pick up the pen, remove the cap, and put it down. sub_tasks: -- Place the pen cap on the table with the right gripper -- Grasp the pen tightly with the left gripper -- End -- Place the pen on the table with the left gripper -- Remove the pen cap with the right gripper -- Lift the pen with the left gripper -- 'null' +- subtask: Place the pen cap on the table with the right gripper + subtask_index: 0 +- subtask: Grasp the pen tightly with the left gripper + subtask_index: 1 +- subtask: End + subtask_index: 2 +- subtask: Place the pen on the table with the left gripper + subtask_index: 3 +- subtask: Remove the pen cap with the right gripper + subtask_index: 4 +- subtask: Lift the pen with the left gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -71,13 +79,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -85,8 +90,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 100 total_frames: 41147 fps: 30 @@ -113,11 +117,9 @@ data_structure: "Airbot_MMK2_remove_pen_cap_qced_hardlink/\n|-- annotations\n| | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:99 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -387,7 +389,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -395,7 +397,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -422,134 +423,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_remove_pen_cap - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: education - level2: school - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the pen, remove the cap, and put it down. 
- sub_tasks: - - subtask: Place the pen cap on the table with the right gripper - subtask_index: 0 - - subtask: Grasp the pen tightly with the left gripper - subtask_index: 1 - - subtask: End - subtask_index: 2 - - subtask: Place the pen on the table with the left gripper - subtask_index: 3 - - subtask: Remove the pen cap with the right gripper - subtask_index: 4 - - subtask: Lift the pen with the left gripper - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 41147 - dataset_size: 1.52 GB - data_structure: "Airbot_MMK2_remove_pen_cap_qced_hardlink/\n|-- annotations\n| \ - \ |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(88 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_rotate_cube_face.yaml b/dataset_info/Airbot_MMK2_rotate_cube_face.yaml index 8869468bd26ad79a8dc6f6734f90be17490bfe90..fd0e700b4ebecf1028296bccde3a7a6ee94192af 100644 --- a/dataset_info/Airbot_MMK2_rotate_cube_face.yaml +++ b/dataset_info/Airbot_MMK2_rotate_cube_face.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: rubik's_cube level1: toys level2: rubik's_cube @@ -39,33 +39,43 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the rubik's cube with left hand, rotate it once with right - hand, and then put it down with left hand. +task_instruction: +- pick up the rubik's cube with left hand, rotate it once with right hand, and then + put it down with left hand. sub_tasks: -- Adjust the rubik's cube with the left gripper -- Grasp the rubik's cube with the left gripper -- End -- Place the rubik's cube on the table with the left gripper -- Move the rightmost combination block of the rubik's cube with the right gripper -- Abnormal -- Lift up the rubik's cube with the left gripper -- 'null' +- subtask: Adjust the rubik's cube with the left gripper + subtask_index: 0 +- subtask: Grasp the rubik's cube with the left gripper + subtask_index: 1 +- subtask: End + subtask_index: 2 +- subtask: Place the rubik's cube on the table with the left gripper + subtask_index: 3 +- subtask: Move the rightmost combination block of the rubik's cube with the right + gripper + subtask_index: 4 +- subtask: Abnormal + subtask_index: 5 +- subtask: Lift up the rubik's cube with the left gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - pick - place - filp -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -74,13 +84,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -88,8 +95,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 145 total_frames: 55057 fps: 30 @@ -117,11 +123,9 @@ data_structure: "Airbot_MMK2_rotate_cube_face_qced_hardlink/\n|-- annotations\n| \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:144 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -391,7 +395,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -399,7 +403,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -426,139 +429,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_rotate_cube_face - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: study_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the rubik's cube with left hand, rotate it once with right hand, and then - put it down with left hand. 
- sub_tasks: - - subtask: Adjust the rubik's cube with the left gripper - subtask_index: 0 - - subtask: Grasp the rubik's cube with the left gripper - subtask_index: 1 - - subtask: End - subtask_index: 2 - - subtask: Place the rubik's cube on the table with the left gripper - subtask_index: 3 - - subtask: Move the rightmost combination block of the rubik's cube with the right - gripper - subtask_index: 4 - - subtask: Abnormal - subtask_index: 5 - - subtask: Lift up the rubik's cube with the left gripper - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - pick - - place - - filp - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 55057 - dataset_size: 2.13 GB - data_structure: "Airbot_MMK2_rotate_cube_face_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(133 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_slide_block.yaml b/dataset_info/Airbot_MMK2_slide_block.yaml index de74e3e91ced273f15c95fd8c5aeb974816076ad..37c2caab5269c8cd1464b302ca8bf800f2a12c44 100644 --- a/dataset_info/Airbot_MMK2_slide_block.yaml +++ b/dataset_info/Airbot_MMK2_slide_block.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: educational_building_blocks level1: toys level2: educational_building_blocks @@ -39,32 +39,42 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: press the building block with right hand to move its position. +task_instruction: +- press the building block with right hand to move its position. sub_tasks: -- Move the orange cylinder to the right side of the box with right gripper -- End -- Move the blue cube to the upper right side of the box with right gripper -- Adjust the build blocks -- Move the green cylinder to the right side of the box with right gripper -- Move the wooden arched build blocks to the upper right side of the box with right - gripper -- Abnormal -- Move the yellow cube to the upper right side of the box with right gripper -- 'null' +- subtask: Move the orange cylinder to the right side of the box with right gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Move the blue cube to the upper right side of the box with right gripper + subtask_index: 2 +- subtask: Adjust the build blocks + subtask_index: 3 +- subtask: Move the green cylinder to the right side of the box with right gripper + subtask_index: 4 +- subtask: Move the wooden arched build blocks to the upper right side of the box + with right gripper + subtask_index: 5 +- subtask: Abnormal + subtask_index: 6 +- subtask: Move the yellow cube to the upper right side of the box with right gripper + subtask_index: 7 +- subtask: 'null' + subtask_index: 8 atomic_actions: - press - push -robot_name: 
Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -73,13 +83,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -87,8 +94,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 173 total_frames: 52760 fps: 30 @@ -115,11 +121,9 @@ data_structure: "Airbot_MMK2_slide_block_qced_hardlink/\n|-- annotations\n| |- | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:172 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -389,7 +393,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN 
Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -397,7 +401,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -424,138 +427,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_slide_block - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: bedroom - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - press the building block with right hand to move its position. 
- sub_tasks: - - subtask: Move the orange cylinder to the right side of the box with right gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Move the blue cube to the upper right side of the box with right gripper - subtask_index: 2 - - subtask: Adjust the build blocks - subtask_index: 3 - - subtask: Move the green cylinder to the right side of the box with right gripper - subtask_index: 4 - - subtask: Move the wooden arched build blocks to the upper right side of the box - with right gripper - subtask_index: 5 - - subtask: Abnormal - subtask_index: 6 - - subtask: Move the yellow cube to the upper right side of the box with right gripper - subtask_index: 7 - - subtask: 'null' - subtask_index: 8 - atomic_actions: - - press - - push - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 52760 - dataset_size: 2.13 GB - data_structure: "Airbot_MMK2_slide_block_qced_hardlink/\n|-- annotations\n| |--\ - \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ - \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n|\ - \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- 
episode_000011.parquet\n| `--\ - \ ... (161 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, 
Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_slide_block_onto_post.yaml b/dataset_info/Airbot_MMK2_slide_block_onto_post.yaml index 7dd4c36da2ac6f5ed92dfd92e35833afdd911472..97d43efee4b0192e454fafb1982f3dac37fa8bd7 100644 --- a/dataset_info/Airbot_MMK2_slide_block_onto_post.yaml +++ b/dataset_info/Airbot_MMK2_slide_block_onto_post.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: early_education_toys level1: toys level2: early_education_toys @@ -39,32 +39,40 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the toy back to its original position with right hand. +task_instruction: +- put the toy back to its original position with right hand. sub_tasks: -- Grasp the red cylindrical build blocks with the right gripper -- End -- Place the green cylindrical build blocks on the 1st pillar of the block base with - the right gripper -- Place the red cylindrical build blocks on the 1st pillar of the block base with - the right gripper -- Abnormal -- Grasp the green cylindrical build blocks with the right gripper -- 'null' +- subtask: Grasp the red cylindrical build blocks with the right gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Place the green cylindrical build blocks on the 1st pillar of the block + base with the right gripper + subtask_index: 2 +- subtask: Place the red cylindrical build blocks on the 1st pillar of the block base + with the right gripper + subtask_index: 3 +- subtask: Abnormal + subtask_index: 4 +- subtask: Grasp the green cylindrical build blocks with the right gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -73,13 +81,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -87,8 +92,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 189 total_frames: 36378 fps: 30 @@ -116,11 +120,9 @@ data_structure: "Airbot_MMK2_slide_block_onto_post_qced_hardlink/\n|-- annotatio \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:188 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -390,7 +392,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -398,7 +400,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -425,136 +426,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_slide_block_onto_post - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: bedroom - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the toy back to its original position with right hand. 
- sub_tasks: - - subtask: Grasp the red cylindrical build blocks with the right gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Place the green cylindrical build blocks on the 1st pillar of the block - base with the right gripper - subtask_index: 2 - - subtask: Place the red cylindrical build blocks on the 1st pillar of the block - base with the right gripper - subtask_index: 3 - - subtask: Abnormal - subtask_index: 4 - - subtask: Grasp the green cylindrical build blocks with the right gripper - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 36378 - dataset_size: 1.31 GB - data_structure: "Airbot_MMK2_slide_block_onto_post_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(177 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_slide_tape_onto_can.yaml b/dataset_info/Airbot_MMK2_slide_tape_onto_can.yaml index 01ca7e5d26e000d626e9a02ffc09a493e6961c55..a11caefbf1c6571e86dc009725565ecad30e39af 100644 --- a/dataset_info/Airbot_MMK2_slide_tape_onto_can.yaml +++ b/dataset_info/Airbot_MMK2_slide_tape_onto_can.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: early_education_toys level1: toys level2: early_education_toys @@ -45,33 +45,41 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the rings with left and right hands respectively and place - them on the beverage cans. +task_instruction: +- pick up the rings with left and right hands respectively and place them on the beverage + cans. sub_tasks: -- Grasp the transparent circular ring on the left with the left gripper -- Place the transparent circular ring on the left onto the fanta in the middle with - left gripper -- Place the transparent circular ring on the right onto the fanta in the middle with - right gripper -- Abnormal -- Grasp the transparent circular ring on the right with the right gripper -- End -- 'null' +- subtask: Grasp the transparent circular ring on the left with the left gripper + subtask_index: 0 +- subtask: Place the transparent circular ring on the left onto the fanta in the + middle with left gripper + subtask_index: 1 +- subtask: Place the transparent circular ring on the right onto the fanta in the + middle with right gripper + subtask_index: 2 +- subtask: Abnormal + subtask_index: 3 +- subtask: Grasp the transparent circular ring on the right with the right gripper + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide 
the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -80,13 +88,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -94,8 +99,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 44 total_frames: 10133 fps: 30 @@ -123,11 +127,9 @@ data_structure: "Airbot_MMK2_slide_tape_onto_can_qced_hardlink/\n|-- annotations \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:43 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -397,7 +399,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -405,7 +407,6 @@ 
dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -432,137 +433,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_slide_tape_onto_can - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the rings with left and right hands respectively and place them on the - beverage cans. 
- sub_tasks: - - subtask: Grasp the transparent circular ring on the left with the left gripper - subtask_index: 0 - - subtask: Place the transparent circular ring on the left onto the fanta in the - middle with left gripper - subtask_index: 1 - - subtask: Place the transparent circular ring on the right onto the fanta in the - middle with right gripper - subtask_index: 2 - - subtask: Abnormal - subtask_index: 3 - - subtask: Grasp the transparent circular ring on the right with the right gripper - subtask_index: 4 - - subtask: End - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 10133 - dataset_size: 490.04 MB - data_structure: "Airbot_MMK2_slide_tape_onto_can_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(32 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_stack_block.yaml b/dataset_info/Airbot_MMK2_stack_block.yaml index 4cf539016a2f46d3655bf8c983455c33e1d4909f..9b312dfeb1014fc8fe8047bce277ef097996fd91 100644 --- a/dataset_info/Airbot_MMK2_stack_block.yaml +++ b/dataset_info/Airbot_MMK2_stack_block.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: square_building_blocks level1: toys level2: square_building_blocks @@ -45,45 +45,68 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: place the square building blocks in the center with left hand and - put the cylindrical building blocks on top with right hand. +task_instruction: +- place the square building blocks in the center with left hand and put the cylindrical + building blocks on top with right hand. sub_tasks: -- Grasp the blue build blocks with the left gripper -- Grasp the green build blocks with the right gripper -- Place the blue build blocks on the Mini table with the left gripper -- Place the green build blocks on the blue build block with the right gripper -- Grasp the blue diamond shaped build blocks with the right gripper -- Grasp the red build blocks with the right gripper -- Place the blue diamond shaped build blocks on the glasses case with the right gripper -- Place the purple build blocks on the red build block with the right gripper -- Place the green build blocks on the center of the table with the left gripper -- Grasp the green build blocks with the left gripper -- Place the glasses case on the red and green build blocks with the left gripper -- Grasp the purple build blocks with the right gripper -- Place the blue build blocks on the yellow build block with the left gripper -- Grasp the glasses case with the left gripper -- Place the green cylindrical build blocks on the green build blocks with the right - gripper -- End -- Place the blue build blocks on the 
pink build block with the left gripper -- Place the red build blocks on the blue build block with the right gripper -- Grasp the green cylindrical build blocks with the right gripper -- 'null' +- subtask: Grasp the blue build blocks with the left gripper + subtask_index: 0 +- subtask: Grasp the green build blocks with the right gripper + subtask_index: 1 +- subtask: Place the blue build blocks on the Mini table with the left gripper + subtask_index: 2 +- subtask: Place the green build blocks on the blue build block with the right gripper + subtask_index: 3 +- subtask: Grasp the blue diamond shaped build blocks with the right gripper + subtask_index: 4 +- subtask: Grasp the red build blocks with the right gripper + subtask_index: 5 +- subtask: Place the blue diamond shaped build blocks on the glasses case with the + right gripper + subtask_index: 6 +- subtask: Place the purple build blocks on the red build block with the right gripper + subtask_index: 7 +- subtask: Place the green build blocks on the center of the table with the left gripper + subtask_index: 8 +- subtask: Grasp the green build blocks with the left gripper + subtask_index: 9 +- subtask: Place the glasses case on the red and green build blocks with the left + gripper + subtask_index: 10 +- subtask: Grasp the purple build blocks with the right gripper + subtask_index: 11 +- subtask: Place the blue build blocks on the yellow build block with the left gripper + subtask_index: 12 +- subtask: Grasp the glasses case with the left gripper + subtask_index: 13 +- subtask: Place the green cylindrical build blocks on the green build blocks with + the right gripper + subtask_index: 14 +- subtask: End + subtask_index: 15 +- subtask: Place the blue build blocks on the pink build block with the left gripper + subtask_index: 16 +- subtask: Place the red build blocks on the blue build block with the right gripper + subtask_index: 17 +- subtask: Grasp the green cylindrical build blocks with the right gripper + 
subtask_index: 18 +- subtask: 'null' + subtask_index: 19 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -92,13 +115,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -106,8 +126,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 236 total_frames: 60925 fps: 30 @@ -134,11 +153,9 @@ data_structure: "Airbot_MMK2_stack_block_qced_hardlink/\n|-- annotations\n| |- | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:235 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ 
-408,7 +425,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -416,7 +433,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -443,165 +459,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_stack_block - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: bedroom - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. 
- task_instruction: - - place the square building blocks in the center with left hand and put the cylindrical - building blocks on top with right hand. - sub_tasks: - - subtask: Grasp the blue build blocks with the left gripper - subtask_index: 0 - - subtask: Grasp the green build blocks with the right gripper - subtask_index: 1 - - subtask: Place the blue build blocks on the Mini table with the left gripper - subtask_index: 2 - - subtask: Place the green build blocks on the blue build block with the right gripper - subtask_index: 3 - - subtask: Grasp the blue diamond shaped build blocks with the right gripper - subtask_index: 4 - - subtask: Grasp the red build blocks with the right gripper - subtask_index: 5 - - subtask: Place the blue diamond shaped build blocks on the glasses case with the - right gripper - subtask_index: 6 - - subtask: Place the purple build blocks on the red build block with the right gripper - subtask_index: 7 - - subtask: Place the green build blocks on the center of the table with the left - gripper - subtask_index: 8 - - subtask: Grasp the green build blocks with the left gripper - subtask_index: 9 - - subtask: Place the glasses case on the red and green build blocks with the left - gripper - subtask_index: 10 - - subtask: Grasp the purple build blocks with the right gripper - subtask_index: 11 - - subtask: Place the blue build blocks on the yellow build block with the left gripper - subtask_index: 12 - - subtask: Grasp the glasses case with the left gripper - subtask_index: 13 - - subtask: Place the green cylindrical build blocks on the green build blocks with - the right gripper - subtask_index: 14 - - subtask: End - subtask_index: 15 - - subtask: Place the blue build blocks on the pink build block with the left gripper - subtask_index: 16 - - subtask: Place the red build blocks on the blue build block with the right gripper - subtask_index: 17 - - subtask: Grasp the green cylindrical build blocks with the right gripper - subtask_index: 
18 - - subtask: 'null' - subtask_index: 19 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 60925 - dataset_size: 2.00 GB - data_structure: "Airbot_MMK2_stack_block_qced_hardlink/\n|-- annotations\n| |--\ - \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ - \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n|\ - \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... (224 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. 
- homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_stack_bowl.yaml b/dataset_info/Airbot_MMK2_stack_bowl.yaml index 512a57954634f80cc1f985781e3148017e1a9475..5d498527e5b40fe539888a57fe7acd10f79f9e8d 100644 --- a/dataset_info/Airbot_MMK2_stack_bowl.yaml +++ b/dataset_info/Airbot_MMK2_stack_bowl.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: bowl level1: plastic_bowls level2: bowl @@ -39,29 +39,36 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the small bowl into the big bowl by hand. 
+task_instruction: +- put the small bowl into the big bowl by hand. sub_tasks: -- Grasp the white bowl with the left gripper -- Abnormal -- Place the white bowl on the pink bowl with the left gripper -- End -- Static -- 'null' +- subtask: Grasp the white bowl with the left gripper + subtask_index: 0 +- subtask: Abnormal + subtask_index: 1 +- subtask: Place the white bowl on the pink bowl with the left gripper + subtask_index: 2 +- subtask: End + subtask_index: 3 +- subtask: Static + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -70,13 +77,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -84,8 +88,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 9073 fps: 30 @@ -172,11 +175,9 @@ data_structure: 'Airbot_MMK2_Airbot_MMK2_stack_bowl_qced_hardlink/ |-- info.yaml `-- README.md' 
-splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -446,7 +447,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -454,7 +455,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -481,191 +481,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. 
- extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_stack_bowl - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: catering - level2: restaurant - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the small bowl into the big bowl by hand. - sub_tasks: - - subtask: Grasp the white bowl with the left gripper - subtask_index: 0 - - subtask: Abnormal - subtask_index: 1 - - subtask: Place the white bowl on the pink bowl with the left gripper - subtask_index: 2 - - subtask: End - subtask_index: 3 - - subtask: Static - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 9073 - dataset_size: 339.42 MB - data_structure: 'Airbot_MMK2_Airbot_MMK2_stack_bowl_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (38 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_front_rgb - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_stack_cubic_block.yaml b/dataset_info/Airbot_MMK2_stack_cubic_block.yaml index c144f3752922fcaf7e2af1f2b0247fbd3e740c5a..e0541844820186d72be185620e75511a46b4661a 100644 --- a/dataset_info/Airbot_MMK2_stack_cubic_block.yaml +++ b/dataset_info/Airbot_MMK2_stack_cubic_block.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: square_building_blocks level1: toys level2: square_building_blocks @@ -39,37 +39,53 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the building blocks with left and right hands respectively - and place them on the blocks. +task_instruction: +- pick up the building blocks with left and right hands respectively and place them + on the blocks. 
sub_tasks: -- Grasp the blue build blocks with the right gripper -- End -- Place the blue build blocks on the red build block with the right gripper -- Place the yellow build blocks on the orange build block with the right gripper -- Place the red build blocks on the center of the table with the left gripper -- Place the orange build blocks on the yellow build block with the right gripper -- Grasp the blue build blocks with the left gripper -- Grasp the red build blocks with the left gripper -- Abnormal -- Grasp the orange build blocks with the right gripper -- Place the blue build blocks on the red build block with the left gripper -- Grasp the yellow build blocks with the right gripper -- 'null' +- subtask: Grasp the blue build blocks with the right gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Place the blue build blocks on the red build block with the right gripper + subtask_index: 2 +- subtask: Place the yellow build blocks on the orange build block with the right + gripper + subtask_index: 3 +- subtask: Place the red build blocks on the center of the table with the left gripper + subtask_index: 4 +- subtask: Place the orange build blocks on the yellow build block with the right + gripper + subtask_index: 5 +- subtask: Grasp the blue build blocks with the left gripper + subtask_index: 6 +- subtask: Grasp the red build blocks with the left gripper + subtask_index: 7 +- subtask: Abnormal + subtask_index: 8 +- subtask: Grasp the orange build blocks with the right gripper + subtask_index: 9 +- subtask: Place the blue build blocks on the red build block with the left gripper + subtask_index: 10 +- subtask: Grasp the yellow build blocks with the right gripper + subtask_index: 11 +- subtask: 'null' + subtask_index: 12 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type 
information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -78,13 +94,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -92,8 +105,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 162 total_frames: 48691 fps: 30 @@ -121,11 +133,9 @@ data_structure: "Airbot_MMK2_stack_cubic_block_qced_hardlink/\n|-- annotations\n \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:161 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -395,7 +405,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -403,7 +413,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -430,149 +439,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_stack_cubic_block - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: bedroom - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the building blocks with left and right hands respectively and place them - on the blocks. 
- sub_tasks: - - subtask: Grasp the blue build blocks with the right gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Place the blue build blocks on the red build block with the right gripper - subtask_index: 2 - - subtask: Place the yellow build blocks on the orange build block with the right - gripper - subtask_index: 3 - - subtask: Place the red build blocks on the center of the table with the left gripper - subtask_index: 4 - - subtask: Place the orange build blocks on the yellow build block with the right - gripper - subtask_index: 5 - - subtask: Grasp the blue build blocks with the left gripper - subtask_index: 6 - - subtask: Grasp the red build blocks with the left gripper - subtask_index: 7 - - subtask: Abnormal - subtask_index: 8 - - subtask: Grasp the orange build blocks with the right gripper - subtask_index: 9 - - subtask: Place the blue build blocks on the red build block with the left gripper - subtask_index: 10 - - subtask: Grasp the yellow build blocks with the right gripper - subtask_index: 11 - - subtask: 'null' - subtask_index: 12 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 48691 - dataset_size: 1.92 GB - data_structure: "Airbot_MMK2_stack_cubic_block_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... (150 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_stack_cup.yaml b/dataset_info/Airbot_MMK2_stack_cup.yaml index 5eb254c58e2586988e82c775c23a2390c65ec518..07e8ba65b1977479519a404163fc61b0c133a27a 100644 --- a/dataset_info/Airbot_MMK2_stack_cup.yaml +++ b/dataset_info/Airbot_MMK2_stack_cup.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: cup level1: kitchen_supplies level2: cup @@ -39,29 +39,36 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the cup by hand and stack it on top of another cup. +task_instruction: +- pick up the cup by hand and stack it on top of another cup. 
sub_tasks: -- Place the purple cup on the pink cup with the left gripper -- End -- Grasp the white cup with the right gripper -- Place the white cup on the purple cup with the right gripper -- Grasp the purple cup with the left gripper -- 'null' +- subtask: Place the purple cup on the pink cup with the left gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Grasp the white cup with the right gripper + subtask_index: 2 +- subtask: Place the white cup on the purple cup with the right gripper + subtask_index: 3 +- subtask: Grasp the purple cup with the left gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -70,13 +77,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -84,8 +88,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 99 total_frames: 67637 fps: 30 @@ -112,11 
+115,9 @@ data_structure: "Airbot_MMK2_stack_cup_qced_hardlink/\n|-- annotations\n| |-- | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:98 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -386,7 +387,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -394,7 +395,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -421,132 +421,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. 
- extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_stack_cup - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the cup by hand and stack it on top of another cup. - sub_tasks: - - subtask: Place the purple cup on the pink cup with the left gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Grasp the white cup with the right gripper - subtask_index: 2 - - subtask: Place the white cup on the purple cup with the right gripper - subtask_index: 3 - - subtask: Grasp the purple cup with the left gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 67637 - dataset_size: 2.37 GB - data_structure: "Airbot_MMK2_stack_cup_qced_hardlink/\n|-- annotations\n| |--\ - \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ - \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n|\ - \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... (87 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_storage_and_take_cake_plate.yaml b/dataset_info/Airbot_MMK2_storage_and_take_cake_plate.yaml index a7f23d26d00ae0c9f51375f82b9803e3964c7678..1886340e7b5b3bc7e5e52f817d10aff55e9b2552 100644 --- a/dataset_info/Airbot_MMK2_storage_and_take_cake_plate.yaml +++ b/dataset_info/Airbot_MMK2_storage_and_take_cake_plate.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: cake level1: bread level2: cake @@ -45,31 +45,38 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the cake into the plate with left hand and take it out with - right hand. +task_instruction: +- put the cake into the plate with left hand and take it out with right hand. 
sub_tasks: -- Place the cake on the table with the right gripper -- Grasp the cake with the left gripper -- Static -- Place the cake into the white plate with the left gripper -- Grasp the cake on the plate with the right gripper -- End -- 'null' +- subtask: Place the cake on the table with the right gripper + subtask_index: 0 +- subtask: Grasp the cake with the left gripper + subtask_index: 1 +- subtask: Static + subtask_index: 2 +- subtask: Place the cake into the white plate with the left gripper + subtask_index: 3 +- subtask: Grasp the cake on the plate with the right gripper + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -78,13 +85,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -92,8 +96,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 
total_frames: 9782 fps: 30 @@ -121,11 +124,9 @@ data_structure: "Airbot_MMK2_storage_and_take_cake_plate_qced_hardlink/\n|-- ann \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -395,7 +396,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -403,7 +404,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -430,134 +430,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. 
You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_and_take_cake_plate - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the cake into the plate with left hand and take it out with right hand. - sub_tasks: - - subtask: Place the cake on the table with the right gripper - subtask_index: 0 - - subtask: Grasp the cake with the left gripper - subtask_index: 1 - - subtask: Static - subtask_index: 2 - - subtask: Place the cake into the white plate with the left gripper - subtask_index: 3 - - subtask: Grasp the cake on the plate with the right gripper - subtask_index: 4 - - subtask: End - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 9782 - dataset_size: 487.34 MB - data_structure: "Airbot_MMK2_storage_and_take_cake_plate_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... (38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_apple_orange.yaml b/dataset_info/Airbot_MMK2_storage_apple_orange.yaml index 723327fab8bec8ede5fab186859ba9f2ca58393f..a66df96d55b0b4bd2b6267988625dc83ff78ea2f 100644 --- a/dataset_info/Airbot_MMK2_storage_apple_orange.yaml +++ b/dataset_info/Airbot_MMK2_storage_apple_orange.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -23,11 +23,14 @@ codebase_version: v2.1 dataset_name: Airbot_MMK2_storage_apple_orange dataset_uuid: 00000000-0000-0000-0000-000000000000 scene_type: - level1: household - level2: kitchen + level1: scene_level1 + level2: scene_level2 + level3: null + level4: null + level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: apple level1: fruits level2: apple @@ -48,30 +51,39 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the apple with left hand and put it in the storage box, - and pick up the orange with right hand and put it in the storage box. 
+task_instruction: +- pick up the apple with left hand and put it in the storage box, and pick up the + orange with right hand and put it in the storage box. sub_tasks: -- Grasp the orange with the right gripper -- End -- Place the orange into the right compartment of the storage box with the right gripper -- Grasp the apple with the left gripper -- Place the apple into the left compartment of the storage box with the left gripper -- 'null' +- subtask: Grasp the orange with the right gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Place the orange into the right compartment of the storage box with the + right gripper + subtask_index: 2 +- subtask: Grasp the apple with the left gripper + subtask_index: 3 +- subtask: Place the apple into the left compartment of the storage box with the + left gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -80,13 +92,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -94,8 +103,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 41 total_frames: 6657 fps: 30 @@ -123,11 +131,9 @@ data_structure: "Airbot_MMK2_storage_apple_orange_qced_hardlink/\n|-- annotation \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:40 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -397,7 +403,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -405,7 +411,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -432,135 +437,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_apple_orange - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: scene_level1 - level2: scene_level2 - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the apple with left hand and put it in the storage box, and pick up the - orange with right hand and put it in the storage box. 
- sub_tasks: - - subtask: Grasp the orange with the right gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Place the orange into the right compartment of the storage box with the - right gripper - subtask_index: 2 - - subtask: Grasp the apple with the left gripper - subtask_index: 3 - - subtask: Place the apple into the left compartment of the storage box with the - left gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 6657 - dataset_size: 176.34 MB - data_structure: "Airbot_MMK2_storage_apple_orange_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(29 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_badminton.yaml b/dataset_info/Airbot_MMK2_storage_badminton.yaml index 8a9e74912e32164fdd68886ec8f5cfbc654d48d6..535fd2c2f6365b28e00794a006211de1bdc3a1e1 100644 --- a/dataset_info/Airbot_MMK2_storage_badminton.yaml +++ b/dataset_info/Airbot_MMK2_storage_badminton.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: badminton level1: toys level2: badminton @@ -45,31 +45,38 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the badminton shuttlecock into the storage box with left and - right hands respectively. +task_instruction: +- put the badminton shuttlecock into the storage box with left and right hands respectively. sub_tasks: -- Grasp the badminton with the right gripper -- Place the badminton into the left compartment of the storage box with the left gripper -- End -- Grasp the badminton with the left gripper -- Place the badminton into the right compartment of the storage box with the right - gripper -- 'null' +- subtask: Grasp the badminton with the right gripper + subtask_index: 0 +- subtask: Place the badminton into the left compartment of the storage box with the + left gripper + subtask_index: 1 +- subtask: End + subtask_index: 2 +- subtask: Grasp the badminton with the left gripper + subtask_index: 3 +- subtask: Place the badminton into the right compartment of the storage box with + the right gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -78,13 +85,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -92,8 +96,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 49 total_frames: 8382 fps: 30 @@ -121,11 +124,9 @@ data_structure: "Airbot_MMK2_storage_badminton_qced_hardlink/\n|-- annotations\n \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:48 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -395,7 +396,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -403,7 +404,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -430,134 +430,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_badminton - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: other - level2: leisure_plaza - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the badminton shuttlecock into the storage box with left and right hands respectively. 
- sub_tasks: - - subtask: Grasp the badminton with the right gripper - subtask_index: 0 - - subtask: Place the badminton into the left compartment of the storage box with - the left gripper - subtask_index: 1 - - subtask: End - subtask_index: 2 - - subtask: Grasp the badminton with the left gripper - subtask_index: 3 - - subtask: Place the badminton into the right compartment of the storage box with - the right gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 8382 - dataset_size: 253.84 MB - data_structure: "Airbot_MMK2_storage_badminton_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_bell_pepper.yaml b/dataset_info/Airbot_MMK2_storage_bell_pepper.yaml index ef9d7e043119dd0744e6f1ec287256e3a42496c6..7025ff5830004197ae9561a7b0da3219710af0c5 100644 --- a/dataset_info/Airbot_MMK2_storage_bell_pepper.yaml +++ b/dataset_info/Airbot_MMK2_storage_bell_pepper.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -23,11 +23,14 @@ codebase_version: v2.1 dataset_name: Airbot_MMK2_storage_bell_pepper dataset_uuid: 00000000-0000-0000-0000-000000000000 scene_type: - level1: household - level2: kitchen + level1: scene_level1 + level2: scene_level2 + level3: null + level4: null + level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: red_bell_pepper level1: vegetables level2: red_bell_pepper @@ -48,31 +51,38 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the red peppers and yellow peppers into the box. +task_instruction: +- put the red peppers and yellow peppers into the box. sub_tasks: -- Place the yellow pepper into the left compartment of the storage box with the left - gripper -- End -- Grasp the green pepper with the right gripper -- Place the green pepper into the right compartment of the storage box with the right - gripper -- Grasp the yellow pepper with the left gripper -- 'null' +- subtask: Place the yellow pepper into the left compartment of the storage box with + the left gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Grasp the green pepper with the right gripper + subtask_index: 2 +- subtask: Place the green pepper into the right compartment of the storage box with + the right gripper + subtask_index: 3 +- subtask: Grasp the yellow pepper with the left gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type 
information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -81,13 +91,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -95,8 +102,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 48 total_frames: 7525 fps: 30 @@ -124,11 +130,9 @@ data_structure: "Airbot_MMK2_storage_bell_pepper_qced_hardlink/\n|-- annotations \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:47 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -398,7 +402,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -406,7 +410,6 @@ dataset_description: This dataset 
uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -433,134 +436,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_bell_pepper - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: scene_level1 - level2: scene_level2 - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the red peppers and yellow peppers into the box. 
- sub_tasks: - - subtask: Place the yellow pepper into the left compartment of the storage box - with the left gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Grasp the green pepper with the right gripper - subtask_index: 2 - - subtask: Place the green pepper into the right compartment of the storage box - with the right gripper - subtask_index: 3 - - subtask: Grasp the yellow pepper with the left gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 7525 - dataset_size: 200.59 MB - data_structure: "Airbot_MMK2_storage_bell_pepper_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(36 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_bell_pepper_bowl.yaml b/dataset_info/Airbot_MMK2_storage_bell_pepper_bowl.yaml index 5f87a7b4cdd5e9847645b578bd00788011f05b23..a002e14a8557a611dd1477632fa4e4fae691e7c7 100644 --- a/dataset_info/Airbot_MMK2_storage_bell_pepper_bowl.yaml +++ b/dataset_info/Airbot_MMK2_storage_bell_pepper_bowl.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: yellow_bell_pepper level1: vegetables level2: yellow_bell_pepper @@ -45,29 +45,36 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the yellow green peppers into the bowl with right hand. +task_instruction: +- put the yellow green peppers into the bowl with right hand. sub_tasks: -- Abnormal -- Place yellow round chili pepper on the blue bowl with the right gripper -- Grasp the yellow round chili pepper with the right gripper -- End -- Static -- 'null' +- subtask: Abnormal + subtask_index: 0 +- subtask: Place yellow round chili pepper on the blue bowl with the right gripper + subtask_index: 1 +- subtask: Grasp the yellow round chili pepper with the right gripper + subtask_index: 2 +- subtask: End + subtask_index: 3 +- subtask: Static + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -76,13 +83,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -90,8 +94,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 4495 fps: 30 @@ -119,11 +122,9 @@ data_structure: "Airbot_MMK2_Airbot_MMK2_storage_bell_pepper_bowl_qced_hardlink/ \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -393,7 +394,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -401,7 +402,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -428,132 +428,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_bell_pepper_bowl - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: living_room - level2: household - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the yellow green peppers into the bowl with right hand. 
- sub_tasks: - - subtask: Abnormal - subtask_index: 0 - - subtask: Place yellow round chili pepper on the blue bowl with the right gripper - subtask_index: 1 - - subtask: Grasp the yellow round chili pepper with the right gripper - subtask_index: 2 - - subtask: End - subtask_index: 3 - - subtask: Static - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 4495 - dataset_size: 149.48 MB - data_structure: "Airbot_MMK2_Airbot_MMK2_storage_bell_pepper_bowl_qced_hardlink/\n\ - |-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_block.yaml b/dataset_info/Airbot_MMK2_storage_block.yaml index ae2145243bd667fe28f17b0a3aa0d6296a1c683a..ebf658474d20d27663a628ac9eb191123701c265 100644 --- a/dataset_info/Airbot_MMK2_storage_block.yaml +++ b/dataset_info/Airbot_MMK2_storage_block.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: rectangular_building_blocks level1: building_blocks level2: rectangular_building_blocks @@ -45,49 +45,81 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the building blocks into the plate by hand. +task_instruction: +- put the building blocks into the plate by hand. sub_tasks: -- Place the green rectangular build blocks into the white plate with the right gripper -- Grasp the green cylindrical build blocks with the left gripper -- Place the red cube build blocks into the plate with the left gripper -- Place the green rectangular build blocks into the pink plate with the right gripper -- Place the green cube build blocks into the plate with the right gripper -- Place the green cylindrical build blocks into the cardboard box with the left gripper -- Place the red rectangular build blocks into the pink plate with the left gripper -- Grasp the purple cube build blocks with the left gripper -- Grasp the blue cube build blocks with the right gripper -- Abnormal -- Grasp the green cube build blocks with the right gripper -- Place the purple cube build blocks into the white plate with the left gripper -- Place the red rectangular build blocks into the white plate with the left gripper -- Place the red cube build blocks into the white plate with the right gripper -- Grasp the red cylindrical build blocks with the right gripper -- Place the blue cube build blocks on the purple cube build blocks with the right - gripper -- Place the green cube build blocks into the pink plate with the left 
gripper -- Grasp the green rectangular build blocks with the right gripper -- End -- Place the red cylindrical build blocks into the cardboard box with the right gripper -- Grasp the red cube build blocks with the left gripper -- Grasp the red cube build blocks with the right gripper -- Grasp the red rectangular build blocks with the left gripper -- Grasp the green cube build blocks with the left gripper -- 'null' +- subtask: Place the green rectangular build blocks into the white plate with the + right gripper + subtask_index: 0 +- subtask: Grasp the green cylindrical build blocks with the left gripper + subtask_index: 1 +- subtask: Place the red cube build blocks into the plate with the left gripper + subtask_index: 2 +- subtask: Place the green rectangular build blocks into the pink plate with the right + gripper + subtask_index: 3 +- subtask: Place the green cube build blocks into the plate with the right gripper + subtask_index: 4 +- subtask: Place the green cylindrical build blocks into the cardboard box with the + left gripper + subtask_index: 5 +- subtask: Place the red rectangular build blocks into the pink plate with the left + gripper + subtask_index: 6 +- subtask: Grasp the purple cube build blocks with the left gripper + subtask_index: 7 +- subtask: Grasp the blue cube build blocks with the right gripper + subtask_index: 8 +- subtask: Abnormal + subtask_index: 9 +- subtask: Grasp the green cube build blocks with the right gripper + subtask_index: 10 +- subtask: Place the purple cube build blocks into the white plate with the left gripper + subtask_index: 11 +- subtask: Place the red rectangular build blocks into the white plate with the left + gripper + subtask_index: 12 +- subtask: Place the red cube build blocks into the white plate with the right gripper + subtask_index: 13 +- subtask: Grasp the red cylindrical build blocks with the right gripper + subtask_index: 14 +- subtask: Place the blue cube build blocks on the purple cube build blocks with 
the + right gripper + subtask_index: 15 +- subtask: Place the green cube build blocks into the pink plate with the left gripper + subtask_index: 16 +- subtask: Grasp the green rectangular build blocks with the right gripper + subtask_index: 17 +- subtask: End + subtask_index: 18 +- subtask: Place the red cylindrical build blocks into the cardboard box with the + right gripper + subtask_index: 19 +- subtask: Grasp the red cube build blocks with the left gripper + subtask_index: 20 +- subtask: Grasp the red cube build blocks with the right gripper + subtask_index: 21 +- subtask: Grasp the red rectangular build blocks with the left gripper + subtask_index: 22 +- subtask: Grasp the green cube build blocks with the left gripper + subtask_index: 23 +- subtask: 'null' + subtask_index: 24 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -96,13 +128,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -110,8 +139,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 262 total_frames: 55260 fps: 30 @@ -138,11 +166,9 @@ data_structure: "Airbot_MMK2_storage_block_qced_hardlink/\n|-- annotations\n| | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:261 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -412,7 +438,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -420,7 +446,6 @@ dataset_description: 
This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -447,178 +472,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_block - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: bedroom - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the building blocks into the plate by hand. 
- sub_tasks: - - subtask: Place the green rectangular build blocks into the white plate with the - right gripper - subtask_index: 0 - - subtask: Grasp the green cylindrical build blocks with the left gripper - subtask_index: 1 - - subtask: Place the red cube build blocks into the plate with the left gripper - subtask_index: 2 - - subtask: Place the green rectangular build blocks into the pink plate with the - right gripper - subtask_index: 3 - - subtask: Place the green cube build blocks into the plate with the right gripper - subtask_index: 4 - - subtask: Place the green cylindrical build blocks into the cardboard box with - the left gripper - subtask_index: 5 - - subtask: Place the red rectangular build blocks into the pink plate with the left - gripper - subtask_index: 6 - - subtask: Grasp the purple cube build blocks with the left gripper - subtask_index: 7 - - subtask: Grasp the blue cube build blocks with the right gripper - subtask_index: 8 - - subtask: Abnormal - subtask_index: 9 - - subtask: Grasp the green cube build blocks with the right gripper - subtask_index: 10 - - subtask: Place the purple cube build blocks into the white plate with the left - gripper - subtask_index: 11 - - subtask: Place the red rectangular build blocks into the white plate with the - left gripper - subtask_index: 12 - - subtask: Place the red cube build blocks into the white plate with the right gripper - subtask_index: 13 - - subtask: Grasp the red cylindrical build blocks with the right gripper - subtask_index: 14 - - subtask: Place the blue cube build blocks on the purple cube build blocks with - the right gripper - subtask_index: 15 - - subtask: Place the green cube build blocks into the pink plate with the left gripper - subtask_index: 16 - - subtask: Grasp the green rectangular build blocks with the right gripper - subtask_index: 17 - - subtask: End - subtask_index: 18 - - subtask: Place the red cylindrical build blocks into the cardboard box with the - right gripper - 
subtask_index: 19 - - subtask: Grasp the red cube build blocks with the left gripper - subtask_index: 20 - - subtask: Grasp the red cube build blocks with the right gripper - subtask_index: 21 - - subtask: Grasp the red rectangular build blocks with the left gripper - subtask_index: 22 - - subtask: Grasp the green cube build blocks with the left gripper - subtask_index: 23 - - subtask: 'null' - subtask_index: 24 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 55260 - dataset_size: 2.17 GB - data_structure: "Airbot_MMK2_storage_block_qced_hardlink/\n|-- annotations\n| \ - \ |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n|\ - \ |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(250 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_storage_block_BBs.yaml b/dataset_info/Airbot_MMK2_storage_block_BBs.yaml index 3b2b64ad7c07b735d470b58410c3e62dc6b33a47..e9baeedf5e28ad0d84b8439e1264c23acb0bff90 100644 --- a/dataset_info/Airbot_MMK2_storage_block_BBs.yaml +++ b/dataset_info/Airbot_MMK2_storage_block_BBs.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -23,11 +23,14 @@ codebase_version: v2.1 dataset_name: Airbot_MMK2_storage_block_BBs dataset_uuid: 00000000-0000-0000-0000-000000000000 scene_type: - level1: household - level2: living_room + level1: scene_level1 + level2: scene_level2 + level3: null + level4: null + level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: null level1: bb_pellets level2: ball @@ -48,30 +51,37 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the bb pellets and rectangular building blocks into the bowl. +task_instruction: +- put the bb pellets and rectangular building blocks into the bowl. sub_tasks: -- Place the green cuboid block into the bowl with the right gripper -- Grasp the green cuboid block with the right gripper -- Grasp the bullet with the left gripper -- Place the bullet into the bowl with the left gripper -- End -- 'null' +- subtask: Place the green cuboid block into the bowl with the right gripper + subtask_index: 0 +- subtask: Grasp the green cuboid block with the right gripper + subtask_index: 1 +- subtask: Grasp the bullet with the left gripper + subtask_index: 2 +- subtask: Place the bullet into the bowl with the left gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - pick - clip - place - lift -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -80,13 +90,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -94,8 +101,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 48 total_frames: 8640 fps: 30 @@ -182,11 +188,9 @@ data_structure: 'Airbot_MMK2_storage_block_BBs_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:47 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -456,7 +460,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -464,7 +468,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: 
robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -491,192 +494,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_block_BBs - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: scene_level1 - level2: scene_level2 - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the bb pellets and rectangular building blocks into the bowl. 
- sub_tasks: - - subtask: Place the green cuboid block into the bowl with the right gripper - subtask_index: 0 - - subtask: Grasp the green cuboid block with the right gripper - subtask_index: 1 - - subtask: Grasp the bullet with the left gripper - subtask_index: 2 - - subtask: Place the bullet into the bowl with the left gripper - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - pick - - clip - - place - - lift - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 8640 - dataset_size: 282.66 MB - data_structure: 'Airbot_MMK2_storage_block_BBs_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(36 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_front_rgb - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ 
Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_block_both_hands.yaml b/dataset_info/Airbot_MMK2_storage_block_both_hands.yaml index 84dafff1004fac976a197516473234d094eeb86e..d4ce797f27e8884c2408693d00ce8c202ef3c55b 100644 --- a/dataset_info/Airbot_MMK2_storage_block_both_hands.yaml +++ b/dataset_info/Airbot_MMK2_storage_block_both_hands.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: square_building_blocks level1: toys level2: square_building_blocks @@ -45,30 +45,37 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the building blocks with both hands simultaneously and put - them into the white storage box. +task_instruction: +- pick up the building blocks with both hands simultaneously and put them into the + white storage box. sub_tasks: -- Grasp the red block with the left gripper -- End -- Grasp the orange block with the right gripper -- Place the red block into the white basket with the left gripper -- Place the orange block into the white basket with the right gripper -- 'null' +- subtask: Grasp the red block with the left gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Grasp the orange block with the right gripper + subtask_index: 2 +- subtask: Place the red block into the white basket with the left gripper + subtask_index: 3 +- subtask: Place the orange block into the white basket with the right gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -77,13 +84,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -91,8 +95,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 49 total_frames: 3769 fps: 30 @@ -120,11 +123,9 @@ data_structure: "Airbot_MMK2_storage_block_both_hands_qced_hardlink/\n|-- annota \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:48 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -394,7 +395,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -402,7 +403,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -429,133 +429,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_block_both_hands - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the building blocks with both hands simultaneously and put them into the - white storage box. 
- sub_tasks: - - subtask: Grasp the red block with the left gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Grasp the orange block with the right gripper - subtask_index: 2 - - subtask: Place the red block into the white basket with the left gripper - subtask_index: 3 - - subtask: Place the orange block into the white basket with the right gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 3769 - dataset_size: 104.08 MB - data_structure: "Airbot_MMK2_storage_block_both_hands_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_block_tape_measure.yaml b/dataset_info/Airbot_MMK2_storage_block_tape_measure.yaml index 8229052731afe4a5099215b64a5b7ae3708f08d6..3d3303b9e81bc1a150125c4a2a44cd1d4d7463d0 100644 --- a/dataset_info/Airbot_MMK2_storage_block_tape_measure.yaml +++ b/dataset_info/Airbot_MMK2_storage_block_tape_measure.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: ruler_set level1: stationery level2: ruler_set @@ -57,32 +57,41 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: place the building blocks in the bowl with left hand and the ruler - in the plate with right hand. +task_instruction: +- place the building blocks in the bowl with left hand and the ruler in the plate + with right hand. sub_tasks: -- Place the tape measure into the pink plate with the right gripper -- Grasp the tape measure with the right gripper -- Place the green rectangular block into the blue bowl with the left gripper -- Static -- Grasp the green rectangular block with the left gripper -- Abnormal -- End -- 'null' +- subtask: Place the tape measure into the pink plate with the right gripper + subtask_index: 0 +- subtask: Grasp the tape measure with the right gripper + subtask_index: 1 +- subtask: Place the green rectangular block into the blue bowl with the left gripper + subtask_index: 2 +- subtask: Static + subtask_index: 3 +- subtask: Grasp the green rectangular block with the left gripper + subtask_index: 4 +- subtask: Abnormal + subtask_index: 5 +- subtask: End + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -91,13 +100,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -105,8 +111,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 48 total_frames: 8921 fps: 30 @@ -193,11 +198,9 @@ data_structure: 'Airbot_MMK2_storage_block_tape_measure_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:47 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -467,7 +470,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -475,7 +478,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: 
robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -502,196 +504,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_block_tape_measure - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - place the building blocks in the bowl with left hand and the ruler in the plate - with right hand. 
- sub_tasks: - - subtask: Place the tape measure into the pink plate with the right gripper - subtask_index: 0 - - subtask: Grasp the tape measure with the right gripper - subtask_index: 1 - - subtask: Place the green rectangular block into the blue bowl with the left gripper - subtask_index: 2 - - subtask: Static - subtask_index: 3 - - subtask: Grasp the green rectangular block with the left gripper - subtask_index: 4 - - subtask: Abnormal - subtask_index: 5 - - subtask: End - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 8921 - dataset_size: 371.90 MB - data_structure: 'Airbot_MMK2_storage_block_tape_measure_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(36 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_front_rgb - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ 
Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_book.yaml b/dataset_info/Airbot_MMK2_storage_book.yaml index 1b23a78db833c2ea173cc1f17569088583df25ea..c2afa0748a3cdcd197d0ad33ef055c76417b2e52 100644 --- a/dataset_info/Airbot_MMK2_storage_book.yaml +++ b/dataset_info/Airbot_MMK2_storage_book.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: bookshelves level1: bookshelves level2: bookshelves @@ -45,32 +45,42 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the book with right hand and place it on the bookshelf. +task_instruction: +- pick up the book with right hand and place it on the bookshelf. sub_tasks: -- End -- Grasp the book with the right gripper -- Place the book on the bookshelf with the left gripper -- Place the book on the bookshelf with the right gripper -- Push the book to the edge of the table with the right gripper -- Grasp the book with the left gripper -- Grasp the third book from the right with the left gripper -- Abnormal -- 'null' +- subtask: End + subtask_index: 0 +- subtask: Grasp the book with the right gripper + subtask_index: 1 +- subtask: Place the book on the bookshelf with the left gripper + subtask_index: 2 +- subtask: Place the book on the bookshelf with the right gripper + subtask_index: 3 +- subtask: Push the book to the edge of the table with the right gripper + subtask_index: 4 +- subtask: Grasp the book with the left gripper + subtask_index: 5 +- subtask: Grasp the third book from the right with the left gripper + subtask_index: 6 +- subtask: Abnormal + subtask_index: 7 +- subtask: 'null' + subtask_index: 8 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -79,13 +89,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -93,8 +100,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 237 total_frames: 91429 fps: 30 @@ -121,11 +127,9 @@ data_structure: "Airbot_MMK2_storage_book_qced_hardlink/\n|-- annotations\n| | | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:236 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -395,7 +399,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -403,7 +407,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -430,138 +433,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_book - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: study_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the book with right hand and place it on the bookshelf. 
- sub_tasks: - - subtask: End - subtask_index: 0 - - subtask: Grasp the book with the right gripper - subtask_index: 1 - - subtask: Place the book on the bookshelf with the left gripper - subtask_index: 2 - - subtask: Place the book on the bookshelf with the right gripper - subtask_index: 3 - - subtask: Push the book to the edge of the table with the right gripper - subtask_index: 4 - - subtask: Grasp the book with the left gripper - subtask_index: 5 - - subtask: Grasp the third book from the right with the left gripper - subtask_index: 6 - - subtask: Abnormal - subtask_index: 7 - - subtask: 'null' - subtask_index: 8 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 91429 - dataset_size: 4.07 GB - data_structure: "Airbot_MMK2_storage_book_qced_hardlink/\n|-- annotations\n| |--\ - \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ - \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n|\ - \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(225 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_storage_bottle_part.yaml b/dataset_info/Airbot_MMK2_storage_bottle_part.yaml index b533016add6adb9e8fedc55e76d48d02e2bab426..2215ddc43b457c4ac08ea7fcbebca4e24b802735 100644 --- a/dataset_info/Airbot_MMK2_storage_bottle_part.yaml +++ b/dataset_info/Airbot_MMK2_storage_bottle_part.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: water_bottle level1: beverages level2: water_bottle @@ -51,33 +51,44 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the water bottle and handle into the cardboard box. +task_instruction: +- put the water bottle and handle into the cardboard box. sub_tasks: -- Grasp the water bottle with the right gripper -- Place the water bottle into the cardboard box with the right gripper -- Lift the water bottle with the right gripper -- Lift the remote control clip with the left gripper -- Abnormal -- Static -- Place the remote control clip into the cardboard box with the left gripper -- End -- Grasp the remote control clip with the left gripper -- 'null' +- subtask: Grasp the water bottle with the right gripper + subtask_index: 0 +- subtask: Place the water bottle into the cardboard box with the right gripper + subtask_index: 1 +- subtask: Lift the water bottle with the right gripper + subtask_index: 2 +- subtask: 'Lift the remote control clip with the left gripper ' + subtask_index: 3 +- subtask: Abnormal + subtask_index: 4 +- subtask: Static + subtask_index: 5 +- subtask: Place the remote control clip into the cardboard box with the left gripper + subtask_index: 6 +- subtask: End + subtask_index: 7 +- subtask: 'Grasp the remote control clip with the left gripper ' + subtask_index: 8 +- subtask: 'null' + subtask_index: 9 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this 
dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -86,13 +97,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -100,8 +108,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 49 total_frames: 10499 fps: 30 @@ -129,11 +136,9 @@ data_structure: "Airbot_MMK2_storage_bottle_part_qced_hardlink/\n|-- annotations \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:48 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -403,7 +408,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot 
and is @@ -411,7 +416,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -438,140 +442,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_bottle_part - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: commercial_convenience - level2: supermarket - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the water bottle and handle into the cardboard box. 
- sub_tasks: - - subtask: Grasp the water bottle with the right gripper - subtask_index: 0 - - subtask: Place the water bottle into the cardboard box with the right gripper - subtask_index: 1 - - subtask: Lift the water bottle with the right gripper - subtask_index: 2 - - subtask: 'Lift the remote control clip with the left gripper ' - subtask_index: 3 - - subtask: Abnormal - subtask_index: 4 - - subtask: Static - subtask_index: 5 - - subtask: Place the remote control clip into the cardboard box with the left gripper - subtask_index: 6 - - subtask: End - subtask_index: 7 - - subtask: 'Grasp the remote control clip with the left gripper ' - subtask_index: 8 - - subtask: 'null' - subtask_index: 9 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 10499 - dataset_size: 508.16 MB - data_structure: "Airbot_MMK2_storage_bottle_part_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- 
episode_000011.parquet\n| `--\ - \ ... (37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, 
Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_storage_bowl.yaml b/dataset_info/Airbot_MMK2_storage_bowl.yaml index 5f042242d1e8bae631be7636091a08e6c968e99c..4a12660da30e493b8244385b2771f1b6a7f3473f 100644 --- a/dataset_info/Airbot_MMK2_storage_bowl.yaml +++ b/dataset_info/Airbot_MMK2_storage_bowl.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: bowl level1: plastic_bowls level2: bowl @@ -45,30 +45,36 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick the bowls on the table by hands and place them into the yellow - basin. +task_instruction: +- pick the bowls on the table by hands and place them into the yellow basin. sub_tasks: -- Grasp the pink bowl with the left gripper -- End -- Grasp the white bowl with the right gripper -- Place the pink bowl into the yellow storage box with the left gripper -- Place the white bowl into the yellow storage box with the right gripper -- 'null' +- subtask: Grasp the pink bowl with the left gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Grasp the white bowl with the right gripper + subtask_index: 2 +- subtask: Place the pink bowl into the yellow storage box with the left gripper + subtask_index: 3 +- subtask: Place the white bowl into the yellow storage box with the right gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -77,13 +83,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -91,8 +94,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 46 total_frames: 10955 fps: 30 @@ -119,11 +121,9 @@ data_structure: "Airbot_MMK2_storage_bowl_qced_hardlink/\n|-- annotations\n| | | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:45 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -393,7 +393,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -401,7 +401,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -428,132 +427,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_bowl - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: catering - level2: restaurant - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick the bowls on the table by hands and place them into the yellow basin. 
- sub_tasks: - - subtask: Grasp the pink bowl with the left gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Grasp the white bowl with the right gripper - subtask_index: 2 - - subtask: Place the pink bowl into the yellow storage box with the left gripper - subtask_index: 3 - - subtask: Place the white bowl into the yellow storage box with the right gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 10955 - dataset_size: 432.12 MB - data_structure: "Airbot_MMK2_storage_bowl_qced_hardlink/\n|-- annotations\n| |--\ - \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ - \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n|\ - \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(34 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_storage_bowl_wet_wipes.yaml b/dataset_info/Airbot_MMK2_storage_bowl_wet_wipes.yaml index 7cd536636facca39cc4a3219a3ac8f6fb6ec23ef..798854f377d2578c7625c3134896b066290175dc 100644 --- a/dataset_info/Airbot_MMK2_storage_bowl_wet_wipes.yaml +++ b/dataset_info/Airbot_MMK2_storage_bowl_wet_wipes.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -57,30 +57,38 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the bowl and wet wipes on the plate. +task_instruction: +- put the bowl and wet wipes on the plate. sub_tasks: -- Place the wet wipes into the bowl with the left gripper -- Place the bowl on the plate with the right gripper -- Static -- Grasp the wet wipes with the left gripper -- End -- Grasp the bowl with the right gripper -- 'null' +- subtask: Place the wet wipes into the bowl with the left gripper + subtask_index: 0 +- subtask: Place the bowl on the plate with the right gripper + subtask_index: 1 +- subtask: Static + subtask_index: 2 +- subtask: Grasp the wet wipes with the left gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: Grasp the bowl with the right gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -89,13 +97,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -103,8 +108,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 13058 fps: 30 @@ -191,11 +195,9 @@ data_structure: 'Airbot_MMK2_storage_bowl_wet_wipes_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -465,7 +467,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -473,7 +475,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: 
robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -500,193 +501,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_bowl_wet_wipes - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: commercial_convenience - level2: supermarket - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the bowl and wet wipes on the plate. 
- sub_tasks: - - subtask: Place the wet wipes into the bowl with the left gripper - subtask_index: 0 - - subtask: Place the bowl on the plate with the right gripper - subtask_index: 1 - - subtask: Static - subtask_index: 2 - - subtask: Grasp the wet wipes with the left gripper - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: Grasp the bowl with the right gripper - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 13058 - dataset_size: 425.41 MB - data_structure: 'Airbot_MMK2_storage_bowl_wet_wipes_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(38 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_front_rgb - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ 
Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_storage_braised_pork_belly_shrimp.yaml b/dataset_info/Airbot_MMK2_storage_braised_pork_belly_shrimp.yaml index b57ed564f6e624b7aa41605765fae6330e01f8b8..fa47a3128e83b94cf9cf97985febdd13c3c06b1a 100644 --- a/dataset_info/Airbot_MMK2_storage_braised_pork_belly_shrimp.yaml +++ b/dataset_info/Airbot_MMK2_storage_braised_pork_belly_shrimp.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: shrimp level1: prepared_dishes level2: shrimp @@ -51,30 +51,38 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the braised pork and shrimp on the plate +task_instruction: +- put the braised pork and shrimp on the plate sub_tasks: -- Place the braised pork in brown sauce into the plate with the left gripper -- Grasp the shrimp with the right gripper -- Place the shrimp into the plate with the right gripper -- Grasp the cake from the table and with the left gripper -- End -- Grasp the braised pork in brown sauce with the left gripper -- 'null' +- subtask: Place the braised pork in brown sauce into the plate with the left gripper + subtask_index: 0 +- subtask: Grasp the shrimp with the right gripper + subtask_index: 1 +- subtask: Place the shrimp into the plate with the right gripper + subtask_index: 2 +- subtask: Grasp the cake from the table and with the left gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: Grasp the braised pork in brown sauce with the left gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -83,13 +91,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -97,8 +102,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 38 total_frames: 5834 fps: 30 @@ -185,11 +189,9 @@ data_structure: 'Airbot_MMK2_storage_braised_pork_belly_shrimp_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:37 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -459,7 +461,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -467,7 +469,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: 
robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -494,193 +495,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_braised_pork_belly_shrimp - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. 
- task_instruction: - - put the braised pork and shrimp on the plate - sub_tasks: - - subtask: Place the braised pork in brown sauce into the plate with the left gripper - subtask_index: 0 - - subtask: Grasp the shrimp with the right gripper - subtask_index: 1 - - subtask: Place the shrimp into the plate with the right gripper - subtask_index: 2 - - subtask: Grasp the cake from the table and with the left gripper - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: Grasp the braised pork in brown sauce with the left gripper - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 5834 - dataset_size: 277.59 MB - data_structure: 'Airbot_MMK2_storage_braised_pork_belly_shrimp_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(26 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_front_rgb - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ 
Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_cake_both_hands.yaml b/dataset_info/Airbot_MMK2_storage_cake_both_hands.yaml index 14a10255900a4b773b0f990b7b54700bca7786b1..41e0db99e853cb49f484c20537dca77e93ad556b 100644 --- a/dataset_info/Airbot_MMK2_storage_cake_both_hands.yaml +++ b/dataset_info/Airbot_MMK2_storage_cake_both_hands.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -51,30 +51,38 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put two cakes in the lid. +task_instruction: +- put two cakes in the lid. sub_tasks: -- Grasp the cake with the left gripper -- Place the cake on the white basket with the right gripper -- Grasp the cake with the right gripper -- Place the cake on the white basket with the left gripper -- Static -- End -- 'null' +- subtask: Grasp the cake with the left gripper + subtask_index: 0 +- subtask: Place the cake on the white basket with the right gripper + subtask_index: 1 +- subtask: Grasp the cake with the right gripper + subtask_index: 2 +- subtask: Place the cake on the white basket with the left gripper + subtask_index: 3 +- subtask: Static + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -83,13 +91,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -97,8 +102,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 47 total_frames: 4445 fps: 30 @@ -126,11 +130,9 @@ data_structure: "Airbot_MMK2_storage_cake_both_hands_qced_hardlink/\n|-- annotat \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:46 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -400,7 +402,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -408,7 +410,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -435,134 +436,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_cake_both_hands - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: commercial_convenience - level2: supermarket - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put two cakes in the lid. 
- sub_tasks: - - subtask: Grasp the cake with the left gripper - subtask_index: 0 - - subtask: Place the cake on the white basket with the right gripper - subtask_index: 1 - - subtask: Grasp the cake with the right gripper - subtask_index: 2 - - subtask: Place the cake on the white basket with the left gripper - subtask_index: 3 - - subtask: Static - subtask_index: 4 - - subtask: End - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 4445 - dataset_size: 188.05 MB - data_structure: "Airbot_MMK2_storage_cake_both_hands_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(35 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_cake_box.yaml b/dataset_info/Airbot_MMK2_storage_cake_box.yaml index 82c7ccab6879fcf15c59c14e8956e0cdd5807fcc..07448b6dea2a9b9b595aecdc854e5ad34fcd2b03 100644 --- a/dataset_info/Airbot_MMK2_storage_cake_box.yaml +++ b/dataset_info/Airbot_MMK2_storage_cake_box.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: flip_top_paper_boxes level1: packaging level2: flip_top_paper_boxes @@ -45,28 +45,34 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: open the box with left hand and put the cake in with your right. +task_instruction: +- open the box with left hand and put the cake in with your right. sub_tasks: -- End -- Grasp the cake with the right gripper -- Open the lid of the box with the left gripper -- Place the cake into the box with the right gripper -- 'null' +- subtask: End + subtask_index: 0 +- subtask: Grasp the cake with the right gripper + subtask_index: 1 +- subtask: Open the lid of the box with the left gripper + subtask_index: 2 +- subtask: Place the cake into the box with the right gripper + subtask_index: 3 +- subtask: 'null' + subtask_index: 4 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -75,13 +81,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -89,8 +92,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 10329 fps: 30 @@ -118,11 +120,9 @@ data_structure: "Airbot_MMK2_storage_cake_box_qced_hardlink/\n|-- annotations\n| \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -392,7 +392,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -400,7 +400,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -427,130 +426,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_cake_box - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - open the box with left hand and put the cake in with your right. 
- sub_tasks: - - subtask: End - subtask_index: 0 - - subtask: Grasp the cake with the right gripper - subtask_index: 1 - - subtask: Open the lid of the box with the left gripper - subtask_index: 2 - - subtask: Place the cake into the box with the right gripper - subtask_index: 3 - - subtask: 'null' - subtask_index: 4 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 10329 - dataset_size: 283.14 MB - data_structure: "Airbot_MMK2_storage_cake_box_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_storage_cake_cup.yaml b/dataset_info/Airbot_MMK2_storage_cake_cup.yaml index 7b8f59bfdc1f66ba6e81fc206089dc614b63ee16..1cfd7a6d22bbf316539ecae89f45f19877dd8b55 100644 --- a/dataset_info/Airbot_MMK2_storage_cake_cup.yaml +++ b/dataset_info/Airbot_MMK2_storage_cake_cup.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: cup level1: cups level2: cup @@ -45,31 +45,40 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the ice cream into the cup +task_instruction: +- put the ice cream into the cup sub_tasks: -- Place the cake into the blue cup with the right gripper -- Grasp the cake with the left gripper -- Abnormal -- Grasp the cake with the right gripper -- Static -- Place the cake into the blue cup with the left gripper -- End -- 'null' +- subtask: Place the cake into the blue cup with the right gripper + subtask_index: 0 +- subtask: Grasp the cake with the left gripper + subtask_index: 1 +- subtask: Abnormal + subtask_index: 2 +- subtask: Grasp the cake with the right gripper + subtask_index: 3 +- subtask: Static + subtask_index: 4 +- subtask: Place the cake into the blue cup with the left gripper + subtask_index: 5 +- subtask: End + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -78,13 +87,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -92,8 +98,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 18739 fps: 30 @@ -180,11 +185,9 @@ data_structure: 'Airbot_MMK2_storage_cake_cup_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -454,7 +457,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -462,7 +465,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn 
contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -489,195 +491,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_cake_cup - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: catering - level2: cafe - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. 
- task_instruction: - - put the ice cream into the cup - sub_tasks: - - subtask: Place the cake into the blue cup with the right gripper - subtask_index: 0 - - subtask: Grasp the cake with the left gripper - subtask_index: 1 - - subtask: Abnormal - subtask_index: 2 - - subtask: Grasp the cake with the right gripper - subtask_index: 3 - - subtask: Static - subtask_index: 4 - - subtask: Place the cake into the blue cup with the left gripper - subtask_index: 5 - - subtask: End - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 18739 - dataset_size: 730.92 MB - data_structure: 'Airbot_MMK2_storage_cake_cup_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(38 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_front_rgb - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ 
Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_storage_cake_ice_cream.yaml b/dataset_info/Airbot_MMK2_storage_cake_ice_cream.yaml index fafb4a76950131b3031e8a35860025b26ed3f64b..a9fe6d5e72c8b0bfb59a2ae1d789713e9a46bcd7 100644 --- a/dataset_info/Airbot_MMK2_storage_cake_ice_cream.yaml +++ b/dataset_info/Airbot_MMK2_storage_cake_ice_cream.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: plate level1: kitchen_supplies level2: plate @@ -51,30 +51,36 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the cake and ice cream into the plate respectively with left - and right hands. +task_instruction: +- put the cake and ice cream into the plate respectively with left and right hands. sub_tasks: -- Grasp the ice cream with the right gripper -- Place the ice cream into the white basket with the right gripper -- Grasp the cake with the left gripper -- End -- Place the cake into the white basket with the left gripper -- 'null' +- subtask: Grasp the ice cream with the right gripper + subtask_index: 0 +- subtask: Place the ice cream into the white basket with the right gripper + subtask_index: 1 +- subtask: Grasp the cake with the left gripper + subtask_index: 2 +- subtask: End + subtask_index: 3 +- subtask: Place the cake into the white basket with the left gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -83,13 +89,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -97,8 +100,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 11989 fps: 30 @@ -185,11 +187,9 @@ data_structure: 'Airbot_MMK2_storage_cake_ice_cream_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -459,7 +459,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -467,7 +467,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: 
robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -494,191 +493,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_cake_ice_cream - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the cake and ice cream into the plate respectively with left and right hands. 
- sub_tasks: - - subtask: Grasp the ice cream with the right gripper - subtask_index: 0 - - subtask: Place the ice cream into the white basket with the right gripper - subtask_index: 1 - - subtask: Grasp the cake with the left gripper - subtask_index: 2 - - subtask: End - subtask_index: 3 - - subtask: Place the cake into the white basket with the left gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 11989 - dataset_size: 571.59 MB - data_structure: 'Airbot_MMK2_storage_cake_ice_cream_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(38 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_front_rgb - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ 
Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_storage_cake_pan.yaml b/dataset_info/Airbot_MMK2_storage_cake_pan.yaml index 28bf8e76c3fa22b3292e8fee05f0afc502773984..b2799cc9d722783b550148a4ea30d046717a5008 100644 --- a/dataset_info/Airbot_MMK2_storage_cake_pan.yaml +++ b/dataset_info/Airbot_MMK2_storage_cake_pan.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: pan level1: kitchen_supplies level2: pan @@ -45,30 +45,36 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the pot down with left hand and place the cake in it with right - hand. +task_instruction: +- put the pot down with left hand and place the cake in it with right hand. sub_tasks: -- Grasp the frying pan with the left gripper -- Grasp the bread with the right gripper -- End -- Place the bread on the frying pan with the right gripper -- Place the frying pan on the table with the left gripper -- 'null' +- subtask: Grasp the frying pan with the left gripper + subtask_index: 0 +- subtask: Grasp the bread with the right gripper + subtask_index: 1 +- subtask: End + subtask_index: 2 +- subtask: Place the bread on the frying pan with the right gripper + subtask_index: 3 +- subtask: Place the frying pan on the table with the left gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -77,13 +83,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -91,8 +94,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 48 total_frames: 8832 fps: 30 @@ -120,11 +122,9 @@ data_structure: "Airbot_MMK2_storage_cake_pan_qced_hardlink/\n|-- annotations\n| \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:47 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -394,7 +394,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -402,7 +402,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -429,132 +428,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_cake_pan - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the pot down with left hand and place the cake in it with right hand. 
- sub_tasks: - - subtask: Grasp the frying pan with the left gripper - subtask_index: 0 - - subtask: Grasp the bread with the right gripper - subtask_index: 1 - - subtask: End - subtask_index: 2 - - subtask: Place the bread on the frying pan with the right gripper - subtask_index: 3 - - subtask: Place the frying pan on the table with the left gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 8832 - dataset_size: 250.91 MB - data_structure: "Airbot_MMK2_storage_cake_pan_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(36 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_cake_plate.yaml b/dataset_info/Airbot_MMK2_storage_cake_plate.yaml index d68b11773e2f3cb9412495bd7552c2d7b49d267b..a1d7f68522bccf8fa9fbe04e3ff1a6ee14048b2d 100644 --- a/dataset_info/Airbot_MMK2_storage_cake_plate.yaml +++ b/dataset_info/Airbot_MMK2_storage_cake_plate.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: cake level1: bread level2: cake @@ -45,31 +45,40 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the cake into the plate with left and right hand respectively. +task_instruction: +- put the cake into the plate with left and right hand respectively. sub_tasks: -- Abnormal -- Grasp the bagged cake with the left gripper -- Place the bagged cake on the white plate with the right gripper -- Static -- End -- Grasp the bagged cake with the right gripper -- Place the bagged cake on the white plate with the left gripper -- 'null' +- subtask: Abnormal + subtask_index: 0 +- subtask: Grasp the bagged cake with the left gripper + subtask_index: 1 +- subtask: Place the bagged cake on the white plate with the right gripper + subtask_index: 2 +- subtask: Static + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: Grasp the bagged cake with the right gripper + subtask_index: 5 +- subtask: Place the bagged cake on the white plate with the left gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -78,13 +87,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -92,8 +98,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 6430 fps: 30 @@ -121,11 +126,9 @@ data_structure: "Airbot_MMK2_storage_cake_plate_qced_hardlink/\n|-- annotations\ \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -395,7 +398,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -403,7 +406,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -430,136 +432,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_cake_plate - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the cake into the plate with left and right hand respectively. 
- sub_tasks: - - subtask: Abnormal - subtask_index: 0 - - subtask: Grasp the bagged cake with the left gripper - subtask_index: 1 - - subtask: Place the bagged cake on the white plate with the right gripper - subtask_index: 2 - - subtask: Static - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: Grasp the bagged cake with the right gripper - subtask_index: 5 - - subtask: Place the bagged cake on the white plate with the left gripper - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 6430 - dataset_size: 280.75 MB - data_structure: "Airbot_MMK2_storage_cake_plate_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_cookie_cup.yaml b/dataset_info/Airbot_MMK2_storage_cookie_cup.yaml index 24d520d5eb4d7dc121a50a0ced585b228447bcf7..9095a4727be7c265e94a3d050692e5f23aa95c9c 100644 --- a/dataset_info/Airbot_MMK2_storage_cookie_cup.yaml +++ b/dataset_info/Airbot_MMK2_storage_cookie_cup.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: plate level1: kitchen_supplies level2: plate @@ -51,30 +51,36 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the cookies and beer into the plate respectively with left and - right hands. +task_instruction: +- put the cookies and beer into the plate respectively with left and right hands. sub_tasks: -- Place the beer mug on the white basket with the right gripper -- Place the bagged cookies on the white basket with the left gripper -- Grasp the bagged cookies with the left gripper -- End -- Grasp the beer mug with the right gripper -- 'null' +- subtask: Place the beer mug on the white basket with the right gripper + subtask_index: 0 +- subtask: Place the bagged cookies on the white basket with the left gripper + subtask_index: 1 +- subtask: Grasp the bagged cookies with the left gripper + subtask_index: 2 +- subtask: End + subtask_index: 3 +- subtask: Grasp the beer mug with the right gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -83,13 +89,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -97,8 +100,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 47 total_frames: 9983 fps: 30 @@ -126,11 +128,9 @@ data_structure: "Airbot_MMK2_storage_cookie_cup_qced_hardlink/\n|-- annotations\ \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:46 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -400,7 +400,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -408,7 +408,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -435,132 +434,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_cookie_cup - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the cookies and beer into the plate respectively with left and right hands. 
- sub_tasks: - - subtask: Place the beer mug on the white basket with the right gripper - subtask_index: 0 - - subtask: Place the bagged cookies on the white basket with the left gripper - subtask_index: 1 - - subtask: Grasp the bagged cookies with the left gripper - subtask_index: 2 - - subtask: End - subtask_index: 3 - - subtask: Grasp the beer mug with the right gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 9983 - dataset_size: 488.86 MB - data_structure: "Airbot_MMK2_storage_cookie_cup_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(35 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_cookie_toy_car.yaml b/dataset_info/Airbot_MMK2_storage_cookie_toy_car.yaml index 901dc80736ea68401040a78d4bf882a1f6a3014d..44e760798f69b630af329bdba3393a54f304fbe9 100644 --- a/dataset_info/Airbot_MMK2_storage_cookie_toy_car.yaml +++ b/dataset_info/Airbot_MMK2_storage_cookie_toy_car.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: furniture level2: table @@ -57,32 +57,43 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the toy car with left hand and put it in the basket, then - pick up the cookie with right hand and put it in the basket. +task_instruction: +- pick up the toy car with left hand and put it in the basket, then pick up the cookie + with right hand and put it in the basket. sub_tasks: -- Lift the toy car with left gripper and lift the cookies with the right gripper -- Grasp the toy car with left gripper and grasp the cookies with the right gripper -- Place the toy car on the pink plate with the left gripper -- Abnormal -- Place the cookies on the pink plate with the right gripper -- Static -- End -- 'null' +- subtask: Lift the toy car with left gripper and lift the cookies with the right + gripper + subtask_index: 0 +- subtask: Grasp the toy car with left gripper and grasp the cookies with the right + gripper + subtask_index: 1 +- subtask: Place the toy car on the pink plate with the left gripper + subtask_index: 2 +- subtask: Abnormal + subtask_index: 3 +- subtask: Place the cookies on the pink plate with the right gripper + subtask_index: 4 +- subtask: Static + subtask_index: 5 +- subtask: End + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type 
information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -91,13 +102,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -105,8 +113,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 8388 fps: 30 @@ -134,11 +141,9 @@ data_structure: "Airbot_MMK2_storage_cookie_toy_car_qced_hardlink/\n|-- annotati \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -408,7 +413,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -416,7 +421,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -443,139 +447,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_cookie_toy_car - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the toy car with left hand and put it in the basket, then pick up the - cookie with right hand and put it in the basket. 
- sub_tasks: - - subtask: Lift the toy car with left gripper and lift the cookies with the right - gripper - subtask_index: 0 - - subtask: Grasp the toy car with left gripper and grasp the cookies with the right - gripper - subtask_index: 1 - - subtask: Place the toy car on the pink plate with the left gripper - subtask_index: 2 - - subtask: Abnormal - subtask_index: 3 - - subtask: Place the cookies on the pink plate with the right gripper - subtask_index: 4 - - subtask: Static - subtask_index: 5 - - subtask: End - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 8388 - dataset_size: 277.71 MB - data_structure: "Airbot_MMK2_storage_cookie_toy_car_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_cup.yaml b/dataset_info/Airbot_MMK2_storage_cup.yaml index 5f8aec0961bbd8efb4c8fce6848b7b7cf2dc7555..49d127b50bf66e5d43d52af2b644dfc58a542416 100644 --- a/dataset_info/Airbot_MMK2_storage_cup.yaml +++ b/dataset_info/Airbot_MMK2_storage_cup.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: any_cup level1: cups level2: any_cup @@ -45,34 +45,46 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the two cups in the basin. +task_instruction: +- put the two cups in the basin. sub_tasks: -- place the cup in the yellow basket use the right gripper -- Grasp the cup the left gripper -- Lift the cup the right gripper -- place the cup in the basin use the left gripper -- Grasp the cup the right gripper -- place the cup in the yellow basket use the left gripper -- place the cup in the basin use the right gripper -- Static -- Lift the cup the left gripper -- End -- 'null' +- subtask: place the cup in the yellow basket use the right gripper + subtask_index: 0 +- subtask: Grasp the cup the left gripper + subtask_index: 1 +- subtask: Lift the cup the right gripper + subtask_index: 2 +- subtask: place the cup in the basin use the left gripper + subtask_index: 3 +- subtask: Grasp the cup the right gripper + subtask_index: 4 +- subtask: place the cup in the yellow basket use the left gripper + subtask_index: 5 +- subtask: place the cup in the basin use the right gripper + subtask_index: 6 +- subtask: Static + subtask_index: 7 +- subtask: Lift the cup the left gripper + subtask_index: 8 +- subtask: End + subtask_index: 9 +- subtask: 'null' + subtask_index: 10 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type 
information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -81,13 +93,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -95,8 +104,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 99 total_frames: 20190 fps: 30 @@ -123,11 +131,9 @@ data_structure: "Airbot_MMK2_storage_cup_qced_hardlink/\n|-- annotations\n| |- | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:98 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -397,7 +403,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -405,7 +411,6 @@ 
dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -432,142 +437,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_cup - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the two cups in the basin. 
- sub_tasks: - - subtask: place the cup in the yellow basket use the right gripper - subtask_index: 0 - - subtask: Grasp the cup the left gripper - subtask_index: 1 - - subtask: Lift the cup the right gripper - subtask_index: 2 - - subtask: place the cup in the basin use the left gripper - subtask_index: 3 - - subtask: Grasp the cup the right gripper - subtask_index: 4 - - subtask: place the cup in the yellow basket use the left gripper - subtask_index: 5 - - subtask: place the cup in the basin use the right gripper - subtask_index: 6 - - subtask: Static - subtask_index: 7 - - subtask: Lift the cup the left gripper - subtask_index: 8 - - subtask: End - subtask_index: 9 - - subtask: 'null' - subtask_index: 10 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 20190 - dataset_size: 802.42 MB - data_structure: "Airbot_MMK2_storage_cup_qced_hardlink/\n|-- annotations\n| |--\ - \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ - \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n|\ - \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- 
episode_000011.parquet\n| `--\ - \ ... (87 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, 
Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_storage_cup_plate.yaml b/dataset_info/Airbot_MMK2_storage_cup_plate.yaml index 64d928c3f41dfc740448ebcc6e88fbfc66f1a9b1..0c39552736ea10cabf8d578b76679abb42db9a49 100644 --- a/dataset_info/Airbot_MMK2_storage_cup_plate.yaml +++ b/dataset_info/Airbot_MMK2_storage_cup_plate.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: any_cup level1: cups level2: any_cup @@ -45,31 +45,38 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the cups on the table by hand and place them on the white - plate. +task_instruction: +- pick up the cups on the table by hand and place them on the white plate. sub_tasks: -- Grasp the purple cup with the right gripper -- Grasp the blue cup with the left gripper -- Place the blue cup on the plate with the left gripper -- Abnormal -- Place the purple cup on the blue cup with the right gripper -- End -- 'null' +- subtask: Grasp the purple cup with the right gripper + subtask_index: 0 +- subtask: Grasp the blue cup with the left gripper + subtask_index: 1 +- subtask: Place the blue cup on the plate with the left gripper + subtask_index: 2 +- subtask: Abnormal + subtask_index: 3 +- subtask: Place the purple cup on the blue cup with the right gripper + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - clip - place - lift -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -78,13 +85,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -92,8 +96,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 44 total_frames: 11727 fps: 30 @@ -121,11 +124,9 @@ data_structure: "Airbot_MMK2_storage_cup_plate_qced_hardlink/\n|-- annotations\n \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:43 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -395,7 +396,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -403,7 +404,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -430,134 +430,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_cup_plate - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the cups on the table by hand and place them on the white plate. 
- sub_tasks: - - subtask: Grasp the purple cup with the right gripper - subtask_index: 0 - - subtask: Grasp the blue cup with the left gripper - subtask_index: 1 - - subtask: Place the blue cup on the plate with the left gripper - subtask_index: 2 - - subtask: Abnormal - subtask_index: 3 - - subtask: Place the purple cup on the blue cup with the right gripper - subtask_index: 4 - - subtask: End - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - clip - - place - - lift - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 11727 - dataset_size: 500.23 MB - data_structure: "Airbot_MMK2_storage_cup_plate_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(32 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_storage_cup_rubik's_cube.yaml b/dataset_info/Airbot_MMK2_storage_cup_rubik's_cube.yaml index a025544218f710c49e89060079b8503e5b01bb5f..bd9f0e96055bd8d69d896649380c448c65e9716e 100644 --- a/dataset_info/Airbot_MMK2_storage_cup_rubik's_cube.yaml +++ b/dataset_info/Airbot_MMK2_storage_cup_rubik's_cube.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: plate level1: kitchen_supplies level2: plate @@ -51,31 +51,39 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the rubik's Cube into the plate with right hand and place the - cup on top of the cube with left hand. +task_instruction: +- put the rubik's Cube into the plate with right hand and place the cup on top of + the cube with left hand. sub_tasks: -- Grasp the paper cup with the left gripper -- Place the paper cup on the magic cube with the left gripper -- Place the magic cube on the plate with the right gripper -- Abnormal -- Grasp the magic cube with the right gripper -- End -- 'null' +- subtask: Grasp the paper cup with the left gripper + subtask_index: 0 +- subtask: Place the paper cup on the magic cube with the left gripper + subtask_index: 1 +- subtask: Place the magic cube on the plate with the right gripper + subtask_index: 2 +- subtask: Abnormal + subtask_index: 3 +- subtask: Grasp the magic cube with the right gripper + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -84,13 +92,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -98,8 +103,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 47 total_frames: 13787 fps: 30 @@ -127,11 +131,9 @@ data_structure: "Airbot_MMK2_storage_cup_rubik_s_cube_qced_hardlink/\n|-- annota \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:46 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -401,7 +403,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -409,7 +411,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -436,135 +437,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_cup_rubik's_cube - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the rubik's Cube into the plate with right hand and place the cup on top of - the cube with left hand. 
- sub_tasks: - - subtask: Grasp the paper cup with the left gripper - subtask_index: 0 - - subtask: Place the paper cup on the magic cube with the left gripper - subtask_index: 1 - - subtask: Place the magic cube on the plate with the right gripper - subtask_index: 2 - - subtask: Abnormal - subtask_index: 3 - - subtask: Grasp the magic cube with the right gripper - subtask_index: 4 - - subtask: End - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 13787 - dataset_size: 387.14 MB - data_structure: "Airbot_MMK2_storage_cup_rubik_s_cube_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(35 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_storage_diamond_ring.yaml b/dataset_info/Airbot_MMK2_storage_diamond_ring.yaml index ea27ac11b6cbf6ffa20d95777a13a3c384cde84f..d4d543b7248eb9435a63052fce9d15d85d627853 100644 --- a/dataset_info/Airbot_MMK2_storage_diamond_ring.yaml +++ b/dataset_info/Airbot_MMK2_storage_diamond_ring.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: diamond_ring level1: daily_necessities level2: diamond_ring @@ -45,29 +45,35 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the ring into the box. +task_instruction: +- put the ring into the box. sub_tasks: -- Place the diamond ring in the box with the right gripper -- End -- Close the lid of the box with the left gripper -- Grasp the diamond ring with the right gripper -- 'null' +- subtask: Place the diamond ring in the box with the right gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Close the lid of the box with the left gripper + subtask_index: 2 +- subtask: Grasp the diamond ring with the right gripper + subtask_index: 3 +- subtask: 'null' + subtask_index: 4 atomic_actions: - grasp - place - pick - turn -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -76,13 +82,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -90,8 +93,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 9131 fps: 30 @@ -119,11 +121,9 @@ data_structure: "Airbot_MMK2_storage_diamond_ring_qced_hardlink/\n|-- annotation \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -393,7 +393,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -401,7 +401,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -428,131 +427,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_diamond_ring - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: bedroom - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the ring into the box. 
- sub_tasks: - - subtask: Place the diamond ring in the box with the right gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Close the lid of the box with the left gripper - subtask_index: 2 - - subtask: Grasp the diamond ring with the right gripper - subtask_index: 3 - - subtask: 'null' - subtask_index: 4 - atomic_actions: - - grasp - - place - - pick - - turn - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 9131 - dataset_size: 426.97 MB - data_structure: "Airbot_MMK2_storage_diamond_ring_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_egg_bowl.yaml b/dataset_info/Airbot_MMK2_storage_egg_bowl.yaml index 1ccd9d9aa6dde9bf35e920c3cc3c7dcd21deab93..53f33762ba91334bb1691f4bef398e1e48c05573 100644 --- a/dataset_info/Airbot_MMK2_storage_egg_bowl.yaml +++ b/dataset_info/Airbot_MMK2_storage_egg_bowl.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: egg level1: food level2: egg @@ -45,30 +45,38 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use hands to pick the egg on the table and place them into the bowl. +task_instruction: +- use hands to pick the egg on the table and place them into the bowl. sub_tasks: -- Grasp the egg with the left gripper -- Place the egg into bowl with the right gripper -- Abnormal -- Place the egg into bowl with the left gripper -- End -- Grasp the egg with the right gripper -- 'null' +- subtask: Grasp the egg with the left gripper + subtask_index: 0 +- subtask: Place the egg into bowl with the right gripper + subtask_index: 1 +- subtask: Abnormal + subtask_index: 2 +- subtask: Place the egg into bowl with the left gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: Grasp the egg with the right gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -77,13 +85,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -91,8 +96,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 11572 fps: 30 @@ -120,11 +124,9 @@ data_structure: "Airbot_MMK2_storage_egg_bowl_qced_hardlink/\n|-- annotations\n| \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -394,7 +396,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -402,7 +404,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -429,134 +430,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_egg_bowl - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use hands to pick the egg on the table and place them into the bowl. 
- sub_tasks: - - subtask: Grasp the egg with the left gripper - subtask_index: 0 - - subtask: Place the egg into bowl with the right gripper - subtask_index: 1 - - subtask: Abnormal - subtask_index: 2 - - subtask: Place the egg into bowl with the left gripper - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: Grasp the egg with the right gripper - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 11572 - dataset_size: 444.71 MB - data_structure: "Airbot_MMK2_storage_egg_bowl_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_storage_egg_plate.yaml b/dataset_info/Airbot_MMK2_storage_egg_plate.yaml index 6d877abd5b14e8e1ee92feaa5ec670fc17bf7900..73b4a5a08abc299c65aaca5dd6a6ff178542e141 100644 --- a/dataset_info/Airbot_MMK2_storage_egg_plate.yaml +++ b/dataset_info/Airbot_MMK2_storage_egg_plate.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: egg level1: food level2: egg @@ -45,31 +45,40 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use hands to pick the egg on the table and place them into the plate. +task_instruction: +- use hands to pick the egg on the table and place them into the plate. sub_tasks: -- Place the egg into the plate with the right gripper -- Grasp the egg with the left gripper -- Place the egg into the plate with the left gripper -- Abnormal -- Place the cake into the plate with the right gripper -- End -- Grasp the egg with the right gripper -- 'null' +- subtask: Place the egg into the plate with the right gripper + subtask_index: 0 +- subtask: Grasp the egg with the left gripper + subtask_index: 1 +- subtask: Place the egg into the plate with the left gripper + subtask_index: 2 +- subtask: Abnormal + subtask_index: 3 +- subtask: Place the cake into the plate with the right gripper + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: Grasp the egg with the right gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -78,13 +87,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -92,8 +98,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 45 total_frames: 7564 fps: 30 @@ -121,11 +126,9 @@ data_structure: "Airbot_MMK2_storage_egg_plate_qced_hardlink/\n|-- annotations\n \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:44 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -395,7 +398,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -403,7 +406,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -430,136 +432,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_egg_plate - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use hands to pick the egg on the table and place them into the plate. 
- sub_tasks: - - subtask: Place the egg into the plate with the right gripper - subtask_index: 0 - - subtask: Grasp the egg with the left gripper - subtask_index: 1 - - subtask: Place the egg into the plate with the left gripper - subtask_index: 2 - - subtask: Abnormal - subtask_index: 3 - - subtask: Place the cake into the plate with the right gripper - subtask_index: 4 - - subtask: End - subtask_index: 5 - - subtask: Grasp the egg with the right gripper - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 7564 - dataset_size: 345.41 MB - data_structure: "Airbot_MMK2_storage_egg_plate_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(33 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_egg_white_box.yaml b/dataset_info/Airbot_MMK2_storage_egg_white_box.yaml index aa67b0a7b4e77ed795c44ba9a53c6cfddc70deb2..8b998c18a70413d4cb56c260e96f5c32c69389d2 100644 --- a/dataset_info/Airbot_MMK2_storage_egg_white_box.yaml +++ b/dataset_info/Airbot_MMK2_storage_egg_white_box.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: egg level1: food level2: eggs @@ -45,31 +45,40 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use hands to pick the egg on the table and place them into the storage - box. +task_instruction: +- use hands to pick the egg on the table and place them into the storage box. sub_tasks: -- Place the egg into the left compartment of the storage box with the left gripper -- Grasp the egg with the left gripper -- End -- Grasp the egg with the right gripper -- Abnormal -- Place the egg into the right compartment of the storage box with the right gripper -- 'null' +- subtask: Place the egg into the left compartment of the storage box with the left + gripper + subtask_index: 0 +- subtask: Grasp the egg with the left gripper + subtask_index: 1 +- subtask: End + subtask_index: 2 +- subtask: Grasp the egg with the right gripper + subtask_index: 3 +- subtask: Abnormal + subtask_index: 4 +- subtask: Place the egg into the right compartment of the storage box with the right + gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -78,13 +87,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -92,8 +98,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 43 total_frames: 6645 fps: 30 @@ -121,11 +126,9 @@ data_structure: "Airbot_MMK2_storage_egg_white_box_qced_hardlink/\n|-- annotatio \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:42 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -395,7 +398,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -403,7 +406,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -430,136 +432,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_egg_white_box - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use hands to pick the egg on the table and place them into the storage box. 
- sub_tasks: - - subtask: Place the egg into the left compartment of the storage box with the left - gripper - subtask_index: 0 - - subtask: Grasp the egg with the left gripper - subtask_index: 1 - - subtask: End - subtask_index: 2 - - subtask: Grasp the egg with the right gripper - subtask_index: 3 - - subtask: Abnormal - subtask_index: 4 - - subtask: Place the egg into the right compartment of the storage box with the - right gripper - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 6645 - dataset_size: 173.88 MB - data_structure: "Airbot_MMK2_storage_egg_white_box_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(31 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_egg_yellow_box.yaml b/dataset_info/Airbot_MMK2_storage_egg_yellow_box.yaml index 69c46bc360ac33d9d9b21949ac2a1fc598b13b30..5f6bdd9a5f848d335655da2465eb29faf3d7091c 100644 --- a/dataset_info/Airbot_MMK2_storage_egg_yellow_box.yaml +++ b/dataset_info/Airbot_MMK2_storage_egg_yellow_box.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: egg level1: food level2: egg @@ -45,30 +45,36 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use hands to pick the egg on the table and place them into the egg - carton. +task_instruction: +- use hands to pick the egg on the table and place them into the egg carton. sub_tasks: -- Close the lid of the egg storage box with the left gripper -- End -- Place the egg into the egg storage box with the right gripper -- Grasp the egg with the right gripper -- Abnormal -- 'null' +- subtask: Close the lid of the egg storage box with the left gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Place the egg into the egg storage box with the right gripper + subtask_index: 2 +- subtask: Grasp the egg with the right gripper + subtask_index: 3 +- subtask: Abnormal + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -77,13 +83,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -91,8 +94,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 249 total_frames: 68503 fps: 30 @@ -120,11 +122,9 @@ data_structure: "Airbot_MMK2_storage_egg_yellow_box_qced_hardlink/\n|-- annotati \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:248 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -394,7 +394,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -402,7 +402,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -429,132 +428,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_egg_yellow_box - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use hands to pick the egg on the table and place them into the egg carton. 
- sub_tasks: - - subtask: Close the lid of the egg storage box with the left gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Place the egg into the egg storage box with the right gripper - subtask_index: 2 - - subtask: Grasp the egg with the right gripper - subtask_index: 3 - - subtask: Abnormal - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 68503 - dataset_size: 2.69 GB - data_structure: "Airbot_MMK2_storage_egg_yellow_box_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(237 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_storage_electronics_white_basket.yaml b/dataset_info/Airbot_MMK2_storage_electronics_white_basket.yaml index 9404daf1018790b42fe4eaa430fcf3d8d3fe7eb9..dd5cba71951911d03a5fad9fd50c3b94908cea6a 100644 --- a/dataset_info/Airbot_MMK2_storage_electronics_white_basket.yaml +++ b/dataset_info/Airbot_MMK2_storage_electronics_white_basket.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: paper_boxes level1: packaging level2: paper_boxes @@ -45,29 +45,34 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the mouse box and the calculator box into the white basket by - hand. +task_instruction: +- put the mouse box and the calculator box into the white basket by hand. sub_tasks: -- Deliver the calculator box from left gripper to right gripper -- End -- Grasp the calculator box with the left gripper -- Place the calculator box into the white basket with the right gripper -- 'null' +- subtask: Deliver the calculator box from left gripper to right gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Grasp the calculator box with the left gripper + subtask_index: 2 +- subtask: Place the calculator box into the white basket with the right gripper + subtask_index: 3 +- subtask: 'null' + subtask_index: 4 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -76,13 +81,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -90,8 +92,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 10024 fps: 30 @@ -119,11 +120,9 @@ data_structure: "Airbot_MMK2_storage_electronics_white_basket_qced_hardlink/\n|- \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -393,7 +392,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -401,7 +400,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -428,130 +426,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_electronics_white_basket - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: industry - level2: factory - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the mouse box and the calculator box into the white basket by hand. 
- sub_tasks: - - subtask: Deliver the calculator box from left gripper to right gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Grasp the calculator box with the left gripper - subtask_index: 2 - - subtask: Place the calculator box into the white basket with the right gripper - subtask_index: 3 - - subtask: 'null' - subtask_index: 4 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 10024 - dataset_size: 363.92 MB - data_structure: "Airbot_MMK2_storage_electronics_white_basket_qced_hardlink/\n|--\ - \ annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_storage_electronics_yellow_baket.yaml b/dataset_info/Airbot_MMK2_storage_electronics_yellow_baket.yaml index d75a0eba9dba2b2c33e056dcc70069bf1ec11d8e..3d45f082757b56e6103eff76f5ad5b7f2a74d212 100644 --- a/dataset_info/Airbot_MMK2_storage_electronics_yellow_baket.yaml +++ b/dataset_info/Airbot_MMK2_storage_electronics_yellow_baket.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: paper_boxes level1: packaging level2: paper_boxes @@ -45,36 +45,50 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the electronic products components into the yellow basket. +task_instruction: +- put the electronic products components into the yellow basket. sub_tasks: -- Place the calculator box into the storage box with the left gripper -- Grasp the calculator box with the left gripper -- Place the calculator box into the storage box with the right gripper -- Place the phone case box into the storage box with the right gripper -- Place the phone case box in the yellow box with the right gripper -- Place the phone case box into the storage box with the left gripper -- Abnormal -- Place the calculator box in the yellow box with the left gripper -- Grasp the calculator box with the right gripper -- Grasp the phone case box with the right gripper -- End -- Grasp the phone case box with the left gripper -- 'null' +- subtask: Place the calculator box into the storage box with the left gripper + subtask_index: 0 +- subtask: Grasp the calculator box with the left gripper + subtask_index: 1 +- subtask: Place the calculator box into the storage box with the right gripper + subtask_index: 2 +- subtask: Place the phone case box into the storage box with the right gripper + subtask_index: 3 +- subtask: Place the phone case box in the yellow box with the right gripper + subtask_index: 4 +- subtask: Place the phone case box into the storage box with the left gripper + subtask_index: 5 +- subtask: 
Abnormal + subtask_index: 6 +- subtask: Place the calculator box in the yellow box with the left gripper + subtask_index: 7 +- subtask: Grasp the calculator box with the right gripper + subtask_index: 8 +- subtask: Grasp the phone case box with the right gripper + subtask_index: 9 +- subtask: End + subtask_index: 10 +- subtask: Grasp the phone case box with the left gripper + subtask_index: 11 +- subtask: 'null' + subtask_index: 12 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -83,13 +97,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -97,8 +108,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 99 total_frames: 25739 fps: 30 @@ -126,11 +136,9 @@ data_structure: "Airbot_MMK2_storage_electronics_yellow_baket_qced_hardlink/\n|- \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n 
|-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:98 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -400,7 +408,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -408,7 +416,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -435,146 +442,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. 
- extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_electronics_yellow_baket - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the electronic products components into the yellow basket. - sub_tasks: - - subtask: Place the calculator box into the storage box with the left gripper - subtask_index: 0 - - subtask: Grasp the calculator box with the left gripper - subtask_index: 1 - - subtask: Place the calculator box into the storage box with the right gripper - subtask_index: 2 - - subtask: Place the phone case box into the storage box with the right gripper - subtask_index: 3 - - subtask: Place the phone case box in the yellow box with the right gripper - subtask_index: 4 - - subtask: Place the phone case box into the storage box with the left gripper - subtask_index: 5 - - subtask: Abnormal - subtask_index: 6 - - subtask: Place the calculator box in the yellow box with the left gripper - subtask_index: 7 - - subtask: Grasp the calculator box with the right gripper - subtask_index: 8 - - subtask: Grasp the phone case box with the right gripper - subtask_index: 9 - - subtask: End - subtask_index: 10 - - subtask: Grasp the phone case box with the left gripper - subtask_index: 11 - - subtask: 'null' - subtask_index: 12 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 25739 - dataset_size: 879.43 MB - data_structure: "Airbot_MMK2_storage_electronics_yellow_baket_qced_hardlink/\n|--\ - \ annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... (87 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_storage_gold_bar_model_shark_doll.yaml b/dataset_info/Airbot_MMK2_storage_gold_bar_model_shark_doll.yaml index 38d89176f8bc49b506444e8befad6c1b21149cbe..c06b1861af96ff38865402db9a3daf9fd2fd20aa 100644 --- a/dataset_info/Airbot_MMK2_storage_gold_bar_model_shark_doll.yaml +++ b/dataset_info/Airbot_MMK2_storage_gold_bar_model_shark_doll.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: paper_boxes level1: packaging level2: paper_boxes @@ -51,30 +51,37 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the shark doll and the gold bar into the paper box respectively - with left and right hands. +task_instruction: +- put the shark doll and the gold bar into the paper box respectively with left and + right hands. 
sub_tasks: -- End -- Place the whale on the paper box with the left gripper -- Place the gold bar on the paper box with the right gripper -- Grasp the whale with the left gripper -- Grasp the gold bar with the right gripper -- 'null' +- subtask: End + subtask_index: 0 +- subtask: Place the whale on the paper box with the left gripper + subtask_index: 1 +- subtask: Place the gold bar on the paper box with the right gripper + subtask_index: 2 +- subtask: Grasp the whale with the left gripper + subtask_index: 3 +- subtask: Grasp the gold bar with the right gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -83,13 +90,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -97,8 +101,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 44 total_frames: 5620 fps: 30 @@ -126,11 +129,9 @@ data_structure: 
"Airbot_MMK2_storage_gold_bar_model_shark_doll_qced_hardlink/\n| \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:43 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -400,7 +401,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -408,7 +409,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -435,133 +435,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. 
- extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_gold_bar_model_shark_doll - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the shark doll and the gold bar into the paper box respectively with left - and right hands. - sub_tasks: - - subtask: End - subtask_index: 0 - - subtask: Place the whale on the paper box with the left gripper - subtask_index: 1 - - subtask: Place the gold bar on the paper box with the right gripper - subtask_index: 2 - - subtask: Grasp the whale with the left gripper - subtask_index: 3 - - subtask: Grasp the gold bar with the right gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 5620 - dataset_size: 240.37 MB - data_structure: "Airbot_MMK2_storage_gold_bar_model_shark_doll_qced_hardlink/\n\ - |-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... (32 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_grape.yaml b/dataset_info/Airbot_MMK2_storage_grape.yaml index 43bc60554b2f56312852a8059816b6f494bc6848..8d285763adecce2e420376fac4ebe03fdc27ad0c 100644 --- a/dataset_info/Airbot_MMK2_storage_grape.yaml +++ b/dataset_info/Airbot_MMK2_storage_grape.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: grape level1: fruits level2: grape @@ -45,27 +45,32 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the grapes into the white box by hand. +task_instruction: +- put the grapes into the white box by hand. 
sub_tasks: -- End -- Grasp the grape with the right gripper -- Place the grape into the storage box with the right gripper -- 'null' +- subtask: End + subtask_index: 0 +- subtask: Grasp the grape with the right gripper + subtask_index: 1 +- subtask: Place the grape into the storage box with the right gripper + subtask_index: 2 +- subtask: 'null' + subtask_index: 3 atomic_actions: - pinch - place - lift -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -74,13 +79,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -88,8 +90,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 5721 fps: 30 @@ -116,11 +117,9 @@ data_structure: "Airbot_MMK2_storage_grape_qced_hardlink/\n|-- annotations\n| | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- 
observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -390,7 +389,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -398,7 +397,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -425,128 +423,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. 
- extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_grape - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the grapes into the white box by hand. - sub_tasks: - - subtask: End - subtask_index: 0 - - subtask: Grasp the grape with the right gripper - subtask_index: 1 - - subtask: Place the grape into the storage box with the right gripper - subtask_index: 2 - - subtask: 'null' - subtask_index: 3 - atomic_actions: - - pinch - - place - - lift - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 5721 - dataset_size: 150.48 MB - data_structure: "Airbot_MMK2_storage_grape_qced_hardlink/\n|-- annotations\n| \ - \ |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n|\ - \ |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... (38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_hourglass.yaml b/dataset_info/Airbot_MMK2_storage_hourglass.yaml index 5f1d8804e527e7c2eab06e80f2c8e4db0db23f49..060c70be11bd1989045f3bbb125a48dc75f93259 100644 --- a/dataset_info/Airbot_MMK2_storage_hourglass.yaml +++ b/dataset_info/Airbot_MMK2_storage_hourglass.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: hourglass level1: toys level2: hourglass @@ -45,28 +45,33 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: place the hourglass in the storage box with right hand. +task_instruction: +- place the hourglass in the storage box with right hand. 
sub_tasks: -- End -- Place the hourglass into the right compartment of the storage box with the right - gripper -- Grasp the hourglass with the right gripper -- 'null' +- subtask: End + subtask_index: 0 +- subtask: Place the hourglass into the right compartment of the storage box with + the right gripper + subtask_index: 1 +- subtask: Grasp the hourglass with the right gripper + subtask_index: 2 +- subtask: 'null' + subtask_index: 3 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -75,13 +80,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -89,8 +91,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 7471 fps: 30 @@ -118,11 +119,9 @@ data_structure: "Airbot_MMK2_storage_hourglass_qced_hardlink/\n|-- annotations\n \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n 
|-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -392,7 +391,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -400,7 +399,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -427,129 +425,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. 
- extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_hourglass - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - place the hourglass in the storage box with right hand. - sub_tasks: - - subtask: End - subtask_index: 0 - - subtask: Place the hourglass into the right compartment of the storage box with - the right gripper - subtask_index: 1 - - subtask: Grasp the hourglass with the right gripper - subtask_index: 2 - - subtask: 'null' - subtask_index: 3 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 7471 - dataset_size: 236.31 MB - data_structure: "Airbot_MMK2_storage_hourglass_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... (38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_ice_cream.yaml b/dataset_info/Airbot_MMK2_storage_ice_cream.yaml index 75e15f2762af3a3477a345f42d1f99e97b0729f6..fa5b972145504dd9a7ad31906d6601d935a11026 100644 --- a/dataset_info/Airbot_MMK2_storage_ice_cream.yaml +++ b/dataset_info/Airbot_MMK2_storage_ice_cream.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: ice_cream level1: snacks level2: ice_cream @@ -45,32 +45,40 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the ice cream into the storage box with left and right hands - respectively. +task_instruction: +- put the ice cream into the storage box with left and right hands respectively. 
sub_tasks: -- Grasp the ice cream with the right gripper -- Place the ice cream into the white basket with the right gripper -- Abnormal -- Static -- Place the ice cream into the white basket with the left gripper -- End -- Grasp the ice cream with the left gripper -- 'null' +- subtask: Grasp the ice cream with the right gripper + subtask_index: 0 +- subtask: Place the ice cream into the white basket with the right gripper + subtask_index: 1 +- subtask: Abnormal + subtask_index: 2 +- subtask: Static + subtask_index: 3 +- subtask: Place the ice cream into the white basket with the left gripper + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: Grasp the ice cream with the left gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -79,13 +87,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -93,8 +98,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 47 total_frames: 7639 fps: 30 @@ -122,11 +126,9 @@ data_structure: "Airbot_MMK2_storage_ice_cream_qced_hardlink/\n|-- annotations\n \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:46 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -396,7 +398,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -404,7 +406,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -431,136 +432,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_ice_cream - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the ice cream into the storage box with left and right hands respectively. 
- sub_tasks: - - subtask: Grasp the ice cream with the right gripper - subtask_index: 0 - - subtask: Place the ice cream into the white basket with the right gripper - subtask_index: 1 - - subtask: Abnormal - subtask_index: 2 - - subtask: Static - subtask_index: 3 - - subtask: Place the ice cream into the white basket with the left gripper - subtask_index: 4 - - subtask: End - subtask_index: 5 - - subtask: Grasp the ice cream with the left gripper - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 7639 - dataset_size: 307.05 MB - data_structure: "Airbot_MMK2_storage_ice_cream_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(35 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_lemon_mango.yaml b/dataset_info/Airbot_MMK2_storage_lemon_mango.yaml index c01f2ec522b76dcca1ad85acbf667f72b2654148..79a9e5c328dac87461447601c0c98eb1b5f15f02 100644 --- a/dataset_info/Airbot_MMK2_storage_lemon_mango.yaml +++ b/dataset_info/Airbot_MMK2_storage_lemon_mango.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: lemon level1: fruits level2: lemon @@ -51,31 +51,40 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: grab the mango with left hand and the lemon with right hand, and - put them into the storage box. +task_instruction: +- grab the mango with left hand and the lemon with right hand, and put them into the + storage box. sub_tasks: -- End -- Place the lemon into the right compartment of the storage box with the right gripper -- Place the mango into the left compartment of the storage box with the left gripper -- Grasp the mango with the left gripper -- Grasp the lemon with the right gripper -- 'null' +- subtask: End + subtask_index: 0 +- subtask: Place the lemon into the right compartment of the storage box with the + right gripper + subtask_index: 1 +- subtask: Place the mango into the left compartment of the storage box with the + left gripper + subtask_index: 2 +- subtask: Grasp the mango with the left gripper + subtask_index: 3 +- subtask: Grasp the lemon with the right gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - pinch - place - clip - lift -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -84,13 +93,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -98,8 +104,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 49 total_frames: 7685 fps: 30 @@ -127,11 +132,9 @@ data_structure: "Airbot_MMK2_storage_lemon_mango_qced_hardlink/\n|-- annotations \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:48 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -401,7 +404,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -409,7 +412,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -436,136 +438,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_lemon_mango - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - grab the mango with left hand and the lemon with right hand, and put them into - the storage box. 
- sub_tasks: - - subtask: End - subtask_index: 0 - - subtask: Place the lemon into the right compartment of the storage box with the - right gripper - subtask_index: 1 - - subtask: Place the mango into the left compartment of the storage box with the - left gripper - subtask_index: 2 - - subtask: Grasp the mango with the left gripper - subtask_index: 3 - - subtask: Grasp the lemon with the right gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - pinch - - place - - clip - - lift - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 7685 - dataset_size: 201.65 MB - data_structure: "Airbot_MMK2_storage_lemon_mango_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_mango_pomegranate.yaml b/dataset_info/Airbot_MMK2_storage_mango_pomegranate.yaml index 8bddb794c388893267ae600763eb2bd89d54decf..aa8e5138b94844c3c2465b2e7d315cd5fb6c157e 100644 --- a/dataset_info/Airbot_MMK2_storage_mango_pomegranate.yaml +++ b/dataset_info/Airbot_MMK2_storage_mango_pomegranate.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: pomegranate level1: fruit level2: pomegranate @@ -51,36 +51,48 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the pomegranate with left hand and put it in the storage - box, and pick up the mango with right hand and put it in the storage box. +task_instruction: +- pick up the pomegranate with left hand and put it in the storage box, and pick up + the mango with right hand and put it in the storage box. sub_tasks: -- Place the pomegranate into the left compartment of the storage box with the left - gripper -- Grasp the pomegranate with the left gripper -- Grasp a mango with the right gripper -- Static -- Place the pomegranate into the left compartment of the storage box with the left - gripper -- Grasp a pomegranate with the left gripper -- Grasp the mango with the right gripper -- End -- Place the mango into the right compartment of the storage box with the right gripper -- 'null' +- subtask: Place the pomegranate into the left compartment of the storage box with + the left gripper + subtask_index: 0 +- subtask: Grasp the pomegranate with the left gripper + subtask_index: 1 +- subtask: Grasp a mango with the right gripper + subtask_index: 2 +- subtask: Static + subtask_index: 3 +- subtask: Place the pomegranate into the left compartment of the storage box with + the left gripper + subtask_index: 4 +- subtask: Grasp a pomegranate with the left gripper + subtask_index: 5 +- subtask: Grasp the mango with the right gripper + subtask_index: 6 +- subtask: End + subtask_index: 7 +- 
subtask: Place the mango into the right compartment of the storage box with the + right gripper + subtask_index: 8 +- subtask: 'null' + subtask_index: 9 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -89,13 +101,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -103,8 +112,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 90 total_frames: 28506 fps: 30 @@ -132,11 +140,9 @@ data_structure: "Airbot_MMK2_Airbot_MMK2_storage_mango_pomegranate_qced_hardlink \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:89 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: 
&id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -406,7 +412,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -414,7 +420,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -441,144 +446,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_mango_pomegranate - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. 
- task_instruction: - - pick up the pomegranate with left hand and put it in the storage box, and pick - up the mango with right hand and put it in the storage box. - sub_tasks: - - subtask: Place the pomegranate into the left compartment of the storage box with - the left gripper - subtask_index: 0 - - subtask: Grasp the pomegranate with the left gripper - subtask_index: 1 - - subtask: Grasp a mango with the right gripper - subtask_index: 2 - - subtask: Static - subtask_index: 3 - - subtask: Place the pomegranate into the left compartment of the storage box with - the left gripper - subtask_index: 4 - - subtask: Grasp a pomegranate with the left gripper - subtask_index: 5 - - subtask: Grasp the mango with the right gripper - subtask_index: 6 - - subtask: End - subtask_index: 7 - - subtask: Place the mango into the right compartment of the storage box with the - right gripper - subtask_index: 8 - - subtask: 'null' - subtask_index: 9 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 28506 - dataset_size: 895.99 MB - data_structure: "Airbot_MMK2_Airbot_MMK2_storage_mango_pomegranate_qced_hardlink/\n\ - |-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... (78 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_storage_milk_tissue.yaml b/dataset_info/Airbot_MMK2_storage_milk_tissue.yaml index 494b3f5cbf46ef6e81f3ba3674d326c2ec438a99..5eec4a0fd794cfde5b056eaf48e237d26d294315 100644 --- a/dataset_info/Airbot_MMK2_storage_milk_tissue.yaml +++ b/dataset_info/Airbot_MMK2_storage_milk_tissue.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: furniture level2: table @@ -57,32 +57,41 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the tissue with left hand and put it in the basket, then - pick up the milk with right hand and put it in the basket. +task_instruction: +- pick up the tissue with left hand and put it in the basket, then pick up the milk + with right hand and put it in the basket. 
sub_tasks: -- Place the milk on the white basket with the right gripper -- Place the tissue on the white basket with the left gripper -- Abnormal -- Static -- Grasp the milk with the right gripper -- End -- Grasp the tissue with the left gripper -- 'null' +- subtask: Place the milk on the white basket with the right gripper + subtask_index: 0 +- subtask: Place the tissue on the white basket with the left gripper + subtask_index: 1 +- subtask: Abnormal + subtask_index: 2 +- subtask: Static + subtask_index: 3 +- subtask: Grasp the milk with the right gripper + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: Grasp the tissue with the left gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -91,13 +100,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -105,8 +111,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: 
&id010 +statistics: total_episodes: 48 total_frames: 10048 fps: 30 @@ -134,11 +139,9 @@ data_structure: "Airbot_MMK2_storage_milk_tissue_qced_hardlink/\n|-- annotations \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:47 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -408,7 +411,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -416,7 +419,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -443,137 +445,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. 
You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_milk_tissue - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the tissue with left hand and put it in the basket, then pick up the milk - with right hand and put it in the basket. - sub_tasks: - - subtask: Place the milk on the white basket with the right gripper - subtask_index: 0 - - subtask: Place the tissue on the white basket with the left gripper - subtask_index: 1 - - subtask: Abnormal - subtask_index: 2 - - subtask: Static - subtask_index: 3 - - subtask: Grasp the milk with the right gripper - subtask_index: 4 - - subtask: End - subtask_index: 5 - - subtask: Grasp the tissue with the left gripper - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 10048 - dataset_size: 350.34 MB - data_structure: "Airbot_MMK2_storage_milk_tissue_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... (36 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_storage_network_cable_paper_box.yaml b/dataset_info/Airbot_MMK2_storage_network_cable_paper_box.yaml index 33ba5464ad9b55cfd8a449ec4868f049e30bf8e5..a51eef2e144fad590aea406e776aa10aa72f0585 100644 --- a/dataset_info/Airbot_MMK2_storage_network_cable_paper_box.yaml +++ b/dataset_info/Airbot_MMK2_storage_network_cable_paper_box.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: box level1: home_storage level2: box @@ -51,30 +51,37 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the box with left hand and put it in the lid, and pick up - the cable with right hand and put it in the lid. +task_instruction: +- pick up the box with left hand and put it in the lid, and pick up the cable with + right hand and put it in the lid. 
sub_tasks: -- Grasp the mouse box with the left gripper -- End -- Place the mouse box into the carton with the left gripper -- Place the network cable into the carton with the right gripper -- Grasp the network cable with the right gripper -- 'null' +- subtask: Grasp the mouse box with the left gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Place the mouse box into the carton with the left gripper + subtask_index: 2 +- subtask: Place the network cable into the carton with the right gripper + subtask_index: 3 +- subtask: Grasp the network cable with the right gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -83,13 +90,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -97,8 +101,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 60 total_frames: 17028 fps: 30 @@ 
-126,11 +129,9 @@ data_structure: "Airbot_MMK2_storage_network_cable_paper_box_qced_hardlink/\n|-- \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:59 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -400,7 +401,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -408,7 +409,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -435,133 +435,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. 
- extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_network_cable_paper_box - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the box with left hand and put it in the lid, and pick up the cable with - right hand and put it in the lid. - sub_tasks: - - subtask: Grasp the mouse box with the left gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Place the mouse box into the carton with the left gripper - subtask_index: 2 - - subtask: Place the network cable into the carton with the right gripper - subtask_index: 3 - - subtask: Grasp the network cable with the right gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 17028 - dataset_size: 729.48 MB - data_structure: "Airbot_MMK2_storage_network_cable_paper_box_qced_hardlink/\n|--\ - \ annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... (48 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_storage_onion_sweet_potato.yaml b/dataset_info/Airbot_MMK2_storage_onion_sweet_potato.yaml index d4dd549c4f2affa52b0a84f36ecf6244ad63403d..f08b15a5ca8543fb5bf07785072e9552ac190233 100644 --- a/dataset_info/Airbot_MMK2_storage_onion_sweet_potato.yaml +++ b/dataset_info/Airbot_MMK2_storage_onion_sweet_potato.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: potato level1: vegetables level2: potato @@ -51,33 +51,40 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the potato with left hand and put it in the storage box, - and pick up the onion with right hand and put it in the storage box. +task_instruction: +- pick up the potato with left hand and put it in the storage box, and pick up the + onion with right hand and put it in the storage box. 
sub_tasks: -- Grasp the eggplant with the right gripper -- End -- Place the sweet potato into the left compartment of the storage box with the left - gripper -- Place the eggplant into the right compartment of the storage box with the right - gripper -- Grasp the sweet potato with the left gripper -- 'null' +- subtask: Grasp the eggplant with the right gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Place the sweet potato into the left compartment of the storage box with + the left gripper + subtask_index: 2 +- subtask: Place the eggplant into the right compartment of the storage box with the + right gripper + subtask_index: 3 +- subtask: Grasp the sweet potato with the left gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - pinch - pick - place - grasp -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -86,13 +93,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -100,8 +104,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 6502 fps: 30 @@ -129,11 +132,9 @@ data_structure: "Airbot_MMK2_storage_onion_sweet_potato_qced_hardlink/\n|-- anno \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -403,7 +404,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -411,7 +412,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -438,136 +438,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_onion_sweet_potato - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: Kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the potato with left hand and put it in the storage box, and pick up the - onion with right hand and put it in the storage box. 
- sub_tasks: - - subtask: Grasp the eggplant with the right gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Place the sweet potato into the left compartment of the storage box with - the left gripper - subtask_index: 2 - - subtask: Place the eggplant into the right compartment of the storage box with - the right gripper - subtask_index: 3 - - subtask: Grasp the sweet potato with the left gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - pinch - - pick - - place - - grasp - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 6502 - dataset_size: 173.29 MB - data_structure: "Airbot_MMK2_storage_onion_sweet_potato_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_paper_box_sponge.yaml b/dataset_info/Airbot_MMK2_storage_paper_box_sponge.yaml index 2a6f8fdeb0ec4b90baa92cd6382684ac3b806f84..3b99e9ad84830ffd2fc9ee9ffcf57f776cdcf934 100644 --- a/dataset_info/Airbot_MMK2_storage_paper_box_sponge.yaml +++ b/dataset_info/Airbot_MMK2_storage_paper_box_sponge.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: small_black_boxes level1: home_storage level2: small_black_boxes @@ -51,32 +51,40 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the box and the sponge into the plate respectively with left - and right hands. +task_instruction: +- put the box and the sponge into the plate respectively with left and right hands. sub_tasks: -- Grasp the sponge with the right gripper -- Place the sponge on the white plate with the right gripper -- Abnormal -- Static -- End -- Place the box on the white plate with the left gripper -- Grasp the box with the left gripper -- 'null' +- subtask: Grasp the sponge with the right gripper + subtask_index: 0 +- subtask: Place the sponge on the white plate with the right gripper + subtask_index: 1 +- subtask: Abnormal + subtask_index: 2 +- subtask: Static + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: Place the box on the white plate with the left gripper + subtask_index: 5 +- subtask: Grasp the box with the left gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -85,13 +93,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -99,8 +104,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 49 total_frames: 9624 fps: 30 @@ -128,11 +132,9 @@ data_structure: "Airbot_MMK2_storage_paper_box_sponge_qced_hardlink/\n|-- annota \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:48 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -402,7 +404,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -410,7 +412,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -437,136 +438,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_paper_box_sponge - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the box and the sponge into the plate respectively with left and right hands. 
- sub_tasks: - - subtask: Grasp the sponge with the right gripper - subtask_index: 0 - - subtask: Place the sponge on the white plate with the right gripper - subtask_index: 1 - - subtask: Abnormal - subtask_index: 2 - - subtask: Static - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: Place the box on the white plate with the left gripper - subtask_index: 5 - - subtask: Grasp the box with the left gripper - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 9624 - dataset_size: 304.37 MB - data_structure: "Airbot_MMK2_storage_paper_box_sponge_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_penguin_doll_tiger_doll.yaml b/dataset_info/Airbot_MMK2_storage_penguin_doll_tiger_doll.yaml index 4daea9a3e783ca12d71f88d0de3f83c2ef9bffd2..7aaaed42af89ee0b67303eec91d3d97c266c8111 100644 --- a/dataset_info/Airbot_MMK2_storage_penguin_doll_tiger_doll.yaml +++ b/dataset_info/Airbot_MMK2_storage_penguin_doll_tiger_doll.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: doll level1: toys level2: doll @@ -39,30 +39,38 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the doll on the plate. +task_instruction: +- put the doll on the plate. sub_tasks: -- End -- Grasp the penguin doll with the left gripper -- Grasp the tiger doll with the right gripper -- Place the penguin doll into the white basket with the left gripper -- Abnormal -- Place the tiger doll into the white basket with the right gripper -- 'null' +- subtask: End + subtask_index: 0 +- subtask: Grasp the penguin doll with the left gripper + subtask_index: 1 +- subtask: Grasp the tiger doll with the right gripper + subtask_index: 2 +- subtask: Place the penguin doll into the white basket with the left gripper + subtask_index: 3 +- subtask: Abnormal + subtask_index: 4 +- subtask: Place the tiger doll into the white basket with the right gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -71,13 +79,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -85,8 +90,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 100 total_frames: 13342 fps: 30 @@ -114,11 +118,9 @@ data_structure: "Airbot_MMK2_storage_penguin_doll_tiger_doll_qced_hardlink/\n|-- \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:99 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -388,7 +390,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -396,7 +398,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -423,134 +424,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_penguin_doll_tiger_doll - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: bedroom - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the doll on the plate. 
- sub_tasks: - - subtask: End - subtask_index: 0 - - subtask: Grasp the penguin doll with the left gripper - subtask_index: 1 - - subtask: Grasp the tiger doll with the right gripper - subtask_index: 2 - - subtask: Place the penguin doll into the white basket with the left gripper - subtask_index: 3 - - subtask: Abnormal - subtask_index: 4 - - subtask: Place the tiger doll into the white basket with the right gripper - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 13342 - dataset_size: 424.08 MB - data_structure: "Airbot_MMK2_storage_penguin_doll_tiger_doll_qced_hardlink/\n|--\ - \ annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(88 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_storage_pineapple.yaml b/dataset_info/Airbot_MMK2_storage_pineapple.yaml index 394d707cf88b858a72409285ba77ebe647280aec..b671fc8c11b5a939f698fbf6afef3379f32650d7 100644 --- a/dataset_info/Airbot_MMK2_storage_pineapple.yaml +++ b/dataset_info/Airbot_MMK2_storage_pineapple.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: pineapple level1: fruit level2: pineapple @@ -45,28 +45,32 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the pineapple with right hand and put it into the storage - box. +task_instruction: +- pick up the pineapple with right hand and put it into the storage box. sub_tasks: -- Place the pineapple into the storage box with the right gripper -- End -- Grasp the pineapple with the right gripper -- 'null' +- subtask: Place the pineapple into the storage box with the right gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Grasp the pineapple with the right gripper + subtask_index: 2 +- subtask: 'null' + subtask_index: 3 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -75,13 +79,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -89,8 +90,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 49 total_frames: 5219 fps: 30 @@ -118,11 +118,9 @@ data_structure: "Airbot_MMK2_storage_pineapple_qced_hardlink/\n|-- annotations\n \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:48 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -392,7 +390,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -400,7 +398,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -427,128 +424,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_pineapple - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the pineapple with right hand and put it into the storage box. 
- sub_tasks: - - subtask: Place the pineapple into the storage box with the right gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Grasp the pineapple with the right gripper - subtask_index: 2 - - subtask: 'null' - subtask_index: 3 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 5219 - dataset_size: 140.76 MB - data_structure: "Airbot_MMK2_storage_pineapple_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_potato_left.yaml b/dataset_info/Airbot_MMK2_storage_potato_left.yaml index 978eba9c063f3483d6bc067964525c6f68cd2f5e..49fc895bbdfbd6dbb9f204d0d46332eca11833d9 100644 --- a/dataset_info/Airbot_MMK2_storage_potato_left.yaml +++ b/dataset_info/Airbot_MMK2_storage_potato_left.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: potato level1: vegetables level2: potato @@ -45,31 +45,42 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the potato with left hand and put it in the storage box. +task_instruction: +- pick up the potato with left hand and put it in the storage box. sub_tasks: -- Grasp the potato with the right gripper -- Grasp the potato with the left gripper -- Abnormal -- Place the potato into the left compartment of the storage box with the left gripper -- End -- Static -- Place the potato into the right compartment of the storage box with the right gripper -- 'null' +- subtask: Grasp the potato with the right gripper + subtask_index: 0 +- subtask: Grasp the potato with the left gripper + subtask_index: 1 +- subtask: Abnormal + subtask_index: 2 +- subtask: Place the potato into the left compartment of the storage box with the + left gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: Static + subtask_index: 5 +- subtask: Place the potato into the right compartment of the storage box with the + right gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -78,13 +89,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -92,8 +100,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 69 total_frames: 8728 fps: 30 @@ -121,11 +128,9 @@ data_structure: "Airbot_MMK2_Airbot_MMK2_storage_potato_left_qced_hardlink/\n|-- \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:68 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -395,7 +400,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -403,7 +408,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -430,138 +434,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_potato_left - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the potato with left hand and put it in the storage box. 
- sub_tasks: - - subtask: Grasp the potato with the right gripper - subtask_index: 0 - - subtask: Grasp the potato with the left gripper - subtask_index: 1 - - subtask: Abnormal - subtask_index: 2 - - subtask: Place the potato into the left compartment of the storage box with the - left gripper - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: Static - subtask_index: 5 - - subtask: Place the potato into the right compartment of the storage box with the - right gripper - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 8728 - dataset_size: 322.26 MB - data_structure: "Airbot_MMK2_Airbot_MMK2_storage_potato_left_qced_hardlink/\n|--\ - \ annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(57 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_potato_pumpkin.yaml b/dataset_info/Airbot_MMK2_storage_potato_pumpkin.yaml index da17efd55f3805c22195712c1ab60ec5efa49475..e4ec8ac0990b6364adbfc51e582df5902cc0a012 100644 --- a/dataset_info/Airbot_MMK2_storage_potato_pumpkin.yaml +++ b/dataset_info/Airbot_MMK2_storage_potato_pumpkin.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: potato level1: vegetables level2: potato @@ -51,30 +51,39 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the potato with left hand and put it in the storage box, - and pick up the pumpkin with right hand and put it in the storage box. +task_instruction: +- pick up the potato with left hand and put it in the storage box, and pick up the + pumpkin with right hand and put it in the storage box. sub_tasks: -- Grasp the pumpkin with the right gripper -- Grasp the potato with the left gripper -- Place the potato into the left compartment of the storage box with the left gripper -- End -- Place the pumpkin into the right compartment of the storage box with the right gripper -- 'null' +- subtask: Grasp the pumpkin with the right gripper + subtask_index: 0 +- subtask: Grasp the potato with the left gripper + subtask_index: 1 +- subtask: Place the potato into the left compartment of the storage box with the + left gripper + subtask_index: 2 +- subtask: End + subtask_index: 3 +- subtask: Place the pumpkin into the right compartment of the storage box with the + right gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - pick - place - grasp -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -83,13 +92,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -97,8 +103,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 41 total_frames: 9965 fps: 30 @@ -126,11 +131,9 @@ data_structure: "Airbot_MMK2_Airbot_MMK2_storage_potato_pumpkin_qced_hardlink/\n \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:40 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -400,7 +403,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -408,7 +411,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -435,135 +437,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_potato_pumpkin - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the potato with left hand and put it in the storage box, and pick up the - pumpkin with right hand and put it in the storage box. 
- sub_tasks: - - subtask: Grasp the pumpkin with the right gripper - subtask_index: 0 - - subtask: Grasp the potato with the left gripper - subtask_index: 1 - - subtask: Place the potato into the left compartment of the storage box with the - left gripper - subtask_index: 2 - - subtask: End - subtask_index: 3 - - subtask: Place the pumpkin into the right compartment of the storage box with - the right gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - pick - - place - - grasp - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 9965 - dataset_size: 288.93 MB - data_structure: "Airbot_MMK2_Airbot_MMK2_storage_potato_pumpkin_qced_hardlink/\n\ - |-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(29 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_pumpkin_left.yaml b/dataset_info/Airbot_MMK2_storage_pumpkin_left.yaml index 24e89c7867d092ce1d29876746cbcfebb2e905c8..1695332f7658f3364f065bbf278484675c1dc50a 100644 --- a/dataset_info/Airbot_MMK2_storage_pumpkin_left.yaml +++ b/dataset_info/Airbot_MMK2_storage_pumpkin_left.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: pumpkin level1: vegetables level2: pumpkin @@ -45,28 +45,34 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the pumpkin with left hand and put it in the bowl. +task_instruction: +- pick up the pumpkin with left hand and put it in the bowl. sub_tasks: -- End -- Static -- Grasp the pumpkin with the left gripper -- Place the pumpkin on the pink bowl with the left gripper -- 'null' +- subtask: End + subtask_index: 0 +- subtask: Static + subtask_index: 1 +- subtask: Grasp the pumpkin with the left gripper + subtask_index: 2 +- subtask: Place the pumpkin on the pink bowl with the left gripper + subtask_index: 3 +- subtask: 'null' + subtask_index: 4 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -75,13 +81,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -89,8 +92,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 6026 fps: 30 @@ -118,11 +120,9 @@ data_structure: "Airbot_MMK2_Airbot_MMK2_storage_pumpkin_left_qced_hardlink/\n|- \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -392,7 +392,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -400,7 +400,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -427,130 +426,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_pumpkin_left - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the pumpkin with left hand and put it in the bowl. 
- sub_tasks: - - subtask: End - subtask_index: 0 - - subtask: Static - subtask_index: 1 - - subtask: Grasp the pumpkin with the left gripper - subtask_index: 2 - - subtask: Place the pumpkin on the pink bowl with the left gripper - subtask_index: 3 - - subtask: 'null' - subtask_index: 4 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 6026 - dataset_size: 227.20 MB - data_structure: "Airbot_MMK2_Airbot_MMK2_storage_pumpkin_left_qced_hardlink/\n|--\ - \ annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_pumpkin_right.yaml b/dataset_info/Airbot_MMK2_storage_pumpkin_right.yaml index 60e4c9eb40868faf19eb0c4cdb4aa63b864f3a0e..27bc6ebaffb531e3428ab77fe7b60c5ec2911937 100644 --- a/dataset_info/Airbot_MMK2_storage_pumpkin_right.yaml +++ b/dataset_info/Airbot_MMK2_storage_pumpkin_right.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -23,11 +23,14 @@ codebase_version: v2.1 dataset_name: Airbot_MMK2_storage_pumpkin_right dataset_uuid: 00000000-0000-0000-0000-000000000000 scene_type: - level1: household - level2: kitchen + level1: scene_level1 + level2: scene_level2 + level3: null + level4: null + level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: pumpkin level1: vegetables level2: pumpkin @@ -42,29 +45,37 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the pumpkin with right hand and put it in the storage box. +task_instruction: +- pick up the pumpkin with right hand and put it in the storage box. sub_tasks: -- Place the pumpkin into the right compartment of the storage box -- Place the pumpkin into the right compartment of the storage box with the right gripper -- Grasp the pumpkin with the right gripper -- End -- Static -- 'null' +- subtask: Place the pumpkin into the right compartment of the storage box + subtask_index: 0 +- subtask: Place the pumpkin into the right compartment of the storage box with the + right gripper + subtask_index: 1 +- subtask: Grasp the pumpkin with the right gripper + subtask_index: 2 +- subtask: End + subtask_index: 3 +- subtask: Static + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - place - pick -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -73,13 +84,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -87,8 +95,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 49 total_frames: 12358 fps: 30 @@ -116,11 +123,9 @@ data_structure: "Airbot_MMK2_Airbot_MMK2_storage_pumpkin_right_qced_hardlink/\n| \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:48 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -390,7 +395,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -398,7 +403,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -425,133 +429,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_pumpkin_right - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: scene_level1 - level2: scene_level2 - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the pumpkin with right hand and put it in the storage box. 
- sub_tasks: - - subtask: Place the pumpkin into the right compartment of the storage box - subtask_index: 0 - - subtask: Place the pumpkin into the right compartment of the storage box with - the right gripper - subtask_index: 1 - - subtask: Grasp the pumpkin with the right gripper - subtask_index: 2 - - subtask: End - subtask_index: 3 - - subtask: Static - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - place - - pick - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 12358 - dataset_size: 463.31 MB - data_structure: "Airbot_MMK2_Airbot_MMK2_storage_pumpkin_right_qced_hardlink/\n\ - |-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_storage_shark_doll.yaml b/dataset_info/Airbot_MMK2_storage_shark_doll.yaml index 8f2af3825ccc5dd3606c52dc790b1b7205c9ee7e..c6aac08887c3c3e0c8f0bc15f6c6165b3b1b67e8 100644 --- a/dataset_info/Airbot_MMK2_storage_shark_doll.yaml +++ b/dataset_info/Airbot_MMK2_storage_shark_doll.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -51,28 +51,34 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the shark doll and put it in the lid. +task_instruction: +- pick up the shark doll and put it in the lid. sub_tasks: -- Place the whale on the white lid with the right gripper -- Grasp the whale with the left gripper -- Deliver the whale from left gripper to right gripper -- End -- 'null' +- subtask: Place the whale on the white lid with the right gripper + subtask_index: 0 +- subtask: Grasp the whale with the left gripper + subtask_index: 1 +- subtask: Deliver the whale from left gripper to right gripper + subtask_index: 2 +- subtask: End + subtask_index: 3 +- subtask: 'null' + subtask_index: 4 atomic_actions: - grasp - place - pick -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -81,13 +87,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -95,8 +98,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 93 total_frames: 13034 fps: 30 @@ -124,11 +126,9 @@ data_structure: "Airbot_MMK2_storage_shark_doll_qced_hardlink/\n|-- annotations\ \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:92 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -398,7 +398,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -406,7 +406,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -433,130 +432,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_shark_doll - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: commercial_convenience - level2: supermarket - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the shark doll and put it in the lid. 
- sub_tasks: - - subtask: Place the whale on the white lid with the right gripper - subtask_index: 0 - - subtask: Grasp the whale with the left gripper - subtask_index: 1 - - subtask: Deliver the whale from left gripper to right gripper - subtask_index: 2 - - subtask: End - subtask_index: 3 - - subtask: 'null' - subtask_index: 4 - atomic_actions: - - grasp - - place - - pick - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 13034 - dataset_size: 461.32 MB - data_structure: "Airbot_MMK2_storage_shark_doll_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(81 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_storage_sponge_wet_wipes.yaml b/dataset_info/Airbot_MMK2_storage_sponge_wet_wipes.yaml index 15ee718a5cb4808f78bc7bbd102e428c3caeeec0..3a2dc969f60fcd5bc30ce145ea6f822e256fc433 100644 --- a/dataset_info/Airbot_MMK2_storage_sponge_wet_wipes.yaml +++ b/dataset_info/Airbot_MMK2_storage_sponge_wet_wipes.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: furniture level2: table @@ -57,32 +57,41 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the sponge and wet wipes with both hands simultaneously - and put them into the basket at the same time. +task_instruction: +- pick up the sponge and wet wipes with both hands simultaneously and put them into + the basket at the same time. sub_tasks: -- Abnormal -- Place the sponge into the white basket with the left gripper -- Static -- Grasp the sponge with the left gripper -- Place the wet wipes into the white basket with the right gripper -- End -- Grasp the wet wipes with the right gripper -- 'null' +- subtask: Abnormal + subtask_index: 0 +- subtask: 'Place the sponge into the white basket with the left gripper ' + subtask_index: 1 +- subtask: Static + subtask_index: 2 +- subtask: 'Grasp the sponge with the left gripper ' + subtask_index: 3 +- subtask: Place the wet wipes into the white basket with the right gripper + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: Grasp the wet wipes with the right gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -91,13 +100,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -105,8 +111,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 40 total_frames: 4690 fps: 30 @@ -134,11 +139,9 @@ data_structure: "Airbot_MMK2_storage_sponge_wet_wipes_qced_hardlink/\n|-- annota \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:39 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -408,7 +411,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -416,7 +419,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -443,137 +445,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_sponge_wet_wipes - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the sponge and wet wipes with both hands simultaneously and put them into - the basket at the same time. 
- sub_tasks: - - subtask: Abnormal - subtask_index: 0 - - subtask: 'Place the sponge into the white basket with the left gripper ' - subtask_index: 1 - - subtask: Static - subtask_index: 2 - - subtask: 'Grasp the sponge with the left gripper ' - subtask_index: 3 - - subtask: Place the wet wipes into the white basket with the right gripper - subtask_index: 4 - - subtask: End - subtask_index: 5 - - subtask: Grasp the wet wipes with the right gripper - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 4690 - dataset_size: 191.12 MB - data_structure: "Airbot_MMK2_storage_sponge_wet_wipes_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(28 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_spoon.yaml b/dataset_info/Airbot_MMK2_storage_spoon.yaml index c78c5fca61c51471b4ad8e99dc37d02cdbd50bfd..bf25028f1d8aa984a15610a87580c00b07487c9c 100644 --- a/dataset_info/Airbot_MMK2_storage_spoon.yaml +++ b/dataset_info/Airbot_MMK2_storage_spoon.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: spoon level1: kitchen_supplies level2: spoon @@ -45,30 +45,36 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the spoons on the table by hand and place them into the - basin. +task_instruction: +- pick up the spoons on the table by hand and place them into the basin. sub_tasks: -- Grasp the spoon with the right gripper -- End -- Grasp the spoon with the left gripper -- Place the spoon into the basin with the right gripper -- Place the spoon into the basin with the left gripper -- 'null' +- subtask: Grasp the spoon with the right gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Grasp the spoon with the left gripper + subtask_index: 2 +- subtask: Place the spoon into the basin with the right gripper + subtask_index: 3 +- subtask: Place the spoon into the basin with the left gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -77,13 +83,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -91,8 +94,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 54 total_frames: 15550 fps: 30 @@ -119,11 +121,9 @@ data_structure: "Airbot_MMK2_storage_spoon_qced_hardlink/\n|-- annotations\n| | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:53 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -393,7 +393,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -401,7 +401,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -428,132 +427,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_spoon - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the spoons on the table by hand and place them into the basin. 
- sub_tasks: - - subtask: Grasp the spoon with the right gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Grasp the spoon with the left gripper - subtask_index: 2 - - subtask: Place the spoon into the basin with the right gripper - subtask_index: 3 - - subtask: Place the spoon into the basin with the left gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 15550 - dataset_size: 509.63 MB - data_structure: "Airbot_MMK2_storage_spoon_qced_hardlink/\n|-- annotations\n| \ - \ |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n|\ - \ |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(42 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_storage_stationery_xylophone.yaml b/dataset_info/Airbot_MMK2_storage_stationery_xylophone.yaml index 3335d41d9c79c9c2ac33fdd7f92d025b8b101ef7..111665a4ba580a649ae4652ae9e165d9c092ed1a 100644 --- a/dataset_info/Airbot_MMK2_storage_stationery_xylophone.yaml +++ b/dataset_info/Airbot_MMK2_storage_stationery_xylophone.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -23,11 +23,14 @@ codebase_version: v2.1 dataset_name: Airbot_MMK2_storage_stationery_xylophone dataset_uuid: 00000000-0000-0000-0000-000000000000 scene_type: - level1: household - level2: living_room + level1: scene_level1 + level2: scene_level2 + level3: null + level4: null + level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: xylophone level1: musical_instrument level2: xylophone @@ -48,32 +51,41 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the xylophone with left hand and put it in the lid, and - pick up the stationery with right hand and put it in the lid. +task_instruction: +- pick up the xylophone with left hand and put it in the lid, and pick up the stationery + with right hand and put it in the lid. 
sub_tasks: -- Grasp the xylophone with the left gripper -- Place the xylophone on the white lid with the left gripper -- Abnormal -- Static -- Place the tongs on the white lid with the right gripper -- End -- Grasp the tongs with the right gripper -- 'null' +- subtask: Grasp the xylophone with the left gripper + subtask_index: 0 +- subtask: Place the xylophone on the white lid with the left gripper + subtask_index: 1 +- subtask: Abnormal + subtask_index: 2 +- subtask: Static + subtask_index: 3 +- subtask: Place the tongs on the white lid with the right gripper + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: Grasp the tongs with the right gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -82,13 +94,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -96,8 +105,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: 
&id010 +statistics: total_episodes: 47 total_frames: 24223 fps: 30 @@ -125,11 +133,9 @@ data_structure: "Airbot_MMK2_storage_stationery_xylophone_qced_hardlink/\n|-- an \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:46 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -399,7 +405,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -407,7 +413,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -434,137 +439,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. 
You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_stationery_xylophone - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: scene_level1 - level2: scene_level2 - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the xylophone with left hand and put it in the lid, and pick up the stationery - with right hand and put it in the lid. - sub_tasks: - - subtask: Grasp the xylophone with the left gripper - subtask_index: 0 - - subtask: Place the xylophone on the white lid with the left gripper - subtask_index: 1 - - subtask: Abnormal - subtask_index: 2 - - subtask: Static - subtask_index: 3 - - subtask: Place the tongs on the white lid with the right gripper - subtask_index: 4 - - subtask: End - subtask_index: 5 - - subtask: Grasp the tongs with the right gripper - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 24223 - dataset_size: 791.71 MB - data_structure: "Airbot_MMK2_storage_stationery_xylophone_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... (35 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_storage_tape_measure_umbrella.yaml b/dataset_info/Airbot_MMK2_storage_tape_measure_umbrella.yaml index 7ba83985c9c9444704d5783525fd709dfb7fcd0d..f776376be28d239cb82924624a57922f4995a550 100644 --- a/dataset_info/Airbot_MMK2_storage_tape_measure_umbrella.yaml +++ b/dataset_info/Airbot_MMK2_storage_tape_measure_umbrella.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: basin level1: storage_utensils level2: basin @@ -51,30 +51,38 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: put the tape measure and the umbrella in the basin. +task_instruction: +- put the tape measure and the umbrella in the basin. 
sub_tasks: -- End -- Grasp the tape measure with the right gripper -- Grasp the umbrella with the left gripper -- Static -- Place the umbrella in the white basket with the left gripper -- Place the tape measure in the white basket with the right gripper -- 'null' +- subtask: End + subtask_index: 0 +- subtask: Grasp the tape measure with the right gripper + subtask_index: 1 +- subtask: Grasp the umbrella with the left gripper + subtask_index: 2 +- subtask: Static + subtask_index: 3 +- subtask: Place the umbrella in the white basket with the left gripper + subtask_index: 4 +- subtask: Place the tape measure in the white basket with the right gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -83,13 +91,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -97,8 +102,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 
+statistics: total_episodes: 40 total_frames: 7112 fps: 30 @@ -126,11 +130,9 @@ data_structure: "Airbot_MMK2_storage_tape_measure_umbrella_qced_hardlink/\n|-- a \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:39 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -400,7 +402,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -408,7 +410,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -435,134 +436,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. 
You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_tape_measure_umbrella - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - put the tape measure and the umbrella in the basin. - sub_tasks: - - subtask: End - subtask_index: 0 - - subtask: Grasp the tape measure with the right gripper - subtask_index: 1 - - subtask: Grasp the umbrella with the left gripper - subtask_index: 2 - - subtask: Static - subtask_index: 3 - - subtask: Place the umbrella in the white basket with the left gripper - subtask_index: 4 - - subtask: Place the tape measure in the white basket with the right gripper - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 7112 - dataset_size: 236.10 MB - data_structure: "Airbot_MMK2_storage_tape_measure_umbrella_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... (28 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_tissues_tub.yaml b/dataset_info/Airbot_MMK2_storage_tissues_tub.yaml index c89b10538f1b619a29fda5efbdef1bd78a36a2f6..f07b12f069aeeb93b0d935e7b8fc5d760bb671fd 100644 --- a/dataset_info/Airbot_MMK2_storage_tissues_tub.yaml +++ b/dataset_info/Airbot_MMK2_storage_tissues_tub.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: tissue_paper level1: paper_towels level2: tissue_paper @@ -45,32 +45,40 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the tissues on the table by hand and place them on the white - tray. +task_instruction: +- pick up the tissues on the table by hand and place them on the white tray. 
sub_tasks: -- Grasp the tissue with the right gripper -- Abnormal -- Static -- Place the tissue into the brown basin with the right gripper -- Place the tissue into the brown basin with the left gripper -- End -- Grasp the tissue with the left gripper -- 'null' +- subtask: Grasp the tissue with the right gripper + subtask_index: 0 +- subtask: Abnormal + subtask_index: 1 +- subtask: Static + subtask_index: 2 +- subtask: Place the tissue into the brown basin with the right gripper + subtask_index: 3 +- subtask: Place the tissue into the brown basin with the left gripper + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: Grasp the tissue with the left gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -79,13 +87,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -93,8 +98,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null 
-statistics: &id010 +statistics: total_episodes: 49 total_frames: 6851 fps: 30 @@ -122,11 +126,9 @@ data_structure: "Airbot_MMK2_storage_tissues_tub_qced_hardlink/\n|-- annotations \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:48 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -396,7 +398,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -404,7 +406,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -431,136 +432,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. 
You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_tissues_tub - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the tissues on the table by hand and place them on the white tray. - sub_tasks: - - subtask: Grasp the tissue with the right gripper - subtask_index: 0 - - subtask: Abnormal - subtask_index: 1 - - subtask: Static - subtask_index: 2 - - subtask: Place the tissue into the brown basin with the right gripper - subtask_index: 3 - - subtask: Place the tissue into the brown basin with the left gripper - subtask_index: 4 - - subtask: End - subtask_index: 5 - - subtask: Grasp the tissue with the left gripper - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 6851 - dataset_size: 207.40 MB - data_structure: "Airbot_MMK2_storage_tissues_tub_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... (37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_tomato_potato.yaml b/dataset_info/Airbot_MMK2_storage_tomato_potato.yaml index 9e2ed844be7879442378543c467bf999cb3ff2d4..6bf1c415b9ad7236479c748346268f04e38dfd05 100644 --- a/dataset_info/Airbot_MMK2_storage_tomato_potato.yaml +++ b/dataset_info/Airbot_MMK2_storage_tomato_potato.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: potato level1: vegetables level2: potato @@ -51,30 +51,39 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the potato with left hand and put it in the storage box, - and pick up the tomato with right hand and put it in the storage box. +task_instruction: +- pick up the potato with left hand and put it in the storage box, and pick up the + tomato with right hand and put it in the storage box. 
sub_tasks: -- Grasp the tomato with the right gripper -- End -- Place the potato into the left compartment of the storage box with the left gripper -- Grasp the potato with the left gripper -- Place the tomato into the right compartment of the storage box with the right gripper -- 'null' +- subtask: Grasp the tomato with the right gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Place the potato into the left compartment of the storage box with the + left gripper + subtask_index: 2 +- subtask: Grasp the potato with the left gripper + subtask_index: 3 +- subtask: Place the tomato into the right compartment of the storage box with the + right gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -83,13 +92,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -97,8 +103,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null 
-statistics: &id010 +statistics: total_episodes: 47 total_frames: 6005 fps: 30 @@ -126,11 +131,9 @@ data_structure: "Airbot_MMK2_storage_tomato_potato_qced_hardlink/\n|-- annotatio \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:46 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -400,7 +403,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -408,7 +411,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -435,135 +437,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. 
You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_tomato_potato - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the potato with left hand and put it in the storage box, and pick up the - tomato with right hand and put it in the storage box. - sub_tasks: - - subtask: Grasp the tomato with the right gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Place the potato into the left compartment of the storage box with the - left gripper - subtask_index: 2 - - subtask: Grasp the potato with the left gripper - subtask_index: 3 - - subtask: Place the tomato into the right compartment of the storage box with the - right gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 6005 - dataset_size: 161.13 MB - data_structure: "Airbot_MMK2_storage_tomato_potato_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... (35 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_tools.yaml b/dataset_info/Airbot_MMK2_storage_tools.yaml index e42f22045c4c06149b06180d3ecb9bc687cbb1c4..f8abecd427fabfa6e5d189683b03d17760f28d24 100644 --- a/dataset_info/Airbot_MMK2_storage_tools.yaml +++ b/dataset_info/Airbot_MMK2_storage_tools.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: utility_knife level1: kitchen_supplies level2: utility_knife @@ -51,31 +51,39 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the plier and the utility knife on the table by hand and - place them into the white tray. +task_instruction: +- pick up the plier and the utility knife on the table by hand and place them into + the white tray. 
sub_tasks: -- Place the plier into white basket with the left gripper -- Grasp the plier with the left gripper -- Static -- Place the utility knife into white basket with the right gripper -- Grasp the utility knife with the right gripper -- End -- 'null' +- subtask: Place the plier into white basket with the left gripper + subtask_index: 0 +- subtask: Grasp the plier with the left gripper + subtask_index: 1 +- subtask: Static + subtask_index: 2 +- subtask: Place the utility knife into white basket with the right gripper + subtask_index: 3 +- subtask: Grasp the utility knife with the right gripper + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -84,13 +92,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -98,8 +103,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: 
total_episodes: 49 total_frames: 7910 fps: 30 @@ -126,11 +130,9 @@ data_structure: "Airbot_MMK2_storage_tools_qced_hardlink/\n|-- annotations\n| | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:48 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -400,7 +402,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -408,7 +410,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -435,135 +436,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. 
You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_tools - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: industry - level2: factory - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the plier and the utility knife on the table by hand and place them into - the white tray. - sub_tasks: - - subtask: Place the plier into white basket with the left gripper - subtask_index: 0 - - subtask: Grasp the plier with the left gripper - subtask_index: 1 - - subtask: Static - subtask_index: 2 - - subtask: Place the utility knife into white basket with the right gripper - subtask_index: 3 - - subtask: Grasp the utility knife with the right gripper - subtask_index: 4 - - subtask: End - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 7910 - dataset_size: 221.53 MB - data_structure: "Airbot_MMK2_storage_tools_qced_hardlink/\n|-- annotations\n| \ - \ |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n|\ - \ |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... (37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_storage_tumbler_umbrella.yaml b/dataset_info/Airbot_MMK2_storage_tumbler_umbrella.yaml index ca320d36822596f52ef95f6eaeb2cc84541cd9e4..4b3e73c7954386897c3ceddd8322545ccfb2af33 100644 --- a/dataset_info/Airbot_MMK2_storage_tumbler_umbrella.yaml +++ b/dataset_info/Airbot_MMK2_storage_tumbler_umbrella.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: furniture level2: table @@ -57,31 +57,39 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the umbrella with left hand and put it in the basket, then - use right hand to pick up the thermos and put it in the basket. +task_instruction: +- pick up the umbrella with left hand and put it in the basket, then use right hand + to pick up the thermos and put it in the basket. 
sub_tasks: -- Place the cup into the white basket with the right gripper -- Grasp the umbrella with the left gripper -- Place the umbrella into the white basket with the left gripper -- Static -- End -- Grasp the cup with the right gripper -- 'null' +- subtask: Place the cup into the white basket with the right gripper + subtask_index: 0 +- subtask: Grasp the umbrella with the left gripper + subtask_index: 1 +- subtask: Place the umbrella into the white basket with the left gripper + subtask_index: 2 +- subtask: Static + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: Grasp the cup with the right gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -90,13 +98,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -104,8 +109,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 48 
total_frames: 14373 fps: 30 @@ -133,11 +137,9 @@ data_structure: "Airbot_MMK2_storage_tumbler_umbrella_qced_hardlink/\n|-- annota \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:47 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -407,7 +409,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -415,7 +417,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -442,135 +443,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. 
You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storage_tumbler_umbrella - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the umbrella with left hand and put it in the basket, then use right hand - to pick up the thermos and put it in the basket. - sub_tasks: - - subtask: Place the cup into the white basket with the right gripper - subtask_index: 0 - - subtask: Grasp the umbrella with the left gripper - subtask_index: 1 - - subtask: Place the umbrella into the white basket with the left gripper - subtask_index: 2 - - subtask: Static - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: Grasp the cup with the right gripper - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 14373 - dataset_size: 542.57 MB - data_structure: "Airbot_MMK2_storage_tumbler_umbrella_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... (36 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_storge_cake_ice_cream.yaml b/dataset_info/Airbot_MMK2_storge_cake_ice_cream.yaml index 31b0f1034e006ec29dbf17c388022ad15b177f14..1bdf7b38ed733c6d2e3c55efeeb6fa9ef73b881d 100644 --- a/dataset_info/Airbot_MMK2_storge_cake_ice_cream.yaml +++ b/dataset_info/Airbot_MMK2_storge_cake_ice_cream.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: cake level1: food level2: cake @@ -57,32 +57,41 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: hold the cake in left hand and the ice cream in right hand at the - same time. put the cake into the bowl and place the ice cream on the plate. +task_instruction: +- hold the cake in left hand and the ice cream in right hand at the same time. put + the cake into the bowl and place the ice cream on the plate. 
sub_tasks: -- Place the ice cream into the plate with the right gripper -- Grasp the ice cream with the right gripper -- Grasp the cake with the left gripper -- Place the cake into the bowl with the left gripper -- Static -- Grasp the cake from the table and with the left gripper -- End -- 'null' +- subtask: Place the ice cream into the plate with the right gripper + subtask_index: 0 +- subtask: Grasp the ice cream with the right gripper + subtask_index: 1 +- subtask: Grasp the cake with the left gripper + subtask_index: 2 +- subtask: Place the cake into the bowl with the left gripper + subtask_index: 3 +- subtask: Static + subtask_index: 4 +- subtask: Grasp the cake from the table and with the left gripper + subtask_index: 5 +- subtask: End + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -91,13 +100,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -105,8 +111,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 48 total_frames: 8733 fps: 30 @@ -134,11 +139,9 @@ data_structure: "Airbot_MMK2_storge_cake_ice_cream_qced_hardlink/\n|-- annotatio \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:47 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -408,7 +411,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -416,7 +419,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -443,137 +445,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_storge_cake_ice_cream - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - hold the cake in left hand and the ice cream in right hand at the same time. put - the cake into the bowl and place the ice cream on the plate. 
- sub_tasks: - - subtask: Place the ice cream into the plate with the right gripper - subtask_index: 0 - - subtask: Grasp the ice cream with the right gripper - subtask_index: 1 - - subtask: Grasp the cake with the left gripper - subtask_index: 2 - - subtask: Place the cake into the bowl with the left gripper - subtask_index: 3 - - subtask: Static - subtask_index: 4 - - subtask: Grasp the cake from the table and with the left gripper - subtask_index: 5 - - subtask: End - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 8733 - dataset_size: 351.41 MB - data_structure: "Airbot_MMK2_storge_cake_ice_cream_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(36 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_swap_apple_cake_plate.yaml b/dataset_info/Airbot_MMK2_swap_apple_cake_plate.yaml index bf58c99224d11b6ca91644146c6e53afd34638e1..e8da38c7a3f3ad7b2cfa17fd077ee89f1784a260 100644 --- a/dataset_info/Airbot_MMK2_swap_apple_cake_plate.yaml +++ b/dataset_info/Airbot_MMK2_swap_apple_cake_plate.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: cake level1: bread level2: cake @@ -51,29 +51,36 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: take the cake out of the plate and put the apples in. +task_instruction: +- take the cake out of the plate and put the apples in. sub_tasks: -- Grasp the cake placed into the plate with the left gripper -- Grasp the apple with the right gripper -- End -- Place the cake on the table with the left gripper -- Place the apple into the plate with the right gripper -- 'null' +- subtask: Grasp the cake placed into the plate with the left gripper + subtask_index: 0 +- subtask: Grasp the apple with the right gripper + subtask_index: 1 +- subtask: End + subtask_index: 2 +- subtask: Place the cake on the table with the left gripper + subtask_index: 3 +- subtask: Place the apple into the plate with the right gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -82,13 +89,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -96,8 +100,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 47 total_frames: 6775 fps: 30 @@ -125,11 +128,9 @@ data_structure: "Airbot_MMK2_swap_apple_cake_plate_qced_hardlink/\n|-- annotatio \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:46 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -399,7 +400,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -407,7 +408,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -434,132 +434,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_swap_apple_cake_plate - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - take the cake out of the plate and put the apples in. 
- sub_tasks: - - subtask: Grasp the cake placed into the plate with the left gripper - subtask_index: 0 - - subtask: Grasp the apple with the right gripper - subtask_index: 1 - - subtask: End - subtask_index: 2 - - subtask: Place the cake on the table with the left gripper - subtask_index: 3 - - subtask: Place the apple into the plate with the right gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 6775 - dataset_size: 182.07 MB - data_structure: "Airbot_MMK2_swap_apple_cake_plate_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(35 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_swap_bbs_block_plate.yaml b/dataset_info/Airbot_MMK2_swap_bbs_block_plate.yaml index f37d44ac235ba2ed13897140a4bff3dd5b686f91..b18bf1b499e2860bd431d6621f57b22571ce0c20 100644 --- a/dataset_info/Airbot_MMK2_swap_bbs_block_plate.yaml +++ b/dataset_info/Airbot_MMK2_swap_bbs_block_plate.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: bb_pellets level1: toys level2: bb_pellets @@ -51,31 +51,39 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: take the bb balls out of the plate with left hand and put them back - in with right hand. +task_instruction: +- take the bb balls out of the plate with left hand and put them back in with right + hand. sub_tasks: -- Place the green rectangular block on the plate with the right gripper -- Place the bullet on the table with the left gripper -- Graasp the green rectangular block with the right gripper -- Abnormal -- Graasp the bullet on the plate and with the left gripper -- End -- 'null' +- subtask: Place the green rectangular block on the plate with the right gripper + subtask_index: 0 +- subtask: Place the bullet on the table with the left gripper + subtask_index: 1 +- subtask: Graasp the green rectangular block with the right gripper + subtask_index: 2 +- subtask: Abnormal + subtask_index: 3 +- subtask: Graasp the bullet on the plate and with the left gripper + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -84,13 +92,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -98,8 +103,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 9449 fps: 30 @@ -127,11 +131,9 @@ data_structure: "Airbot_MMK2_swap_bbs_block_plate_qced_hardlink/\n|-- annotation \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -401,7 +403,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -409,7 +411,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -436,135 +437,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_swap_bbs_block_plate - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - take the bb balls out of the plate with left hand and put them back in with right - hand. 
- sub_tasks: - - subtask: Place the green rectangular block on the plate with the right gripper - subtask_index: 0 - - subtask: Place the bullet on the table with the left gripper - subtask_index: 1 - - subtask: Graasp the green rectangular block with the right gripper - subtask_index: 2 - - subtask: Abnormal - subtask_index: 3 - - subtask: Graasp the bullet on the plate and with the left gripper - subtask_index: 4 - - subtask: End - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 9449 - dataset_size: 295.93 MB - data_structure: "Airbot_MMK2_swap_bbs_block_plate_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_swap_bbs_cake_plate.yaml b/dataset_info/Airbot_MMK2_swap_bbs_cake_plate.yaml index ce4eabd640cd87908fa967c140bb51ed3e19cb12..7315224f20f389424cd4a599755cb7404a67207c 100644 --- a/dataset_info/Airbot_MMK2_swap_bbs_cake_plate.yaml +++ b/dataset_info/Airbot_MMK2_swap_bbs_cake_plate.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: bb_pellets level1: toys level2: bb_pellets @@ -51,32 +51,40 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: take the bbs out of the plate with left hand and put the cake in - with right hand. +task_instruction: +- take the bbs out of the plate with left hand and put the cake in with right hand. sub_tasks: -- Grasp the cake on the table with the right gripper -- Place the bullet on the table with the left gripper -- Grasp the bullet into the plate with the left gripper -- Abnormal -- Place the cake into the plate with the right gripper -- Static -- End -- 'null' +- subtask: Grasp the cake on the table with the right gripper + subtask_index: 0 +- subtask: Place the bullet on the table with the left gripper + subtask_index: 1 +- subtask: Grasp the bullet into the plate with the left gripper + subtask_index: 2 +- subtask: Abnormal + subtask_index: 3 +- subtask: Place the cake into the plate with the right gripper + subtask_index: 4 +- subtask: Static + subtask_index: 5 +- subtask: End + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -85,13 +93,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -99,8 +104,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 49 total_frames: 7877 fps: 30 @@ -128,11 +132,9 @@ data_structure: "Airbot_MMK2_swap_bbs_cake_plate_qced_hardlink/\n|-- annotations \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:48 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -402,7 +404,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -410,7 +412,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -437,136 +438,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_swap_bbs_cake_plate - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - take the bbs out of the plate with left hand and put the cake in with right hand. 
- sub_tasks: - - subtask: Grasp the cake on the table with the right gripper - subtask_index: 0 - - subtask: Place the bullet on the table with the left gripper - subtask_index: 1 - - subtask: Grasp the bullet into the plate with the left gripper - subtask_index: 2 - - subtask: Abnormal - subtask_index: 3 - - subtask: Place the cake into the plate with the right gripper - subtask_index: 4 - - subtask: Static - subtask_index: 5 - - subtask: End - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 7877 - dataset_size: 384.19 MB - data_structure: "Airbot_MMK2_swap_bbs_cake_plate_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_swap_bottle_wet_wipes_plate.yaml b/dataset_info/Airbot_MMK2_swap_bottle_wet_wipes_plate.yaml index e6c5aa0396b38f287588085e3ee06cda02c8b105..ba36a26cea75aaa3addf37f135083a18aaf43fde 100644 --- a/dataset_info/Airbot_MMK2_swap_bottle_wet_wipes_plate.yaml +++ b/dataset_info/Airbot_MMK2_swap_bottle_wet_wipes_plate.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: wipes level1: kitchen_supplies level2: wipes @@ -51,30 +51,37 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: take the bottle out of the plate with left hand and put the wet - wipes in with right hand. +task_instruction: +- take the bottle out of the plate with left hand and put the wet wipes in with right + hand. sub_tasks: -- Place the wet wipes into the plate with the right gripper -- Place the Vitamin B water on the table with the left gripper -- Grasp the Vitamin B water in the plate with the left gripper -- End -- Grasp the wet wipes with the right gripper -- 'null' +- subtask: Place the wet wipes into the plate with the right gripper + subtask_index: 0 +- subtask: Place the Vitamin B water on the table with the left gripper + subtask_index: 1 +- subtask: Grasp the Vitamin B water in the plate with the left gripper + subtask_index: 2 +- subtask: End + subtask_index: 3 +- subtask: Grasp the wet wipes with the right gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -83,13 +90,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -97,8 +101,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 15726 fps: 30 @@ -126,11 +129,9 @@ data_structure: "Airbot_MMK2_swap_bottle_wet_wipes_plate_qced_hardlink/\n|-- ann \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -400,7 +401,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -408,7 +409,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -435,133 +435,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_swap_bottle_wet_wipes_plate - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - take the bottle out of the plate with left hand and put the wet wipes in with - right hand. 
- sub_tasks: - - subtask: Place the wet wipes into the plate with the right gripper - subtask_index: 0 - - subtask: Place the Vitamin B water on the table with the left gripper - subtask_index: 1 - - subtask: Grasp the Vitamin B water in the plate with the left gripper - subtask_index: 2 - - subtask: End - subtask_index: 3 - - subtask: Grasp the wet wipes with the right gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 15726 - dataset_size: 490.33 MB - data_structure: "Airbot_MMK2_swap_bottle_wet_wipes_plate_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_swap_bread_cake_plate.yaml b/dataset_info/Airbot_MMK2_swap_bread_cake_plate.yaml index 593595a73664228f9835a542ffb3c9574ba83dac..a6233f5f17a7097569bcc65a974bcd86eec6faae 100644 --- a/dataset_info/Airbot_MMK2_swap_bread_cake_plate.yaml +++ b/dataset_info/Airbot_MMK2_swap_bread_cake_plate.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: cake level1: bread level2: cake @@ -51,31 +51,38 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: take the cake out of the plate with left hand and put the bread - in with right hand. +task_instruction: +- take the cake out of the plate with left hand and put the bread in with right hand. sub_tasks: -- Grasp the bread with the right gripper -- Place the cake on the table with the left gripper -- Abnormal -- Place the bread into the plate with the right gripper -- End -- Grasp the cake on the plate with the left gripper -- 'null' +- subtask: Grasp the bread with the right gripper + subtask_index: 0 +- subtask: Place the cake on the table with the left gripper + subtask_index: 1 +- subtask: Abnormal + subtask_index: 2 +- subtask: Place the bread into the plate with the right gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: Grasp the cake on the plate with the left gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -84,13 +91,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -98,8 +102,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 41 total_frames: 11322 fps: 30 @@ -127,11 +130,9 @@ data_structure: "Airbot_MMK2_swap_bread_cake_plate_qced_hardlink/\n|-- annotatio \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:40 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -401,7 +402,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -409,7 +410,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -436,135 +436,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_swap_bread_cake_plate - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - take the cake out of the plate with left hand and put the bread in with right - hand. 
- sub_tasks: - - subtask: Grasp the bread with the right gripper - subtask_index: 0 - - subtask: Place the cake on the table with the left gripper - subtask_index: 1 - - subtask: Abnormal - subtask_index: 2 - - subtask: Place the bread into the plate with the right gripper - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: Grasp the cake on the plate with the left gripper - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 11322 - dataset_size: 475.32 MB - data_structure: "Airbot_MMK2_swap_bread_cake_plate_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(29 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_swap_cake_pumpkin_plate.yaml b/dataset_info/Airbot_MMK2_swap_cake_pumpkin_plate.yaml index a06c559f3091cf66221552b40d67b436e602a1b6..68e623f812dd70fdc54d3241d44b2e3b92affcf0 100644 --- a/dataset_info/Airbot_MMK2_swap_cake_pumpkin_plate.yaml +++ b/dataset_info/Airbot_MMK2_swap_cake_pumpkin_plate.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: plate level1: kitchen_supplies level2: plate @@ -51,30 +51,36 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: take the cake out with left hand and put the pumpkin in with right - hand. +task_instruction: +- take the cake out with left hand and put the pumpkin in with right hand. sub_tasks: -- Grasp the cake with the left gripper -- Grasp the pumpkinx with the right gripper -- End -- Place the cake on the table with the left gripper -- Place the pumpkin into the plate with the right gripper -- 'null' +- subtask: Grasp the cake with the left gripper + subtask_index: 0 +- subtask: Grasp the pumpkinx with the right gripper + subtask_index: 1 +- subtask: End + subtask_index: 2 +- subtask: Place the cake on the table with the left gripper + subtask_index: 3 +- subtask: Place the pumpkin into the plate with the right gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -83,13 +89,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -97,8 +100,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 13761 fps: 30 @@ -126,11 +128,9 @@ data_structure: "Airbot_MMK2_swap_cake_pumpkin_plate_qced_hardlink/\n|-- annotat \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -400,7 +400,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -408,7 +408,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -435,132 +434,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_swap_cake_pumpkin_plate - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - take the cake out with left hand and put the pumpkin in with right hand. 
- sub_tasks: - - subtask: Grasp the cake with the left gripper - subtask_index: 0 - - subtask: Grasp the pumpkinx with the right gripper - subtask_index: 1 - - subtask: End - subtask_index: 2 - - subtask: Place the cake on the table with the left gripper - subtask_index: 3 - - subtask: Place the pumpkin into the plate with the right gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 13761 - dataset_size: 399.28 MB - data_structure: "Airbot_MMK2_swap_cake_pumpkin_plate_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_swap_cake_sponge_plate.yaml b/dataset_info/Airbot_MMK2_swap_cake_sponge_plate.yaml index 6abd3e7219db6b0a4a92330c7b850262168ad614..c51cf77a93f684a9c76749ac483d793b5a8da9fd 100644 --- a/dataset_info/Airbot_MMK2_swap_cake_sponge_plate.yaml +++ b/dataset_info/Airbot_MMK2_swap_cake_sponge_plate.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: cleaning_sponge level1: daily_necessities level2: cleaning_sponge @@ -51,30 +51,36 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: take the sponge out of the plate with left hand and put the cake - in with right hand. +task_instruction: +- take the sponge out of the plate with left hand and put the cake in with right hand. sub_tasks: -- Place the sponge on the table with the left gripper -- Grasp the cake from the table and with the right gripper -- Grasp the sponge from the plate and with the left gripper -- Place the cake into the plate with the right gripper -- End -- 'null' +- subtask: Place the sponge on the table with the left gripper + subtask_index: 0 +- subtask: Grasp the cake from the table and with the right gripper + subtask_index: 1 +- subtask: Grasp the sponge from the plate and with the left gripper + subtask_index: 2 +- subtask: Place the cake into the plate with the right gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -83,13 +89,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -97,8 +100,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 43 total_frames: 6675 fps: 30 @@ -126,11 +128,9 @@ data_structure: "Airbot_MMK2_swap_cake_sponge_plate_qced_hardlink/\n|-- annotati \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:42 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -400,7 +400,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -408,7 +408,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -435,133 +434,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_swap_cake_sponge_plate - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - take the sponge out of the plate with left hand and put the cake in with right - hand. 
- sub_tasks: - - subtask: Place the sponge on the table with the left gripper - subtask_index: 0 - - subtask: Grasp the cake from the table and with the right gripper - subtask_index: 1 - - subtask: Grasp the sponge from the plate and with the left gripper - subtask_index: 2 - - subtask: Place the cake into the plate with the right gripper - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 6675 - dataset_size: 272.17 MB - data_structure: "Airbot_MMK2_swap_cake_sponge_plate_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(31 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_swap_sponge_paper_box_plate.yaml b/dataset_info/Airbot_MMK2_swap_sponge_paper_box_plate.yaml index 6c36c5a01aaf61ffe86e7d2d04cd7612b8303afe..715555e1c638589f69c39831782276e95b1fe09d 100644 --- a/dataset_info/Airbot_MMK2_swap_sponge_paper_box_plate.yaml +++ b/dataset_info/Airbot_MMK2_swap_sponge_paper_box_plate.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: paper_boxes level1: home_storage level2: paper_boxes @@ -51,32 +51,40 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: take the sponge out of the plate with left hand and put the box - in with right hand. +task_instruction: +- take the sponge out of the plate with left hand and put the box in with right hand. sub_tasks: -- Place the sponge on the table with the left gripper -- Grasp the mouse box with the right gripper -- Abnormal -- Static -- Place the mouse box into the plate with the right gripper -- End -- Grasp the sponge in the plate with the left gripper -- 'null' +- subtask: Place the sponge on the table with the left gripper + subtask_index: 0 +- subtask: Grasp the mouse box with the right gripper + subtask_index: 1 +- subtask: Abnormal + subtask_index: 2 +- subtask: Static + subtask_index: 3 +- subtask: Place the mouse box into the plate with the right gripper + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: Grasp the sponge in the plate with the left gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -85,13 +93,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -99,8 +104,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 8731 fps: 30 @@ -128,11 +132,9 @@ data_structure: "Airbot_MMK2_swap_sponge_paper_box_plate_qced_hardlink/\n|-- ann \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -402,7 +404,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -410,7 +412,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -437,137 +438,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_swap_sponge_paper_box_plate - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - take the sponge out of the plate with left hand and put the box in with right - hand. 
- sub_tasks: - - subtask: Place the sponge on the table with the left gripper - subtask_index: 0 - - subtask: Grasp the mouse box with the right gripper - subtask_index: 1 - - subtask: Abnormal - subtask_index: 2 - - subtask: Static - subtask_index: 3 - - subtask: Place the mouse box into the plate with the right gripper - subtask_index: 4 - - subtask: End - subtask_index: 5 - - subtask: Grasp the sponge in the plate with the left gripper - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 8731 - dataset_size: 272.24 MB - data_structure: "Airbot_MMK2_swap_sponge_paper_box_plate_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_sweep_peaper.yaml b/dataset_info/Airbot_MMK2_sweep_peaper.yaml index 4f610d76ed7c1b052dbf8b0d27ed46a6809968ae..87e157611951a37e25d2406646514e75d0bb3885 100644 --- a/dataset_info/Airbot_MMK2_sweep_peaper.yaml +++ b/dataset_info/Airbot_MMK2_sweep_peaper.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: paper_all level1: trash_bag level2: paper_all @@ -51,33 +51,42 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: hold the dustpan with left hand and the broom with right hand, and - sweep the paper ball into the dustpan. +task_instruction: +- hold the dustpan with left hand and the broom with right hand, and sweep the paper + ball into the dustpan. sub_tasks: -- Place the broom on the table with the right gripper -- End -- Grasp the broom from the table with the right gripper -- Grasp the dustpan from the table with the left gripper -- Place the dustpan on the table with the left gripper -- Abnormal -- Sweep the waste paper into the dustpan -- 'null' +- subtask: Place the broom on the table with the right gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Grasp the broom from the table with the right gripper + subtask_index: 2 +- subtask: Grasp the dustpan from the table with the left gripper + subtask_index: 3 +- subtask: Place the dustpan on the table with the left gripper + subtask_index: 4 +- subtask: Abnormal + subtask_index: 5 +- subtask: Sweep the waste paper into the dustpan + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - pick - place - push -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -86,13 +95,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -100,8 +106,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 98 total_frames: 43106 fps: 30 @@ -128,11 +133,9 @@ data_structure: "Airbot_MMK2_sweep_peaper_qced_hardlink/\n|-- annotations\n| | | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:97 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -402,7 +405,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -410,7 +413,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -437,138 +439,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_sweep_peaper - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - hold the dustpan with left hand and the broom with right hand, and sweep the paper - ball into the dustpan. 
- sub_tasks: - - subtask: Place the broom on the table with the right gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Grasp the broom from the table with the right gripper - subtask_index: 2 - - subtask: Grasp the dustpan from the table with the left gripper - subtask_index: 3 - - subtask: Place the dustpan on the table with the left gripper - subtask_index: 4 - - subtask: Abnormal - subtask_index: 5 - - subtask: Sweep the waste paper into the dustpan - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - pick - - place - - push - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 43106 - dataset_size: 1.75 GB - data_structure: "Airbot_MMK2_sweep_peaper_qced_hardlink/\n|-- annotations\n| |--\ - \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ - \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n|\ - \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(86 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_take_BBs_block.yaml b/dataset_info/Airbot_MMK2_take_BBs_block.yaml index 48bbadac24f3fffbba0c0fa6e2a4ce792db4515c..845378728de2447bc8a9c97d4f8b8c58447b7071 100644 --- a/dataset_info/Airbot_MMK2_take_BBs_block.yaml +++ b/dataset_info/Airbot_MMK2_take_BBs_block.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: rectangular_building_blocks level1: building_blocks level2: rectangular_building_blocks @@ -45,31 +45,39 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: take the rectangular building blocks and bullets out of the plate - by hand and place them on the table. +task_instruction: +- take the rectangular building blocks and bullets out of the plate by hand and place + them on the table. sub_tasks: -- Grasp the green rectangular block on the plate with the left gripper -- Grasp the bullet on the plate with the right gripper -- End -- Abnormal -- Place the bullet on the table with the right gripper -- Place the green rectangular block on the table with the left gripper -- 'null' +- subtask: Grasp the green rectangular block on the plate with the left gripper + subtask_index: 0 +- subtask: Grasp the bullet on the plate with the right gripper + subtask_index: 1 +- subtask: End + subtask_index: 2 +- subtask: Abnormal + subtask_index: 3 +- subtask: Place the bullet on the table with the right gripper + subtask_index: 4 +- subtask: Place the green rectangular block on the table with the left gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -78,13 +86,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -92,8 +97,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 43 total_frames: 13927 fps: 30 @@ -120,11 +124,9 @@ data_structure: "Airbot_MMK2_take_BBs_block_qced_hardlink/\n|-- annotations\n| | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:42 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -394,7 +396,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -402,7 +404,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -429,135 +430,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_take_BBs_block - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - take the rectangular building blocks and bullets out of the plate by hand and - place them on the table. 
- sub_tasks: - - subtask: Grasp the green rectangular block on the plate with the left gripper - subtask_index: 0 - - subtask: Grasp the bullet on the plate with the right gripper - subtask_index: 1 - - subtask: End - subtask_index: 2 - - subtask: Abnormal - subtask_index: 3 - - subtask: Place the bullet on the table with the right gripper - subtask_index: 4 - - subtask: Place the green rectangular block on the table with the left gripper - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 13927 - dataset_size: 449.13 MB - data_structure: "Airbot_MMK2_take_BBs_block_qced_hardlink/\n|-- annotations\n| \ - \ |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(31 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_take_block.yaml b/dataset_info/Airbot_MMK2_take_block.yaml index 30cb281457fc9930109bc07a9a36167f14ac71fc..256d420111befaf8652e6ccb8ed2f6226f54d681 100644 --- a/dataset_info/Airbot_MMK2_take_block.yaml +++ b/dataset_info/Airbot_MMK2_take_block.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: building_blocks level1: toys level2: building_blocks @@ -45,29 +45,36 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: take the building blocks off the white plate by hands. +task_instruction: +- take the building blocks off the white plate by hands. sub_tasks: -- End -- Place red rectangular block on the table with the left gripper -- Grasp the red rectangular block on the plate with the left gripper -- Grasp the green rectangular block on the plate with the right gripper -- Place green rectangular block on the table with the right gripper -- 'null' +- subtask: End + subtask_index: 0 +- subtask: Place red rectangular block on the table with the left gripper + subtask_index: 1 +- subtask: Grasp the red rectangular block on the plate with the left gripper + subtask_index: 2 +- subtask: Grasp the green rectangular block on the plate with the right gripper + subtask_index: 3 +- subtask: Place green rectangular block on the table with the right gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -76,13 +83,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -90,8 +94,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 47 total_frames: 10270 fps: 30 @@ -118,11 +121,9 @@ data_structure: "Airbot_MMK2_take_block_qced_hardlink/\n|-- annotations\n| |-- | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:46 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -392,7 +393,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -400,7 +401,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -427,132 +427,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_take_block - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: study_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - take the building blocks off the white plate by hands. 
- sub_tasks: - - subtask: End - subtask_index: 0 - - subtask: Place red rectangular block on the table with the left gripper - subtask_index: 1 - - subtask: Grasp the red rectangular block on the plate with the left gripper - subtask_index: 2 - - subtask: Grasp the green rectangular block on the plate with the right gripper - subtask_index: 3 - - subtask: Place green rectangular block on the table with the right gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 10270 - dataset_size: 334.13 MB - data_structure: "Airbot_MMK2_take_block_qced_hardlink/\n|-- annotations\n| |--\ - \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ - \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n|\ - \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(35 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_take_block_both_hands.yaml b/dataset_info/Airbot_MMK2_take_block_both_hands.yaml index 924632b71afae435a76ac2849d769be05414155c..a8270c000349dec7d630656b1ce9375aae534315 100644 --- a/dataset_info/Airbot_MMK2_take_block_both_hands.yaml +++ b/dataset_info/Airbot_MMK2_take_block_both_hands.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: square_building_blocks level1: toys level2: square_building_blocks @@ -45,31 +45,39 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the building blocks simultaneously with both hands and take - them out of the paper box. +task_instruction: +- pick up the building blocks simultaneously with both hands and take them out of + the paper box. sub_tasks: -- End -- Grasp the blue cube block on the paper box with the left gripper -- Static -- Grasp the green cube block on the paper box with the right gripper -- Place the green cube block on the table with the right gripper -- Place the blue cube block on the table with the left gripper -- 'null' +- subtask: End + subtask_index: 0 +- subtask: Grasp the blue cube block on the paper box with the left gripper + subtask_index: 1 +- subtask: Static + subtask_index: 2 +- subtask: Grasp the green cube block on the paper box with the right gripper + subtask_index: 3 +- subtask: Place the green cube block on the table with the right gripper + subtask_index: 4 +- subtask: Place the blue cube block on the table with the left gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -78,13 +86,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -92,8 +97,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 47 total_frames: 4793 fps: 30 @@ -121,11 +125,9 @@ data_structure: "Airbot_MMK2_take_block_both_hands_qced_hardlink/\n|-- annotatio \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:46 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -395,7 +397,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -403,7 +405,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -430,135 +431,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_take_block_both_hands - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the building blocks simultaneously with both hands and take them out of - the paper box. 
- sub_tasks: - - subtask: End - subtask_index: 0 - - subtask: Grasp the blue cube block on the paper box with the left gripper - subtask_index: 1 - - subtask: Static - subtask_index: 2 - - subtask: Grasp the green cube block on the paper box with the right gripper - subtask_index: 3 - - subtask: Place the green cube block on the table with the right gripper - subtask_index: 4 - - subtask: Place the blue cube block on the table with the left gripper - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 4793 - dataset_size: 221.66 MB - data_structure: "Airbot_MMK2_take_block_both_hands_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(35 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_take_book.yaml b/dataset_info/Airbot_MMK2_take_book.yaml index 41e5c36a38cba580c08736d21bbd145d34252c65..2980711c05b485ddede4e2e35250c8e806e2e214 100644 --- a/dataset_info/Airbot_MMK2_take_book.yaml +++ b/dataset_info/Airbot_MMK2_take_book.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: book level1: stationery level2: book @@ -39,29 +39,34 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: take a book from the forward pile with hand. +task_instruction: +- take a book from the forward pile with hand. sub_tasks: -- End -- Place the book on the table with the right gripper -- Hook the third book on the right with the right gripper -- 'null' +- subtask: End + subtask_index: 0 +- subtask: Place the book on the table with the right gripper + subtask_index: 1 +- subtask: Hook the third book on the right with the right gripper + subtask_index: 2 +- subtask: 'null' + subtask_index: 3 atomic_actions: - clip - pull - pick - takeout - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -70,13 +75,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -84,8 +86,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 98 total_frames: 28489 fps: 30 @@ -112,11 +113,9 @@ data_structure: "Airbot_MMK2_take_book_qced_hardlink/\n|-- annotations\n| |-- | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:97 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -386,7 +385,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -394,7 +393,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -421,130 +419,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_take_book - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: study_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - take a book from the forward pile with hand. 
- sub_tasks: - - subtask: End - subtask_index: 0 - - subtask: Place the book on the table with the right gripper - subtask_index: 1 - - subtask: Hook the third book on the right with the right gripper - subtask_index: 2 - - subtask: 'null' - subtask_index: 3 - atomic_actions: - - clip - - pull - - pick - - takeout - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 28489 - dataset_size: 1.03 GB - data_structure: "Airbot_MMK2_take_book_qced_hardlink/\n|-- annotations\n| |--\ - \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ - \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n|\ - \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(86 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_take_bottle_umbrella.yaml b/dataset_info/Airbot_MMK2_take_bottle_umbrella.yaml index 62f751a030c9216f64ef9cc5b8fc794300610f98..209a605b9f8bae8ac252736ffde09524e2e5f5f5 100644 --- a/dataset_info/Airbot_MMK2_take_bottle_umbrella.yaml +++ b/dataset_info/Airbot_MMK2_take_bottle_umbrella.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: empty_bottle level1: mineral_water level2: empty_bottle @@ -51,30 +51,37 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: take the umbrella off the lid with right hand, and then remove the - bottle from the lid with left hand. +task_instruction: +- take the umbrella off the lid with right hand, and then remove the bottle from the + lid with left hand. sub_tasks: -- Grasp the umbrella placed on the white lid with the left gripper -- End -- Grasp the Yibao placed on the white lid with the left gripper -- Place the umbrella on the table with the right gripper -- Place the Yibao on the table with the left gripper -- 'null' +- subtask: ' Grasp the umbrella placed on the white lid with the left gripper' + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Grasp the Yibao placed on the white lid with the left gripper + subtask_index: 2 +- subtask: Place the umbrella on the table with the right gripper + subtask_index: 3 +- subtask: Place the Yibao on the table with the left gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -83,13 +90,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -97,8 +101,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 54 total_frames: 18663 fps: 30 @@ -126,11 +129,9 @@ data_structure: "Airbot_MMK2_take_bottle_umbrella_qced_hardlink/\n|-- annotation \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:53 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -400,7 +401,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -408,7 +409,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -435,133 +435,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_take_bottle_umbrella - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - take the umbrella off the lid with right hand, and then remove the bottle from - the lid with left hand. 
- sub_tasks: - - subtask: ' Grasp the umbrella placed on the white lid with the left gripper' - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Grasp the Yibao placed on the white lid with the left gripper - subtask_index: 2 - - subtask: Place the umbrella on the table with the right gripper - subtask_index: 3 - - subtask: Place the Yibao on the table with the left gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 18663 - dataset_size: 759.87 MB - data_structure: "Airbot_MMK2_take_bottle_umbrella_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(42 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_take_bowl_sponge.yaml b/dataset_info/Airbot_MMK2_take_bowl_sponge.yaml index c4818fddf21589cea9984a353094d58707ffbfb6..00dc179f955c813daa783decd8a173f3c4437a86 100644 --- a/dataset_info/Airbot_MMK2_take_bowl_sponge.yaml +++ b/dataset_info/Airbot_MMK2_take_bowl_sponge.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: cleaning_sponge level1: cleaning level2: cleaning_sponge @@ -51,30 +51,38 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the sponge and bowl by hand and place them on the table. +task_instruction: +- pick up the sponge and bowl by hand and place them on the table. sub_tasks: -- Place the sponge on the table with the left gripper -- Abnormal -- Grasp the sponge on the white basket and with the left gripper -- Grasp the bowl on the white basket and with the right gripper -- End -- Place the bowl on the table with the right gripper -- 'null' +- subtask: Place the sponge on the table with the left gripper + subtask_index: 0 +- subtask: Abnormal + subtask_index: 1 +- subtask: Grasp the sponge on the white basket and with the left gripper + subtask_index: 2 +- subtask: Grasp the bowl on the white basket and with the right gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: Place the bowl on the table with the right gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -83,13 +91,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -97,8 +102,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 42 total_frames: 11308 fps: 30 @@ -126,11 +130,9 @@ data_structure: "Airbot_MMK2_take_bowl_sponge_qced_hardlink/\n|-- annotations\n| \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:41 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -400,7 +402,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -408,7 +410,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -435,134 +436,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_take_bowl_sponge - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the sponge and bowl by hand and place them on the table. 
- sub_tasks: - - subtask: Place the sponge on the table with the left gripper - subtask_index: 0 - - subtask: Abnormal - subtask_index: 1 - - subtask: Grasp the sponge on the white basket and with the left gripper - subtask_index: 2 - - subtask: Grasp the bowl on the white basket and with the right gripper - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: Place the bowl on the table with the right gripper - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 11308 - dataset_size: 373.70 MB - data_structure: "Airbot_MMK2_take_bowl_sponge_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(30 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_take_cake_both_hands.yaml b/dataset_info/Airbot_MMK2_take_cake_both_hands.yaml index f14d857821ca61843737b54c64319160a25abc7a..9342a1193d9181a50ff4b7aa80311c3378da6c64 100644 --- a/dataset_info/Airbot_MMK2_take_cake_both_hands.yaml +++ b/dataset_info/Airbot_MMK2_take_cake_both_hands.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: chocolate_cake level1: snacks level2: chocolate_cake @@ -51,30 +51,36 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: take the cake out of the basket with both hands and place it on - the table. +task_instruction: +- take the cake out of the basket with both hands and place it on the table. sub_tasks: -- Place the cake on the table with the right gripper -- Place the cake on the table with the left gripper -- Grasp the cake from the white basket with the left gripper -- Grasp the cake from the white basket with the right gripper -- End -- 'null' +- subtask: Place the cake on the table with the right gripper + subtask_index: 0 +- subtask: Place the cake on the table with the left gripper + subtask_index: 1 +- subtask: Grasp the cake from the white basket with the left gripper + subtask_index: 2 +- subtask: Grasp the cake from the white basket with the right gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -83,13 +89,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -97,8 +100,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 48 total_frames: 6044 fps: 30 @@ -126,11 +128,9 @@ data_structure: "Airbot_MMK2_take_cake_both_hands_qced_hardlink/\n|-- annotation \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:47 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -400,7 +400,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -408,7 +408,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -435,132 +434,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_take_cake_both_hands - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - take the cake out of the basket with both hands and place it on the table. 
- sub_tasks: - - subtask: Place the cake on the table with the right gripper - subtask_index: 0 - - subtask: Place the cake on the table with the left gripper - subtask_index: 1 - - subtask: Grasp the cake from the white basket with the left gripper - subtask_index: 2 - - subtask: Grasp the cake from the white basket with the right gripper - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 6044 - dataset_size: 232.01 MB - data_structure: "Airbot_MMK2_take_cake_both_hands_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(36 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_take_cup.yaml b/dataset_info/Airbot_MMK2_take_cup.yaml index 838be1e129e929dd42c09ea61e331cf524424877..b2e561861bd6231bb9197a29d157f1b0d542d435 100644 --- a/dataset_info/Airbot_MMK2_take_cup.yaml +++ b/dataset_info/Airbot_MMK2_take_cup.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: cup level1: cups level2: cup @@ -45,30 +45,37 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: take the cup off the white lid and place them on the table by hands. +task_instruction: +- take the cup off the white lid and place them on the table by hands. sub_tasks: -- place the cup in the table use the left gripper -- place the cup in the table use the right gripper -- Grasp the cup the left gripper -- Grasp the cup the right gripper -- End -- 'null' +- subtask: place the cup in the table use the left gripper + subtask_index: 0 +- subtask: place the cup in the table use the right gripper + subtask_index: 1 +- subtask: Grasp the cup the left gripper + subtask_index: 2 +- subtask: Grasp the cup the right gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - pinch - place - clip - takeout -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -77,13 +84,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -91,8 +95,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 35 total_frames: 5435 fps: 30 @@ -179,11 +182,9 @@ data_structure: 'Airbot_MMK2_take_cup_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:34 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -453,7 +454,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -461,7 +462,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn 
contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -488,192 +488,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_take_cup - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: catering - level2: cafe - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - take the cup off the white lid and place them on the table by hands. 
- sub_tasks: - - subtask: place the cup in the table use the left gripper - subtask_index: 0 - - subtask: place the cup in the table use the right gripper - subtask_index: 1 - - subtask: Grasp the cup the left gripper - subtask_index: 2 - - subtask: Grasp the cup the right gripper - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - pinch - - place - - clip - - takeout - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 5435 - dataset_size: 169.73 MB - data_structure: 'Airbot_MMK2_take_cup_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(23 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_front_rgb - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ 
Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_take_dog_doll.yaml b/dataset_info/Airbot_MMK2_take_dog_doll.yaml index ed400bc85ac5789eb65b046427618c2b44a2cccf..2d5af016f87e76530fc1f4c1590d58dcb8f2f8de 100644 --- a/dataset_info/Airbot_MMK2_take_dog_doll.yaml +++ b/dataset_info/Airbot_MMK2_take_dog_doll.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: toy_dog level1: doll level2: toy_dog @@ -51,28 +51,32 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the toy dog from the white lid by hand and place it on the - table. +task_instruction: +- pick up the toy dog from the white lid by hand and place it on the table. sub_tasks: -- Place the puppy on the table with the left and right grippers -- Grasp the puppy with the left and right grippers -- End -- 'null' +- subtask: Place the puppy on the table with the left and right grippers + subtask_index: 0 +- subtask: Grasp the puppy with the left and right grippers + subtask_index: 1 +- subtask: End + subtask_index: 2 +- subtask: 'null' + subtask_index: 3 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -81,13 +85,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -95,8 +96,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 6425 fps: 30 @@ -123,11 +123,9 @@ data_structure: "Airbot_MMK2_take_dog_doll_qced_hardlink/\n|-- annotations\n| | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -397,7 +395,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -405,7 +403,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -432,128 +429,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_take_dog_doll - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the toy dog from the white lid by hand and place it on the table. 
- sub_tasks: - - subtask: Place the puppy on the table with the left and right grippers - subtask_index: 0 - - subtask: Grasp the puppy with the left and right grippers - subtask_index: 1 - - subtask: End - subtask_index: 2 - - subtask: 'null' - subtask_index: 3 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 6425 - dataset_size: 253.22 MB - data_structure: "Airbot_MMK2_take_dog_doll_qced_hardlink/\n|-- annotations\n| \ - \ |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n|\ - \ |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_take_drink.yaml b/dataset_info/Airbot_MMK2_take_drink.yaml index 266bb03e8dbed375c05b8c7363eafbd33be28bd7..d4af526226c6d5a8c79b2c11c7665b77342ed003 100644 --- a/dataset_info/Airbot_MMK2_take_drink.yaml +++ b/dataset_info/Airbot_MMK2_take_drink.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: any_beverages level1: beverages level2: any_beverages @@ -45,33 +45,44 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the drink by hand and put it on the table. +task_instruction: +- pick up the drink by hand and put it on the table. sub_tasks: -- Place the coffee on the table with the left gripper -- Place the vitamin B water on the table with the left gripper -- Abnormal -- Place the vitamin B water on the table with the right gripper -- Place the coffee on the table with the right gripper -- Grasp the vitamin B water on the white lid with the left gripper -- Grasp the vitamin B water on the white lid with the right gripper -- End -- Grasp the coffee on the white lid with the right gripper -- 'null' +- subtask: Place the coffee on the table with the left gripper + subtask_index: 0 +- subtask: Place the vitamin B water on the table with the left gripper + subtask_index: 1 +- subtask: Abnormal + subtask_index: 2 +- subtask: Place the vitamin B water on the table with the right gripper + subtask_index: 3 +- subtask: Place the coffee on the table with the right gripper + subtask_index: 4 +- subtask: Grasp the vitamin B water on the white lid with the left gripper + subtask_index: 5 +- subtask: Grasp the vitamin B water on the white lid with the right gripper + subtask_index: 6 +- subtask: End + subtask_index: 7 +- subtask: Grasp the coffee on the white lid with the right gripper + subtask_index: 8 +- subtask: 'null' + subtask_index: 9 atomic_actions: - grasp - pick - place 
-robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -80,13 +91,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -94,8 +102,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 86 total_frames: 17345 fps: 30 @@ -122,11 +129,9 @@ data_structure: "Airbot_MMK2_take_drink_qced_hardlink/\n|-- annotations\n| |-- | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:85 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -396,7 +401,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - 
name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -404,7 +409,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -431,140 +435,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_take_drink - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: convenience - level2: supermarket - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the drink by hand and put it on the table. 
- sub_tasks: - - subtask: Place the coffee on the table with the left gripper - subtask_index: 0 - - subtask: Place the vitamin B water on the table with the left gripper - subtask_index: 1 - - subtask: Abnormal - subtask_index: 2 - - subtask: Place the vitamin B water on the table with the right gripper - subtask_index: 3 - - subtask: Place the coffee on the table with the right gripper - subtask_index: 4 - - subtask: Grasp the vitamin B water on the white lid with the left gripper - subtask_index: 5 - - subtask: Grasp the vitamin B water on the white lid with the right gripper - subtask_index: 6 - - subtask: End - subtask_index: 7 - - subtask: Grasp the coffee on the white lid with the right gripper - subtask_index: 8 - - subtask: 'null' - subtask_index: 9 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 17345 - dataset_size: 570.83 MB - data_structure: "Airbot_MMK2_take_drink_qced_hardlink/\n|-- annotations\n| |--\ - \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ - \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n|\ - \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| 
\ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... (74 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, 
Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_take_egg.yaml b/dataset_info/Airbot_MMK2_take_egg.yaml index 07184a9867cf261e68f8b8aeabe41e2865b4276c..bf0e934876afcd3e47c501eaffb0008e6e66d44f 100644 --- a/dataset_info/Airbot_MMK2_take_egg.yaml +++ b/dataset_info/Airbot_MMK2_take_egg.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: egg_carton level1: disposable_items level2: egg_carton @@ -45,27 +45,32 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: take out the brown eggs from the egg box. +task_instruction: +- take out the brown eggs from the egg box. sub_tasks: -- End -- Place the egg on the table with the right gripper -- Grasp the egg from the egg storage box with the right gripper -- 'null' +- subtask: End + subtask_index: 0 +- subtask: Place the egg on the table with the right gripper + subtask_index: 1 +- subtask: Grasp the egg from the egg storage box with the right gripper + subtask_index: 2 +- subtask: 'null' + subtask_index: 3 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -74,13 +79,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -88,8 +90,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 101 total_frames: 18643 fps: 30 @@ -116,11 +117,9 @@ data_structure: "Airbot_MMK2_take_egg_qced_hardlink/\n|-- annotations\n| |-- e | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:100 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -390,7 +389,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -398,7 +397,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -425,128 +423,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_take_egg - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: catering - level2: restaurant - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - take out the brown eggs from the egg box. 
- sub_tasks: - - subtask: End - subtask_index: 0 - - subtask: Place the egg on the table with the right gripper - subtask_index: 1 - - subtask: Grasp the egg from the egg storage box with the right gripper - subtask_index: 2 - - subtask: 'null' - subtask_index: 3 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 18643 - dataset_size: 758.58 MB - data_structure: "Airbot_MMK2_take_egg_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n\ - | |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n\ - | |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n\ - | |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n\ - | `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n\ - | |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| \ - \ |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |--\ - \ episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n\ - | |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| \ - \ `-- episode_000011.parquet\n| `-- ... 
(89 more entries)\n|-- meta\n\ - | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ - \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ - \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ - \ `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, 
Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_take_electronics.yaml b/dataset_info/Airbot_MMK2_take_electronics.yaml index d78488044c711337856ff74518e5adb6b016076f..87cbb9c778d7a746d404512c249dedee6ef39104 100644 --- a/dataset_info/Airbot_MMK2_take_electronics.yaml +++ b/dataset_info/Airbot_MMK2_take_electronics.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: paper_boxes level1: baskets level2: paper_boxes @@ -45,31 +45,38 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: take the calculator box and mouse box off the lid and place them - on the table. +task_instruction: +- take the calculator box and mouse box off the lid and place them on the table. sub_tasks: -- Abnormal -- Place the mouse box on the table with the left gripper -- Grasp the calculator box on the white lid and with the right gripper -- Grasp the mouse box on the white lid and with the left gripper -- End -- Place the calculator box on the table with the right gripper -- 'null' +- subtask: Abnormal + subtask_index: 0 +- subtask: Place the mouse box on the table with the left gripper + subtask_index: 1 +- subtask: Grasp the calculator box on the white lid and with the right gripper + subtask_index: 2 +- subtask: Grasp the mouse box on the white lid and with the left gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: Place the calculator box on the table with the right gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -78,13 +85,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -92,8 +96,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 49 total_frames: 13465 fps: 30 @@ -121,11 +124,9 @@ data_structure: "Airbot_MMK2_take_electronics_qced_hardlink/\n|-- annotations\n| \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:48 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -395,7 +396,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -403,7 +404,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -430,134 +430,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_take_electronics - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: industry - level2: factory - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - take the calculator box and mouse box off the lid and place them on the table. 
- sub_tasks: - - subtask: Abnormal - subtask_index: 0 - - subtask: Place the mouse box on the table with the left gripper - subtask_index: 1 - - subtask: Grasp the calculator box on the white lid and with the right gripper - subtask_index: 2 - - subtask: Grasp the mouse box on the white lid and with the left gripper - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: Place the calculator box on the table with the right gripper - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 13465 - dataset_size: 400.20 MB - data_structure: "Airbot_MMK2_take_electronics_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_take_part_both_hands.yaml b/dataset_info/Airbot_MMK2_take_part_both_hands.yaml index 638021a88cad8edf17c286f38c5ea7070cbc39c8..4cdd4e9da83b95e15fb7b06375f41c9aa7f54f63 100644 --- a/dataset_info/Airbot_MMK2_take_part_both_hands.yaml +++ b/dataset_info/Airbot_MMK2_take_part_both_hands.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: item level1: item level2: item @@ -51,30 +51,36 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: take the items off the building blocks with both hands and place - them on the table. +task_instruction: +- take the items off the building blocks with both hands and place them on the table. sub_tasks: -- Grasp the steel tube on the cube block with the right gripper -- Grasp the steel tube on the cube block with the left gripper -- Place the steel tube on the table with the left gripper -- Place the steel tube on the table with the right gripper -- End -- 'null' +- subtask: Grasp the steel tube on the cube block with the right gripper + subtask_index: 0 +- subtask: Grasp the steel tube on the cube block with the left gripper + subtask_index: 1 +- subtask: Place the steel tube on the table with the left gripper + subtask_index: 2 +- subtask: Place the steel tube on the table with the right gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -83,13 +89,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -97,8 +100,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 6348 fps: 30 @@ -126,11 +128,9 @@ data_structure: "Airbot_MMK2_take_part_both_hands_qced_hardlink/\n|-- annotation \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -400,7 +400,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -408,7 +408,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -435,132 +434,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_take_part_both_hands - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - take the items off the building blocks with both hands and place them on the table. 
- sub_tasks: - - subtask: Grasp the steel tube on the cube block with the right gripper - subtask_index: 0 - - subtask: Grasp the steel tube on the cube block with the left gripper - subtask_index: 1 - - subtask: Place the steel tube on the table with the left gripper - subtask_index: 2 - - subtask: Place the steel tube on the table with the right gripper - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 6348 - dataset_size: 239.11 MB - data_structure: "Airbot_MMK2_take_part_both_hands_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_take_tissues.yaml b/dataset_info/Airbot_MMK2_take_tissues.yaml index a2d20dbdc6f3bc86dd04f4d6d77c0f6ab817f616..4d6864946b0879dac0d95a695c763a9ac9dafd21 100644 --- a/dataset_info/Airbot_MMK2_take_tissues.yaml +++ b/dataset_info/Airbot_MMK2_take_tissues.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: tissue_paper level1: kitchen_supplies level2: tissue_paper @@ -45,30 +45,38 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the tissue with your hand and put it on the table. +task_instruction: +- pick up the tissue with your hand and put it on the table. sub_tasks: -- Place the tissue on the table with the right gripper -- End -- Grasp the tissue on the white lid with the right gripper -- Place the tissue on the table with the left gripper -- Abnormal -- Grasp the tissue on the white lid with the left gripper -- 'null' +- subtask: Place the tissue on the table with the right gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Grasp the tissue on the white lid with the right gripper + subtask_index: 2 +- subtask: Place the tissue on the table with the left gripper + subtask_index: 3 +- subtask: Abnormal + subtask_index: 4 +- subtask: Grasp the tissue on the white lid with the left gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -77,13 +85,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -91,8 +96,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 8995 fps: 30 @@ -119,11 +123,9 @@ data_structure: "Airbot_MMK2_take_tissues_qced_hardlink/\n|-- annotations\n| | | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -393,7 +395,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -401,7 +403,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -428,134 +429,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_take_tissues - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: other - level2: laboratory - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the tissue with your hand and put it on the table. 
- sub_tasks: - - subtask: Place the tissue on the table with the right gripper - subtask_index: 0 - - subtask: End - subtask_index: 1 - - subtask: Grasp the tissue on the white lid with the right gripper - subtask_index: 2 - - subtask: Place the tissue on the table with the left gripper - subtask_index: 3 - - subtask: Abnormal - subtask_index: 4 - - subtask: Grasp the tissue on the white lid with the left gripper - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 8995 - dataset_size: 280.64 MB - data_structure: "Airbot_MMK2_take_tissues_qced_hardlink/\n|-- annotations\n| |--\ - \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ - \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n|\ - \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Airbot_MMK2_take_toy_car.yaml b/dataset_info/Airbot_MMK2_take_toy_car.yaml index 7d2468247381722fa4320dff3d2d7dfb83e6dd51..818cdd45b2524a928a547aefa4154b9d5bed18ce 100644 --- a/dataset_info/Airbot_MMK2_take_toy_car.yaml +++ b/dataset_info/Airbot_MMK2_take_toy_car.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: toy_car level1: doll level2: toy_car @@ -45,30 +45,38 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: take the toy car out of the plate by hand and place it on the table. +task_instruction: +- take the toy car out of the plate by hand and place it on the table. sub_tasks: -- Grasp the toy car on the plate and with the left gripper -- Grasp the toy car on the plate and with the right gripper -- Abnormal -- End -- Place the toy car on the table with the right gripper -- Place the toy car on the table with the left gripper -- 'null' +- subtask: Grasp the toy car on the plate and with the left gripper + subtask_index: 0 +- subtask: Grasp the toy car on the plate and with the right gripper + subtask_index: 1 +- subtask: Abnormal + subtask_index: 2 +- subtask: End + subtask_index: 3 +- subtask: Place the toy car on the table with the right gripper + subtask_index: 4 +- subtask: Place the toy car on the table with the left gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - pick - place -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -77,13 +85,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -91,8 +96,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 49 total_frames: 14333 fps: 30 @@ -119,11 +123,9 @@ data_structure: "Airbot_MMK2_take_toy_car_qced_hardlink/\n|-- annotations\n| | | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:48 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -393,7 +395,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -401,7 +403,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -428,134 +429,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_take_toy_car - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: other - level2: laboratory - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - take the toy car out of the plate by hand and place it on the table. 
- sub_tasks: - - subtask: Grasp the toy car on the plate and with the left gripper - subtask_index: 0 - - subtask: Grasp the toy car on the plate and with the right gripper - subtask_index: 1 - - subtask: Abnormal - subtask_index: 2 - - subtask: End - subtask_index: 3 - - subtask: Place the toy car on the table with the right gripper - subtask_index: 4 - - subtask: Place the toy car on the table with the left gripper - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 14333 - dataset_size: 457.72 MB - data_structure: "Airbot_MMK2_take_toy_car_qced_hardlink/\n|-- annotations\n| |--\ - \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ - \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n|\ - \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_turn_page.yaml b/dataset_info/Airbot_MMK2_turn_page.yaml index f69f08260dd14fd4c212e527ff4daeeae2d311ba..6aa1eaf79dd05feec00f66da318d3897993bd875 100644 --- a/dataset_info/Airbot_MMK2_turn_page.yaml +++ b/dataset_info/Airbot_MMK2_turn_page.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: book level1: stationery level2: books @@ -39,25 +39,30 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: turn a page of the english book on the table with right hand. +task_instruction: +- turn a page of the english book on the table with right hand. sub_tasks: -- End -- Turn the book to the next page with the right gripper -- Abnormal -- 'null' +- subtask: End + subtask_index: 0 +- subtask: Turn the book to the next page with the right gripper + subtask_index: 1 +- subtask: Abnormal + subtask_index: 2 +- subtask: 'null' + subtask_index: 3 atomic_actions: - flip -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -66,13 +71,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -80,8 +82,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 149 total_frames: 19581 fps: 30 @@ -108,11 +109,9 @@ data_structure: "Airbot_MMK2_turn_page_qced_hardlink/\n|-- annotations\n| |-- | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:148 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -382,7 +381,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -390,7 +389,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -417,126 +415,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_turn_page - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: study_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - turn a page of the english book on the table with right hand. 
- sub_tasks: - - subtask: End - subtask_index: 0 - - subtask: Turn the book to the next page with the right gripper - subtask_index: 1 - - subtask: Abnormal - subtask_index: 2 - - subtask: 'null' - subtask_index: 3 - atomic_actions: - - flip - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 19581 - dataset_size: 740.88 MB - data_structure: "Airbot_MMK2_turn_page_qced_hardlink/\n|-- annotations\n| |--\ - \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ - \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n|\ - \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... (137 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. 
- homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_unplug.yaml b/dataset_info/Airbot_MMK2_unplug.yaml index 30d6aeb230ca31fc4a3a3ff1d32075935b30545b..b27a9f0c41b64ca262204e651400896f9bbf56fc 100644 --- a/dataset_info/Airbot_MMK2_unplug.yaml +++ b/dataset_info/Airbot_MMK2_unplug.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: power_strips level1: appliances level2: power_strips @@ -45,31 +45,38 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. 
-task_result: 'null' -task_instruction: the left hand secures the power strip, while the right hand unplugs - the charger and sets it down. +task_instruction: +- the left hand secures the power strip, while the right hand unplugs the charger + and sets it down. sub_tasks: -- End -- Release socket with the left hand -- Put down plug with the right hand -- Press and hold the socket with the left hand -- Unplug plug with the right hand -- 'null' +- subtask: End + subtask_index: 0 +- subtask: Release socket with the left hand + subtask_index: 1 +- subtask: Put down plug with the right hand + subtask_index: 2 +- subtask: Press and hold the socket with the left hand + subtask_index: 3 +- subtask: Unplug plug with the right hand + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - uncap - presss - place - pick -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -78,13 +85,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -92,8 +96,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 90 total_frames: 21487 fps: 30 @@ -120,11 +123,9 @@ data_structure: "Airbot_MMK2_unplug_qced_hardlink/\n|-- annotations\n| |-- eef | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:89 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -394,7 +395,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -402,7 +403,6 @@ dataset_description: This 
dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -429,134 +429,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_unplug - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: office_workspace - level2: office - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - the left hand secures the power strip, while the right hand unplugs the charger - and sets it down. 
- sub_tasks: - - subtask: End - subtask_index: 0 - - subtask: Release socket with the left hand - subtask_index: 1 - - subtask: Put down plug with the right hand - subtask_index: 2 - - subtask: Press and hold the socket with the left hand - subtask_index: 3 - - subtask: Unplug plug with the right hand - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - uncap - - presss - - place - - pick - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 21487 - dataset_size: 870.71 MB - data_structure: "Airbot_MMK2_unplug_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n\ - | |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n\ - | |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n\ - | |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n\ - | `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n\ - | |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| \ - \ |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |--\ - \ episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n\ - | |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| \ - \ `-- episode_000011.parquet\n| `-- ... 
(78 more entries)\n|-- meta\n\ - | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ - \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ - \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ - \ `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, 
Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Airbot_MMK2_unscrew_bottle_cap.yaml b/dataset_info/Airbot_MMK2_unscrew_bottle_cap.yaml index a12bdd44a6694d2c9a7329ba8263944d2ed55f68..89fc4e57ad99049dc46ce4ed164d4a8bc6198c14 100644 --- a/dataset_info/Airbot_MMK2_unscrew_bottle_cap.yaml +++ b/dataset_info/Airbot_MMK2_unscrew_bottle_cap.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: beverages level1: beverages level2: beverages @@ -39,32 +39,38 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the bottle with left hand and unscrew the cap with right - hand. +task_instruction: +- pick up the bottle with left hand and unscrew the cap with right hand. sub_tasks: -- Grasp the bottle with the left gripper -- Place the bottle on the table with the left gripper -- Lift the bottle up with the left gripper -- Unscrew the bottle cap with the right hand while holding the bottle with the left - hand -- End -- 'null' +- subtask: Grasp the bottle with the left gripper + subtask_index: 0 +- subtask: Place the bottle on the table with the left gripper + subtask_index: 1 +- subtask: Lift the bottle up with the left gripper + subtask_index: 2 +- subtask: Unscrew the bottle cap with the right hand while holding the bottle with + the left hand + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place - turn -robot_name: Airbot_MMK2 +robot_name: +- Airbot_MMK2 end_effector_type: five_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb - cam_front_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -73,13 +79,10 @@ came_info: &id008 cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -87,8 +90,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 16965 fps: 30 @@ -116,11 +118,9 @@ data_structure: "Airbot_MMK2_unscrew_bottle_cap_qced_hardlink/\n|-- annotations\ \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ \ `-- observation.images.cam_right_wrist_rgb" -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -390,7 +390,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -398,7 +398,6 @@ dataset_description: This dataset uses an 
extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -425,134 +424,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Airbot_MMK2_unscrew_bottle_cap - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the bottle with left hand and unscrew the cap with right hand. 
- sub_tasks: - - subtask: Grasp the bottle with the left gripper - subtask_index: 0 - - subtask: Place the bottle on the table with the left gripper - subtask_index: 1 - - subtask: Lift the bottle up with the left gripper - subtask_index: 2 - - subtask: Unscrew the bottle cap with the right hand while holding the bottle with - the left hand - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - - turn - robot_name: - - Airbot_MMK2 - end_effector_type: five_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 16965 - dataset_size: 632.68 MB - data_structure: "Airbot_MMK2_unscrew_bottle_cap_qced_hardlink/\n|-- annotations\n\ - | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n\ - | |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ - | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ - \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ - | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ - \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |--\ - \ episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n\ - | |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| \ - \ |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `--\ - \ ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ - | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n \ - \ |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ - \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong 
Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Galaxea_R1_Lite_arrange_baai_then_brain.yaml b/dataset_info/Galaxea_R1_Lite_arrange_baai_then_brain.yaml index a8be867016bd7cb27c97e31ed9583bac03720b14..bffef7ec2b65a30a809199d679df30c184169d96 100644 --- a/dataset_info/Galaxea_R1_Lite_arrange_baai_then_brain.yaml +++ b/dataset_info/Galaxea_R1_Lite_arrange_baai_then_brain.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: early_education_toys level1: buiding_blocks level2: early_education_toys @@ -39,35 +39,45 @@ objects: &id006 level5:operation_platform_height: 77.2 task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use the gripper to find blocks with the letters b, a, a, and i on - the table and arrange them into BAAI, then find r and N and turn the arranged baai - into brian. +task_instruction: +- use the gripper to find blocks with the letters b, a, a, and i on the table and + arrange them into BAAI, then find r and N and turn the arranged baai into brian. sub_tasks: -- Abnormal -- Place the second block A in the third location -- Place the block I in the fourth location -- Place the first block A in the second location -- Place the block B in the first location -- End -- Move the block A out of the second location -- Place the block N in the fifth location -- Place the block R in the second location -- 'null' +- subtask: Abnormal + subtask_index: 0 +- subtask: Place the second block A in the third location + subtask_index: 1 +- subtask: Place the block I in the fourth location + subtask_index: 2 +- subtask: Place the first block A in the second location + subtask_index: 3 +- subtask: Place the block B in the first location + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: Move the block A out of the second location + subtask_index: 6 +- subtask: Place the block N in the fifth location + subtask_index: 7 +- subtask: Place the block R in the second location + subtask_index: 8 +- subtask: 'null' + subtask_index: 9 
atomic_actions: - grasp - pick - place -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -78,13 +88,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -92,8 +99,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 56 total_frames: 44471 fps: 30 @@ -180,11 +186,9 @@ data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_arrange_baai_then_brain_qced_ha |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:55 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -452,7 +456,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -460,7 +464,6 @@ dataset_description: This dataset uses an extended 
format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -487,200 +490,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_arrange_baai_then_brain - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: eduction - level2: school - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use the gripper to find blocks with the letters b, a, a, and i on the table and - arrange them into BAAI, then find r and N and turn the arranged baai into brian. 
- sub_tasks: - - subtask: Abnormal - subtask_index: 0 - - subtask: Place the second block A in the third location - subtask_index: 1 - - subtask: Place the block I in the fourth location - subtask_index: 2 - - subtask: Place the first block A in the second location - subtask_index: 3 - - subtask: Place the block B in the first location - subtask_index: 4 - - subtask: End - subtask_index: 5 - - subtask: Move the block A out of the second location - subtask_index: 6 - - subtask: Place the block N in the fifth location - subtask_index: 7 - - subtask: Place the block R in the second location - subtask_index: 8 - - subtask: 'null' - subtask_index: 9 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 44471 - dataset_size: 1.26 GB - data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_arrange_baai_then_brain_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- 
episode_000011.parquet - - | `-- ... (44 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran 
Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Galaxea_R1_Lite_change_baai_into_brain.yaml b/dataset_info/Galaxea_R1_Lite_change_baai_into_brain.yaml index 99f58dcac5861390fb57720b4ccbe74d8869fc2d..e67ba04bfcba7d54ba1117b3b1b64db593a0d383 100644 --- a/dataset_info/Galaxea_R1_Lite_change_baai_into_brain.yaml +++ b/dataset_info/Galaxea_R1_Lite_change_baai_into_brain.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: buiding_blocks level1: toys level2: buiding_blocks @@ -39,28 +39,34 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: rearrange the word baai as brain. +task_instruction: +- rearrange the word baai as brain. sub_tasks: -- Disassemble the second character A -- Place the character R between first character B and third character A -- End -- Place the character N after character I -- 'null' +- subtask: Disassemble the second character A + subtask_index: 0 +- subtask: Place the character R between first character B and third character A + subtask_index: 1 +- subtask: End + subtask_index: 2 +- subtask: Place the character N after character I + subtask_index: 3 +- subtask: 'null' + subtask_index: 4 atomic_actions: - grasp - pick - place -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -71,13 +77,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -85,8 +88,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 67 total_frames: 83804 fps: 30 @@ -173,11 +175,9 @@ data_structure: 'Galaxea_R1_Lite_change_baai_into_brain_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:66 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -445,7 +445,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -453,7 +453,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback 
regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -480,189 +479,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_change_baai_into_brain - dataset_uuid: 75dad2ee-672e-402a-823a-198e5e42af62 - scene_type: - level1: eduction - level2: school - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - rearrange the word baai as brain. - sub_tasks: - - subtask: Disassemble the second character A - subtask_index: 0 - - subtask: Place the character R between first character B and third character A - subtask_index: 1 - - subtask: End - subtask_index: 2 - - subtask: Place the character N after character I - subtask_index: 3 - - subtask: 'null' - subtask_index: 4 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 83804 - dataset_size: 3.61 GB - data_structure: 'Galaxea_R1_Lite_change_baai_into_brain_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (55 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null 
-data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Galaxea_R1_Lite_classify_object_five.yaml b/dataset_info/Galaxea_R1_Lite_classify_object_five.yaml index 4a50c818af015e0696c055ff5fd7009c1e982e9b..3c6fee8915cfb0605e76d727a4e1eda050a20468 100644 --- a/dataset_info/Galaxea_R1_Lite_classify_object_five.yaml +++ b/dataset_info/Galaxea_R1_Lite_classify_object_five.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: brown_basket level1: baskets level2: brown_basket @@ -69,82 +69,141 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: place the food in the right basket with the right gripper, and place - the non food items in the left basket with the left gripper. +task_instruction: +- place the food in the right basket with the right gripper, and place the non food + items in the left basket with the left gripper. 
sub_tasks: -- Grasp the rubiks cube and put it in the left basket -- Grasp the waffle and put it in the right basket -- Grasp the soft cleanser and put it in the left basket -- Grasp the back scratcher and put it in the left basket -- Grasp the apple and put it in the right basket -- End -- Grasp the white eraser and put it in the left basket -- Grasp the square chewing gum and put it in the right basket -- Grasp the power strip and put it in the left basket -- Grasp the cleaning agent and put it in the left basket -- Grasp the soda water and put it in the right basket -- Grasp the spoon and put it in the left basket -- Grasp the duck toys and put it in the left basket -- Grasp the triangle cake and put it in the right basket -- Grasp the compass and put it in the right basket -- Grasp the cookie and put it in the right basket -- Grasp the compass and put it in the left basket -- Grasp the orange and put it in the right basket -- Grasp the ballpoint pen and put it in the left basket -- Grasp the round bread and put it in the right basket -- Grasp the egg yolk pastry and put it in the right basket -- Grasp the lemon and put it in the right basket -- Grasp the soap and put it in the left basket -- Grasp the washing liquid and put it in the left basket -- Grasp the hard cleanser and put it in the left basket -- Grasp the milk and put it in the right basket -- Grasp the black marker and put it in the left basket -- Grasp the banana and put it in the right basket -- Grasp the black glass cup and put it in the left basket -- Grasp the brush and put it in the left basket -- Grasp the bath ball and put it in the left basket -- Grasp the blue towel and put it in the left basket -- Grasp the tea cup and put it in the left basket -- Grasp the peeler and put it in the left basket -- Grasp the brown towel and put it in the left basket -- Grasp the peach and put it in the right basket -- Abnormal -- Grasp the chocolate and put it in the right basket -- Grasp the grey towel and 
put it in the left basket -- Grasp the canned cola and put it in the right basket -- Grasp the tape and put it in the left basket -- Grasp the bread slice and put it in the right basket -- Grasp the tin and put it in the right basket -- Grasp the soap and put it in the right basket -- Grasp the glasses case and put it in the left basket -- Grasp the yellow duck and put it in the right basket -- Grasp the peach doll and put it in the right basket -- Grasp the blue cup and put it in the left basket -- Grasp the pen container and put it in the left basket -- Grasp the red duck and put it in the left basket -- Grasp the lime and put it in the right basket -- Grasp the long bread and put it in the right basket -- Grasp the yogurt and put it in the right basket -- Grasp the potato chips and put it in the right basket -- Grasp the can and put it in the right basket -- Grasp the ad milk and put it in the right basket -- Grasp the blue marker and put it in the left basket -- 'null' +- subtask: Grasp the rubiks cube and put it in the left basket + subtask_index: 0 +- subtask: Grasp the waffle and put it in the right basket + subtask_index: 1 +- subtask: Grasp the soft cleanser and put it in the left basket + subtask_index: 2 +- subtask: Grasp the back scratcher and put it in the left basket + subtask_index: 3 +- subtask: Grasp the apple and put it in the right basket + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: Grasp the white eraser and put it in the left basket + subtask_index: 6 +- subtask: Grasp the square chewing gum and put it in the right basket + subtask_index: 7 +- subtask: Grasp the power strip and put it in the left basket + subtask_index: 8 +- subtask: Grasp the cleaning agent and put it in the left basket + subtask_index: 9 +- subtask: Grasp the soda water and put it in the right basket + subtask_index: 10 +- subtask: Grasp the spoon and put it in the left basket + subtask_index: 11 +- subtask: Grasp the duck toys and put it in the left 
basket + subtask_index: 12 +- subtask: Grasp the triangle cake and put it in the right basket + subtask_index: 13 +- subtask: Grasp the compass and put it in the right basket + subtask_index: 14 +- subtask: Grasp the cookie and put it in the right basket + subtask_index: 15 +- subtask: Grasp the compass and put it in the left basket + subtask_index: 16 +- subtask: Grasp the orange and put it in the right basket + subtask_index: 17 +- subtask: Grasp the ballpoint pen and put it in the left basket + subtask_index: 18 +- subtask: Grasp the round bread and put it in the right basket + subtask_index: 19 +- subtask: Grasp the egg yolk pastry and put it in the right basket + subtask_index: 20 +- subtask: Grasp the lemon and put it in the right basket + subtask_index: 21 +- subtask: Grasp the soap and put it in the left basket + subtask_index: 22 +- subtask: Grasp the washing liquid and put it in the left basket + subtask_index: 23 +- subtask: Grasp the hard cleanser and put it in the left basket + subtask_index: 24 +- subtask: Grasp the milk and put it in the right basket + subtask_index: 25 +- subtask: Grasp the black marker and put it in the left basket + subtask_index: 26 +- subtask: Grasp the banana and put it in the right basket + subtask_index: 27 +- subtask: Grasp the black glass cup and put it in the left basket + subtask_index: 28 +- subtask: Grasp the brush and put it in the left basket + subtask_index: 29 +- subtask: Grasp the bath ball and put it in the left basket + subtask_index: 30 +- subtask: Grasp the blue towel and put it in the left basket + subtask_index: 31 +- subtask: Grasp the tea cup and put it in the left basket + subtask_index: 32 +- subtask: Grasp the peeler and put it in the left basket + subtask_index: 33 +- subtask: Grasp the brown towel and put it in the left basket + subtask_index: 34 +- subtask: Grasp the peach and put it in the right basket + subtask_index: 35 +- subtask: Abnormal + subtask_index: 36 +- subtask: Grasp the chocolate and 
put it in the right basket + subtask_index: 37 +- subtask: Grasp the grey towel and put it in the left basket + subtask_index: 38 +- subtask: Grasp the canned cola and put it in the right basket + subtask_index: 39 +- subtask: Grasp the tape and put it in the left basket + subtask_index: 40 +- subtask: Grasp the bread slice and put it in the right basket + subtask_index: 41 +- subtask: Grasp the tin and put it in the right basket + subtask_index: 42 +- subtask: Grasp the soap and put it in the right basket + subtask_index: 43 +- subtask: Grasp the glasses case and put it in the left basket + subtask_index: 44 +- subtask: Grasp the yellow duck and put it in the right basket + subtask_index: 45 +- subtask: Grasp the peach doll and put it in the right basket + subtask_index: 46 +- subtask: Grasp the blue cup and put it in the left basket + subtask_index: 47 +- subtask: Grasp the pen container and put it in the left basket + subtask_index: 48 +- subtask: Grasp the red duck and put it in the left basket + subtask_index: 49 +- subtask: Grasp the lime and put it in the right basket + subtask_index: 50 +- subtask: Grasp the long bread and put it in the right basket + subtask_index: 51 +- subtask: Grasp the yogurt and put it in the right basket + subtask_index: 52 +- subtask: Grasp the potato chips and put it in the right basket + subtask_index: 53 +- subtask: Grasp the can and put it in the right basket + subtask_index: 54 +- subtask: Grasp the ad milk and put it in the right basket + subtask_index: 55 +- subtask: Grasp the blue marker and put it in the left basket + subtask_index: 56 +- subtask: 'null' + subtask_index: 57 atomic_actions: - grasp - pick - place -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -155,13 +214,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -169,8 +225,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 195 total_frames: 160875 fps: 30 @@ -257,11 +312,9 @@ data_structure: 'Galaxea_R1_Lite_classify_object_five_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:194 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -529,7 +582,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -537,7 +590,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback 
regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -564,296 +616,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_classify_object_five - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: commercial_convenience - level2: supermarket - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - place the food in the right basket with the right gripper, and place the non food - items in the left basket with the left gripper. 
- sub_tasks: - - subtask: Grasp the rubiks cube and put it in the left basket - subtask_index: 0 - - subtask: Grasp the waffle and put it in the right basket - subtask_index: 1 - - subtask: Grasp the soft cleanser and put it in the left basket - subtask_index: 2 - - subtask: Grasp the back scratcher and put it in the left basket - subtask_index: 3 - - subtask: Grasp the apple and put it in the right basket - subtask_index: 4 - - subtask: End - subtask_index: 5 - - subtask: Grasp the white eraser and put it in the left basket - subtask_index: 6 - - subtask: Grasp the square chewing gum and put it in the right basket - subtask_index: 7 - - subtask: Grasp the power strip and put it in the left basket - subtask_index: 8 - - subtask: Grasp the cleaning agent and put it in the left basket - subtask_index: 9 - - subtask: Grasp the soda water and put it in the right basket - subtask_index: 10 - - subtask: Grasp the spoon and put it in the left basket - subtask_index: 11 - - subtask: Grasp the duck toys and put it in the left basket - subtask_index: 12 - - subtask: Grasp the triangle cake and put it in the right basket - subtask_index: 13 - - subtask: Grasp the compass and put it in the right basket - subtask_index: 14 - - subtask: Grasp the cookie and put it in the right basket - subtask_index: 15 - - subtask: Grasp the compass and put it in the left basket - subtask_index: 16 - - subtask: Grasp the orange and put it in the right basket - subtask_index: 17 - - subtask: Grasp the ballpoint pen and put it in the left basket - subtask_index: 18 - - subtask: Grasp the round bread and put it in the right basket - subtask_index: 19 - - subtask: Grasp the egg yolk pastry and put it in the right basket - subtask_index: 20 - - subtask: Grasp the lemon and put it in the right basket - subtask_index: 21 - - subtask: Grasp the soap and put it in the left basket - subtask_index: 22 - - subtask: Grasp the washing liquid and put it in the left basket - subtask_index: 23 - - subtask: 
Grasp the hard cleanser and put it in the left basket - subtask_index: 24 - - subtask: Grasp the milk and put it in the right basket - subtask_index: 25 - - subtask: Grasp the black marker and put it in the left basket - subtask_index: 26 - - subtask: Grasp the banana and put it in the right basket - subtask_index: 27 - - subtask: Grasp the black glass cup and put it in the left basket - subtask_index: 28 - - subtask: Grasp the brush and put it in the left basket - subtask_index: 29 - - subtask: Grasp the bath ball and put it in the left basket - subtask_index: 30 - - subtask: Grasp the blue towel and put it in the left basket - subtask_index: 31 - - subtask: Grasp the tea cup and put it in the left basket - subtask_index: 32 - - subtask: Grasp the peeler and put it in the left basket - subtask_index: 33 - - subtask: Grasp the brown towel and put it in the left basket - subtask_index: 34 - - subtask: Grasp the peach and put it in the right basket - subtask_index: 35 - - subtask: Abnormal - subtask_index: 36 - - subtask: Grasp the chocolate and put it in the right basket - subtask_index: 37 - - subtask: Grasp the grey towel and put it in the left basket - subtask_index: 38 - - subtask: Grasp the canned cola and put it in the right basket - subtask_index: 39 - - subtask: Grasp the tape and put it in the left basket - subtask_index: 40 - - subtask: Grasp the bread slice and put it in the right basket - subtask_index: 41 - - subtask: Grasp the tin and put it in the right basket - subtask_index: 42 - - subtask: Grasp the soap and put it in the right basket - subtask_index: 43 - - subtask: Grasp the glasses case and put it in the left basket - subtask_index: 44 - - subtask: Grasp the yellow duck and put it in the right basket - subtask_index: 45 - - subtask: Grasp the peach doll and put it in the right basket - subtask_index: 46 - - subtask: Grasp the blue cup and put it in the left basket - subtask_index: 47 - - subtask: Grasp the pen container and put it in the left 
basket - subtask_index: 48 - - subtask: Grasp the red duck and put it in the left basket - subtask_index: 49 - - subtask: Grasp the lime and put it in the right basket - subtask_index: 50 - - subtask: Grasp the long bread and put it in the right basket - subtask_index: 51 - - subtask: Grasp the yogurt and put it in the right basket - subtask_index: 52 - - subtask: Grasp the potato chips and put it in the right basket - subtask_index: 53 - - subtask: Grasp the can and put it in the right basket - subtask_index: 54 - - subtask: Grasp the ad milk and put it in the right basket - subtask_index: 55 - - subtask: Grasp the blue marker and put it in the left basket - subtask_index: 56 - - subtask: 'null' - subtask_index: 57 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 160875 - dataset_size: 8.91 GB - data_structure: 'Galaxea_R1_Lite_classify_object_five_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- 
episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (183 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ 
Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 100K-1M diff --git a/dataset_info/Galaxea_R1_Lite_classify_object_four.yaml b/dataset_info/Galaxea_R1_Lite_classify_object_four.yaml index eff492aaa2180f571b50b6dfbaef88ffc2836426..7758a0200ac187c652f88c7bdbd34b98db8366ff 100644 --- a/dataset_info/Galaxea_R1_Lite_classify_object_four.yaml +++ b/dataset_info/Galaxea_R1_Lite_classify_object_four.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: brown_basket level1: baskets level2: brown_basket @@ -69,91 +69,159 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: place the food in the right basket with the right gripper, and place - the non food items in the left basket with the left gripper. +task_instruction: +- place the food in the right basket with the right gripper, and place the non food + items in the left basket with the left gripper. sub_tasks: -- Grasp the potato chips and put it in the left basket -- Grasp the mineral water and put it in the right basket -- Grasp the rubiks cube and put it in the left basket -- Grasp the waffle and put it in the right basket -- Grasp the soft cleanser and put it in the left basket -- Grasp the back scratcher and put it in the left basket -- Grasp the apple and put it in the right basket -- End -- Grasp the white eraser and put it in the left basket -- Grasp the square chewing gum and put it in the right basket -- Grasp the power strip and put it in the left basket -- Grasp the green lemon and put it in the right basket -- Grasp the coke and put it in the right basket -- Grasp the cleaning agent and put it in the left basket -- Grasp the soda water and put it in the right basket -- Grasp the spoon and put it in the left basket -- Grasp the duck toys and put it in the left basket -- Grasp the triangle cake and put it in the right basket -- Grasp the cookie and put it in the right basket -- Grasp the yellow cake and put it in the right basket -- Grasp the shower sphere and put it in the 
left basket -- Grasp the compass and put it in the left basket -- Grasp the orange and put it in the right basket -- Grasp the broom and put it in the left basket -- Grasp the back scratcher and put it in the right basket -- Grasp the ballpoint pen and put it in the left basket -- Grasp the round bread and put it in the right basket -- Grasp the egg yolk pastry and put it in the right basket -- Grasp the soap and put it in the left basket -- Grasp the washing liquid and put it in the left basket -- Grasp the hard cleanser and put it in the left basket -- Grasp the milk and put it in the right basket -- Grasp the black marker and put it in the left basket -- Grasp the banana and put it in the right basket -- Grasp the can and put it in the left basket -- Grasp the black glass cup and put it in the left basket -- Grasp the brush and put it in the left basket -- Grasp the bath ball and put it in the left basket -- Grasp the blue towel and put it in the left basket -- Grasp the peeler and put it in the left basket -- Grasp the brown towel and put it in the left basket -- Grasp the peach and put it in the right basket -- Grasp the tea cup and put it in the left basket -- Grasp the round bread and put it in the left basket -- Grasp the chocolate and put it in the right basket -- Grasp the grey towel and put it in the left basket -- Grasp the canned cola and put it in the right basket -- Grasp the tape and put it in the left basket -- Grasp the bread slice and put it in the right basket -- Grasp the glasses case and put it in the left basket -- Grasp the triangle cake and put it in the left basket -- Grasp the peach doll and put it in the right basket -- Grasp the blue cup and put it in the left basket -- Grasp the pen container and put it in the left basket -- Grasp the red duck and put it in the left basket -- Grasp the long bread and put it in the right basket -- Grasp the yogurt and put it in the right basket -- Grasp the potato chips and put it in the right basket -- 
Grasp the can and put it in the right basket -- Grasp the egg beater and put it in the right basket -- Place the cookie in the center of the table -- Grasp the square chewing gum and put it in the left basket -- Grasp the ad milk and put it in the right basket -- Grasp the detergent and put it in the left basket -- Grasp the yellow duck and put it in the left basket -- Grasp the blue marker and put it in the left basket -- 'null' +- subtask: Grasp the potato chips and put it in the left basket + subtask_index: 0 +- subtask: Grasp the mineral water and put it in the right basket + subtask_index: 1 +- subtask: Grasp the rubiks cube and put it in the left basket + subtask_index: 2 +- subtask: Grasp the waffle and put it in the right basket + subtask_index: 3 +- subtask: Grasp the soft cleanser and put it in the left basket + subtask_index: 4 +- subtask: Grasp the back scratcher and put it in the left basket + subtask_index: 5 +- subtask: Grasp the apple and put it in the right basket + subtask_index: 6 +- subtask: End + subtask_index: 7 +- subtask: Grasp the white eraser and put it in the left basket + subtask_index: 8 +- subtask: Grasp the square chewing gum and put it in the right basket + subtask_index: 9 +- subtask: Grasp the power strip and put it in the left basket + subtask_index: 10 +- subtask: Grasp the green lemon and put it in the right basket + subtask_index: 11 +- subtask: Grasp the coke and put it in the right basket + subtask_index: 12 +- subtask: Grasp the cleaning agent and put it in the left basket + subtask_index: 13 +- subtask: Grasp the soda water and put it in the right basket + subtask_index: 14 +- subtask: Grasp the spoon and put it in the left basket + subtask_index: 15 +- subtask: Grasp the duck toys and put it in the left basket + subtask_index: 16 +- subtask: Grasp the triangle cake and put it in the right basket + subtask_index: 17 +- subtask: Grasp the cookie and put it in the right basket + subtask_index: 18 +- subtask: Grasp the yellow 
cake and put it in the right basket + subtask_index: 19 +- subtask: Grasp the shower sphere and put it in the left basket + subtask_index: 20 +- subtask: Grasp the compass and put it in the left basket + subtask_index: 21 +- subtask: Grasp the orange and put it in the right basket + subtask_index: 22 +- subtask: Grasp the broom and put it in the left basket + subtask_index: 23 +- subtask: Grasp the back scratcher and put it in the right basket + subtask_index: 24 +- subtask: Grasp the ballpoint pen and put it in the left basket + subtask_index: 25 +- subtask: Grasp the round bread and put it in the right basket + subtask_index: 26 +- subtask: Grasp the egg yolk pastry and put it in the right basket + subtask_index: 27 +- subtask: Grasp the soap and put it in the left basket + subtask_index: 28 +- subtask: Grasp the washing liquid and put it in the left basket + subtask_index: 29 +- subtask: Grasp the hard cleanser and put it in the left basket + subtask_index: 30 +- subtask: Grasp the milk and put it in the right basket + subtask_index: 31 +- subtask: Grasp the black marker and put it in the left basket + subtask_index: 32 +- subtask: Grasp the banana and put it in the right basket + subtask_index: 33 +- subtask: Grasp the can and put it in the left basket + subtask_index: 34 +- subtask: Grasp the black glass cup and put it in the left basket + subtask_index: 35 +- subtask: Grasp the brush and put it in the left basket + subtask_index: 36 +- subtask: Grasp the bath ball and put it in the left basket + subtask_index: 37 +- subtask: Grasp the blue towel and put it in the left basket + subtask_index: 38 +- subtask: Grasp the peeler and put it in the left basket + subtask_index: 39 +- subtask: Grasp the brown towel and put it in the left basket + subtask_index: 40 +- subtask: Grasp the peach and put it in the right basket + subtask_index: 41 +- subtask: Grasp the tea cup and put it in the left basket + subtask_index: 42 +- subtask: Grasp the round bread and put it in 
the left basket + subtask_index: 43 +- subtask: Grasp the chocolate and put it in the right basket + subtask_index: 44 +- subtask: Grasp the grey towel and put it in the left basket + subtask_index: 45 +- subtask: Grasp the canned cola and put it in the right basket + subtask_index: 46 +- subtask: Grasp the tape and put it in the left basket + subtask_index: 47 +- subtask: Grasp the bread slice and put it in the right basket + subtask_index: 48 +- subtask: Grasp the glasses case and put it in the left basket + subtask_index: 49 +- subtask: Grasp the triangle cake and put it in the left basket + subtask_index: 50 +- subtask: Grasp the peach doll and put it in the right basket + subtask_index: 51 +- subtask: Grasp the blue cup and put it in the left basket + subtask_index: 52 +- subtask: Grasp the pen container and put it in the left basket + subtask_index: 53 +- subtask: Grasp the red duck and put it in the left basket + subtask_index: 54 +- subtask: Grasp the long bread and put it in the right basket + subtask_index: 55 +- subtask: Grasp the yogurt and put it in the right basket + subtask_index: 56 +- subtask: Grasp the potato chips and put it in the right basket + subtask_index: 57 +- subtask: Grasp the can and put it in the right basket + subtask_index: 58 +- subtask: Grasp the egg beater and put it in the right basket + subtask_index: 59 +- subtask: Place the cookie in the center of the table + subtask_index: 60 +- subtask: Grasp the square chewing gum and put it in the left basket + subtask_index: 61 +- subtask: Grasp the ad milk and put it in the right basket + subtask_index: 62 +- subtask: Grasp the detergent and put it in the left basket + subtask_index: 63 +- subtask: Grasp the yellow duck and put it in the left basket + subtask_index: 64 +- subtask: Grasp the blue marker and put it in the left basket + subtask_index: 65 +- subtask: 'null' + subtask_index: 66 atomic_actions: - grasp - pick - place -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite 
end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -164,13 +232,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -178,8 +243,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 191 total_frames: 153386 fps: 30 @@ -266,11 +330,9 @@ data_structure: 'Galaxea_R1_Lite_classify_object_four_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:190 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -538,7 +600,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -546,7 +608,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: 
https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -573,314 +634,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_classify_object_four - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: commercial_convenience - level2: supermarket - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - place the food in the right basket with the right gripper, and place the non food - items in the left basket with the left gripper. 
- sub_tasks: - - subtask: Grasp the potato chips and put it in the left basket - subtask_index: 0 - - subtask: Grasp the mineral water and put it in the right basket - subtask_index: 1 - - subtask: Grasp the rubiks cube and put it in the left basket - subtask_index: 2 - - subtask: Grasp the waffle and put it in the right basket - subtask_index: 3 - - subtask: Grasp the soft cleanser and put it in the left basket - subtask_index: 4 - - subtask: Grasp the back scratcher and put it in the left basket - subtask_index: 5 - - subtask: Grasp the apple and put it in the right basket - subtask_index: 6 - - subtask: End - subtask_index: 7 - - subtask: Grasp the white eraser and put it in the left basket - subtask_index: 8 - - subtask: Grasp the square chewing gum and put it in the right basket - subtask_index: 9 - - subtask: Grasp the power strip and put it in the left basket - subtask_index: 10 - - subtask: Grasp the green lemon and put it in the right basket - subtask_index: 11 - - subtask: Grasp the coke and put it in the right basket - subtask_index: 12 - - subtask: Grasp the cleaning agent and put it in the left basket - subtask_index: 13 - - subtask: Grasp the soda water and put it in the right basket - subtask_index: 14 - - subtask: Grasp the spoon and put it in the left basket - subtask_index: 15 - - subtask: Grasp the duck toys and put it in the left basket - subtask_index: 16 - - subtask: Grasp the triangle cake and put it in the right basket - subtask_index: 17 - - subtask: Grasp the cookie and put it in the right basket - subtask_index: 18 - - subtask: Grasp the yellow cake and put it in the right basket - subtask_index: 19 - - subtask: Grasp the shower sphere and put it in the left basket - subtask_index: 20 - - subtask: Grasp the compass and put it in the left basket - subtask_index: 21 - - subtask: Grasp the orange and put it in the right basket - subtask_index: 22 - - subtask: Grasp the broom and put it in the left basket - subtask_index: 23 - - subtask: 
Grasp the back scratcher and put it in the right basket - subtask_index: 24 - - subtask: Grasp the ballpoint pen and put it in the left basket - subtask_index: 25 - - subtask: Grasp the round bread and put it in the right basket - subtask_index: 26 - - subtask: Grasp the egg yolk pastry and put it in the right basket - subtask_index: 27 - - subtask: Grasp the soap and put it in the left basket - subtask_index: 28 - - subtask: Grasp the washing liquid and put it in the left basket - subtask_index: 29 - - subtask: Grasp the hard cleanser and put it in the left basket - subtask_index: 30 - - subtask: Grasp the milk and put it in the right basket - subtask_index: 31 - - subtask: Grasp the black marker and put it in the left basket - subtask_index: 32 - - subtask: Grasp the banana and put it in the right basket - subtask_index: 33 - - subtask: Grasp the can and put it in the left basket - subtask_index: 34 - - subtask: Grasp the black glass cup and put it in the left basket - subtask_index: 35 - - subtask: Grasp the brush and put it in the left basket - subtask_index: 36 - - subtask: Grasp the bath ball and put it in the left basket - subtask_index: 37 - - subtask: Grasp the blue towel and put it in the left basket - subtask_index: 38 - - subtask: Grasp the peeler and put it in the left basket - subtask_index: 39 - - subtask: Grasp the brown towel and put it in the left basket - subtask_index: 40 - - subtask: Grasp the peach and put it in the right basket - subtask_index: 41 - - subtask: Grasp the tea cup and put it in the left basket - subtask_index: 42 - - subtask: Grasp the round bread and put it in the left basket - subtask_index: 43 - - subtask: Grasp the chocolate and put it in the right basket - subtask_index: 44 - - subtask: Grasp the grey towel and put it in the left basket - subtask_index: 45 - - subtask: Grasp the canned cola and put it in the right basket - subtask_index: 46 - - subtask: Grasp the tape and put it in the left basket - subtask_index: 47 - - 
subtask: Grasp the bread slice and put it in the right basket - subtask_index: 48 - - subtask: Grasp the glasses case and put it in the left basket - subtask_index: 49 - - subtask: Grasp the triangle cake and put it in the left basket - subtask_index: 50 - - subtask: Grasp the peach doll and put it in the right basket - subtask_index: 51 - - subtask: Grasp the blue cup and put it in the left basket - subtask_index: 52 - - subtask: Grasp the pen container and put it in the left basket - subtask_index: 53 - - subtask: Grasp the red duck and put it in the left basket - subtask_index: 54 - - subtask: Grasp the long bread and put it in the right basket - subtask_index: 55 - - subtask: Grasp the yogurt and put it in the right basket - subtask_index: 56 - - subtask: Grasp the potato chips and put it in the right basket - subtask_index: 57 - - subtask: Grasp the can and put it in the right basket - subtask_index: 58 - - subtask: Grasp the egg beater and put it in the right basket - subtask_index: 59 - - subtask: Place the cookie in the center of the table - subtask_index: 60 - - subtask: Grasp the square chewing gum and put it in the left basket - subtask_index: 61 - - subtask: Grasp the ad milk and put it in the right basket - subtask_index: 62 - - subtask: Grasp the detergent and put it in the left basket - subtask_index: 63 - - subtask: Grasp the yellow duck and put it in the left basket - subtask_index: 64 - - subtask: Grasp the blue marker and put it in the left basket - subtask_index: 65 - - subtask: 'null' - subtask_index: 66 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 153386 - dataset_size: 8.39 GB - data_structure: 'Galaxea_R1_Lite_classify_object_four_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (179 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null 
-data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 100K-1M diff --git a/dataset_info/Galaxea_R1_Lite_classify_object_green_tablecloth.yaml b/dataset_info/Galaxea_R1_Lite_classify_object_green_tablecloth.yaml index 586eb17ec79635328af6a86edc1fcd063c1e42b9..25622eb986a23059e93b593c33c82a34f7d2ce87 100644 --- a/dataset_info/Galaxea_R1_Lite_classify_object_green_tablecloth.yaml +++ b/dataset_info/Galaxea_R1_Lite_classify_object_green_tablecloth.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: brown_basket level1: baskets level2: brown_basket @@ -75,182 +75,341 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: on the green table,place the food in the right basket with the right - gripper, and place the non food items in the left basket with the left gripper. 
+task_instruction: +- on the green table,place the food in the right basket with the right gripper, and + place the non food items in the left basket with the left gripper. sub_tasks: -- Grasp the round bread and put it in the right basket -- Grasp the can and put it in the right basket -- Pick up the compass and put it in the left basket -- Grasp the soft cleanser and put it in the left basket -- Pick up the orange and put it in the right basket -- Pick up the round chewing gum and put it in the right basket -- Grasp the playing cards and put it in the left basket -- Grasp the purple garbage bag and put it in the left basket -- Place the back scratcher in the center of the table -- Place the eggplant in the center of the table -- Pick up the square chewing gum and put it in the right basket -- Pick up the washing liquid and put it in the left basket -- Grasp the red pot and put it in the left basket -- Grasp the grape and put it in the right basket -- Pick up the tape and put it in the left basket -- Pick up the shampoo and put it in the left basket -- Pick up the playing cards and put it in the left basket -- Place the orange in the center of the table -- Grasp the eggplant and put it in the right basket -- Pick up the banana and put it in the right basket -- Grasp the washing liquid and put it in the left basket -- Grasp the shampoo and put it in the left basket -- Place the pink bowl in the center of the table -- Place the compass in the center of the table -- Place the peach in the center of the table -- Place the banana in the center of the table -- Place the tape in the center of the table -- Pick up the round bread and put it in the left basket -- Pick up the tea cup and put it in the left basket -- Pick up the round bread and put it in the right basket -- Grasp the banana and put it in the right basket -- Grasp the pear and put it in the right basket -- Place the lemon and put it in the center of the table -- Grasp the compass and put it in the right 
basket -- Pick up the can and put it in the right basket -- Place the red pot in the center of the table -- Pick up the yellow marker and put it in the left basket -- Grasp the yellow cake and put it in the right basket -- Grasp the long bread and put it in the right basket -- Grasp the canned cola and put it in the right basket -- Grasp the glasses case and put it in the left basket -- Pick up the lime and put it in the right basket -- Pick up the pear and put it in the right basket -- Grasp the green chewing gum and put it in the right basket -- Pick up the croissant and put it in the right basket -- Pick up the chinese cabbage and put it in the right basket -- Place the fruit fudge in the center of the table -- Grasp the white eraser and put it in the left basket -- Place the yellow marker in the center of the table -- Place the round chewing gum in the center of the table -- Pick up the canned cola and put it in the right basket -- Pick up the red pot and put it in the left basket -- Place the green lemon in the center of the table -- Grasp the pink marker and put it in the left basket -- Place the washing liquid in the center of the table -- Abnormal -- Grasp the peach and put it in the right basket -- Grasp the soap and put it in the left basket -- Place the Red Bull in the center of the table -- Place the mango in the center of the table -- Grasp the fruit fudge and put it in the right basket -- Grasp the bath ball and put it in the left basket -- Grasp the red bull and put it in the right basket -- Pick up the lemon and put it in the right basket -- Grasp the chinese cabbage and put it in the right basket -- Grasp the pink pot and put it in the left basket -- Grasp the croissant and put it in the right basket -- Grasp the soda water and put it in the right basket -- Pick up the egg yolk pastry and put it in the right basket -- Grasp the spoon and put it in the left basket -- Grasp the blue cup and put it in the left basket -- Place the blue towel in the 
center of the table -- Place the yellow cake in the center of the table -- Place the grey cup in the center of the table -- Grasp the tape and put it in the left basket -- Place the square chewing gum in the center of the table -- Pick up the milk and put it in the right basket -- Grasp the back scratcher and put it in the right basket -- Grasp the yogurt and put it in the right basket -- Pick up the yellow cake and put it in the right basket -- Pick up the blue bowl and put it in the left basket -- Grasp the lemon and put it in the right basket -- Grasp the back scratcher and put it in the left basket -- Grasp the orange and put it in the right basket -- Pick up the soda water and put it in the right basket -- Grasp the red pot and put it in the right basket -- Grasp the milk and put it in the right basket -- Pick up the blue cup and put it in the left basket -- Pick up the mango and put it in the right basket -- Place the pink marker in the center of the table -- Grasp the grey cup and put it in the left basket -- Pick up the long bread and put it in the right basket -- Pick up the soap and put it in the left basket -- Grasp the peach and put it in the left basket -- Pick up the pink pot and put it in the left basket -- Grasp the sausage and put it in the right basket -- Pick up the yogurt and put it in the right basket -- Place the bath ball in the center of the table -- Grasp the square chewing gum and put it in the right basket -- Grasp the ad milk and put it in the right basket -- Place the pink pot in the center of the table -- Grasp the compass and put it in the left basket -- Place the glasses case in the center of the table -- Place the gray towel in the center of the table -- Pick up the back scratcher and put it in the left basket -- Place the purple garbage bag in the center of the table -- Place the blue cup in the center of the table -- Pick up the blue towel and put it in the left basket -- Place the pear in the center of the table -- Place the soft 
cleanser in the center of the table -- Grasp the egg yolk pastry and put it in the right basket -- Pick up the spoon and put it in the left basket -- Place the sausage in the center of the table -- Place the green chewing gum in the center of the table -- Pick up the peach and put it in the right basket -- Place the can in the center of the table -- Grasp the mango and put it in the right basket -- Place the canned cola in the center of the table -- Pick up the gray towel and put it in the left basket -- Place the ad milk in the center of the table -- Pick up the sausage and put it in the right basket -- Grasp the hard cleanser and put it in the left basket -- Pick up the pink marker and put it in the left basket -- Grasp the detergent and put it in the left basket -- Pick up the pink bowl and put it in the left basket -- Grasp the Red Bull and put it in the right basket -- Pick up the ad milk and put it in the right basket -- Place the long bread in the center of the table -- Pick up the soft cleanser and put it in the left basket -- Place the croissant in the center of the table -- Grasp the lime and put it in the right basket -- Place the round bread in the center of the table -- Pick up the green chewing gum and put it in the right basket -- Grasp the pink bowl and put it in the left basket -- Place the playing cards in the center of the table -- Pick up the glasses case and put it in the left basket -- Pick up the fruit fudge and put it in the right basket -- Pick up the eggplant and put it in the right basket -- Place the white eraser in the center of the table -- Place the yogurt in the center of the table -- Pick up the detergent and put it in the left basket -- Grasp the blue towel and put it in the left basket -- Pick up the red bull and put it in the right basket -- Place the lime in the center of the table -- Grasp the mint candy and put it in the right basket -- Place the soap in the center of the table -- Grasp the red marker and put it in the left 
basket -- Pick up the purple garbage bag and put it in the left basket -- Place the detergent in the center of the table -- Pick up the bath ball and put it in the left basket -- Grasp the gray towel and put it in the left basket -- Grasp the yellow marker and put it in the left basket -- Grasp the pink towel and put it in the left basket -- End -- Place the shampoo in the center of the table -- Grasp the green lemon and put it in the right basket -- Grasp the round chewing gum and put it in the right basket -- 'null' +- subtask: Grasp the round bread and put it in the right basket + subtask_index: 0 +- subtask: Grasp the can and put it in the right basket + subtask_index: 1 +- subtask: Pick up the compass and put it in the left basket + subtask_index: 2 +- subtask: Grasp the soft cleanser and put it in the left basket + subtask_index: 3 +- subtask: Pick up the orange and put it in the right basket + subtask_index: 4 +- subtask: Pick up the round chewing gum and put it in the right basket + subtask_index: 5 +- subtask: Grasp the playing cards and put it in the left basket + subtask_index: 6 +- subtask: Grasp the purple garbage bag and put it in the left basket + subtask_index: 7 +- subtask: Place the back scratcher in the center of the table + subtask_index: 8 +- subtask: Place the eggplant in the center of the table + subtask_index: 9 +- subtask: Pick up the square chewing gum and put it in the right basket + subtask_index: 10 +- subtask: Pick up the washing liquid and put it in the left basket + subtask_index: 11 +- subtask: Grasp the red pot and put it in the left basket + subtask_index: 12 +- subtask: Grasp the grape and put it in the right basket + subtask_index: 13 +- subtask: Pick up the tape and put it in the left basket + subtask_index: 14 +- subtask: Pick up the shampoo and put it in the left basket + subtask_index: 15 +- subtask: Pick up the playing cards and put it in the left basket + subtask_index: 16 +- subtask: Place the orange in the center of the 
table + subtask_index: 17 +- subtask: Grasp the eggplant and put it in the right basket + subtask_index: 18 +- subtask: Pick up the banana and put it in the right basket + subtask_index: 19 +- subtask: Grasp the washing liquid and put it in the left basket + subtask_index: 20 +- subtask: Grasp the shampoo and put it in the left basket + subtask_index: 21 +- subtask: Place the pink bowl in the center of the table + subtask_index: 22 +- subtask: Place the compass in the center of the table + subtask_index: 23 +- subtask: Place the peach in the center of the table + subtask_index: 24 +- subtask: Place the banana in the center of the table + subtask_index: 25 +- subtask: Place the tape in the center of the table + subtask_index: 26 +- subtask: Pick up the round bread and put it in the left basket + subtask_index: 27 +- subtask: Pick up the tea cup and put it in the left basket + subtask_index: 28 +- subtask: Pick up the round bread and put it in the right basket + subtask_index: 29 +- subtask: Grasp the banana and put it in the right basket + subtask_index: 30 +- subtask: Grasp the pear and put it in the right basket + subtask_index: 31 +- subtask: Place the lemon and put it in the center of the table + subtask_index: 32 +- subtask: Grasp the compass and put it in the right basket + subtask_index: 33 +- subtask: Pick up the can and put it in the right basket + subtask_index: 34 +- subtask: Place the red pot in the center of the table + subtask_index: 35 +- subtask: Pick up the yellow marker and put it in the left basket + subtask_index: 36 +- subtask: Grasp the yellow cake and put it in the right basket + subtask_index: 37 +- subtask: Grasp the long bread and put it in the right basket + subtask_index: 38 +- subtask: Grasp the canned cola and put it in the right basket + subtask_index: 39 +- subtask: Grasp the glasses case and put it in the left basket + subtask_index: 40 +- subtask: Pick up the lime and put it in the right basket + subtask_index: 41 +- subtask: Pick 
up the pear and put it in the right basket + subtask_index: 42 +- subtask: Grasp the green chewing gum and put it in the right basket + subtask_index: 43 +- subtask: Pick up the croissant and put it in the right basket + subtask_index: 44 +- subtask: Pick up the chinese cabbage and put it in the right basket + subtask_index: 45 +- subtask: Place the fruit fudge in the center of the table + subtask_index: 46 +- subtask: Grasp the white eraser and put it in the left basket + subtask_index: 47 +- subtask: Place the yellow marker in the center of the table + subtask_index: 48 +- subtask: Place the round chewing gum in the center of the table + subtask_index: 49 +- subtask: Pick up the canned cola and put it in the right basket + subtask_index: 50 +- subtask: Pick up the red pot and put it in the left basket + subtask_index: 51 +- subtask: Place the green lemon in the center of the table + subtask_index: 52 +- subtask: Grasp the pink marker and put it in the left basket + subtask_index: 53 +- subtask: Place the washing liquid in the center of the table + subtask_index: 54 +- subtask: Abnormal + subtask_index: 55 +- subtask: Grasp the peach and put it in the right basket + subtask_index: 56 +- subtask: Grasp the soap and put it in the left basket + subtask_index: 57 +- subtask: Place the Red Bull in the center of the table + subtask_index: 58 +- subtask: Place the mango in the center of the table + subtask_index: 59 +- subtask: Grasp the fruit fudge and put it in the right basket + subtask_index: 60 +- subtask: Grasp the bath ball and put it in the left basket + subtask_index: 61 +- subtask: Grasp the red bull and put it in the right basket + subtask_index: 62 +- subtask: Pick up the lemon and put it in the right basket + subtask_index: 63 +- subtask: Grasp the chinese cabbage and put it in the right basket + subtask_index: 64 +- subtask: Grasp the pink pot and put it in the left basket + subtask_index: 65 +- subtask: Grasp the croissant and put it in the right basket + 
subtask_index: 66 +- subtask: Grasp the soda water and put it in the right basket + subtask_index: 67 +- subtask: Pick up the egg yolk pastry and put it in the right basket + subtask_index: 68 +- subtask: Grasp the spoon and put it in the left basket + subtask_index: 69 +- subtask: Grasp the blue cup and put it in the left basket + subtask_index: 70 +- subtask: Place the blue towel in the center of the table + subtask_index: 71 +- subtask: Place the yellow cake in the center of the table + subtask_index: 72 +- subtask: Place the grey cup in the center of the table + subtask_index: 73 +- subtask: Grasp the tape and put it in the left basket + subtask_index: 74 +- subtask: Place the square chewing gum in the center of the table + subtask_index: 75 +- subtask: Pick up the milk and put it in the right basket + subtask_index: 76 +- subtask: Grasp the back scratcher and put it in the right basket + subtask_index: 77 +- subtask: Grasp the yogurt and put it in the right basket + subtask_index: 78 +- subtask: Pick up the yellow cake and put it in the right basket + subtask_index: 79 +- subtask: Pick up the blue bowl and put it in the left basket + subtask_index: 80 +- subtask: Grasp the lemon and put it in the right basket + subtask_index: 81 +- subtask: Grasp the back scratcher and put it in the left basket + subtask_index: 82 +- subtask: Grasp the orange and put it in the right basket + subtask_index: 83 +- subtask: Pick up the soda water and put it in the right basket + subtask_index: 84 +- subtask: Grasp the red pot and put it in the right basket + subtask_index: 85 +- subtask: Grasp the milk and put it in the right basket + subtask_index: 86 +- subtask: Pick up the blue cup and put it in the left basket + subtask_index: 87 +- subtask: Pick up the mango and put it in the right basket + subtask_index: 88 +- subtask: Place the pink marker in the center of the table + subtask_index: 89 +- subtask: Grasp the grey cup and put it in the left basket + subtask_index: 90 +- 
subtask: Pick up the long bread and put it in the right basket + subtask_index: 91 +- subtask: Pick up the soap and put it in the left basket + subtask_index: 92 +- subtask: Grasp the peach and put it in the left basket + subtask_index: 93 +- subtask: Pick up the pink pot and put it in the left basket + subtask_index: 94 +- subtask: Grasp the sausage and put it in the right basket + subtask_index: 95 +- subtask: Pick up the yogurt and put it in the right basket + subtask_index: 96 +- subtask: Place the bath ball in the center of the table + subtask_index: 97 +- subtask: Grasp the square chewing gum and put it in the right basket + subtask_index: 98 +- subtask: Grasp the ad milk and put it in the right basket + subtask_index: 99 +- subtask: Place the pink pot in the center of the table + subtask_index: 100 +- subtask: Grasp the compass and put it in the left basket + subtask_index: 101 +- subtask: Place the glasses case in the center of the table + subtask_index: 102 +- subtask: Place the gray towel in the center of the table + subtask_index: 103 +- subtask: Pick up the back scratcher and put it in the left basket + subtask_index: 104 +- subtask: Place the purple garbage bag in the center of the table + subtask_index: 105 +- subtask: Place the blue cup in the center of the table + subtask_index: 106 +- subtask: Pick up the blue towel and put it in the left basket + subtask_index: 107 +- subtask: Place the pear in the center of the table + subtask_index: 108 +- subtask: Place the soft cleanser in the center of the table + subtask_index: 109 +- subtask: Grasp the egg yolk pastry and put it in the right basket + subtask_index: 110 +- subtask: Pick up the spoon and put it in the left basket + subtask_index: 111 +- subtask: Place the sausage in the center of the table + subtask_index: 112 +- subtask: Place the green chewing gum in the center of the table + subtask_index: 113 +- subtask: Pick up the peach and put it in the right basket + subtask_index: 114 +- subtask: 
Place the can in the center of the table + subtask_index: 115 +- subtask: Grasp the mango and put it in the right basket + subtask_index: 116 +- subtask: Place the canned cola in the center of the table + subtask_index: 117 +- subtask: Pick up the gray towel and put it in the left basket + subtask_index: 118 +- subtask: Place the ad milk in the center of the table + subtask_index: 119 +- subtask: Pick up the sausage and put it in the right basket + subtask_index: 120 +- subtask: Grasp the hard cleanser and put it in the left basket + subtask_index: 121 +- subtask: Pick up the pink marker and put it in the left basket + subtask_index: 122 +- subtask: Grasp the detergent and put it in the left basket + subtask_index: 123 +- subtask: Pick up the pink bowl and put it in the left basket + subtask_index: 124 +- subtask: Grasp the Red Bull and put it in the right basket + subtask_index: 125 +- subtask: Pick up the ad milk and put it in the right basket + subtask_index: 126 +- subtask: Place the long bread in the center of the table + subtask_index: 127 +- subtask: Pick up the soft cleanser and put it in the left basket + subtask_index: 128 +- subtask: Place the croissant in the center of the table + subtask_index: 129 +- subtask: Grasp the lime and put it in the right basket + subtask_index: 130 +- subtask: Place the round bread in the center of the table + subtask_index: 131 +- subtask: Pick up the green chewing gum and put it in the right basket + subtask_index: 132 +- subtask: Grasp the pink bowl and put it in the left basket + subtask_index: 133 +- subtask: Place the playing cards in the center of the table + subtask_index: 134 +- subtask: Pick up the glasses case and put it in the left basket + subtask_index: 135 +- subtask: Pick up the fruit fudge and put it in the right basket + subtask_index: 136 +- subtask: Pick up the eggplant and put it in the right basket + subtask_index: 137 +- subtask: Place the white eraser in the center of the table + subtask_index: 138 +- 
subtask: Place the yogurt in the center of the table + subtask_index: 139 +- subtask: Pick up the detergent and put it in the left basket + subtask_index: 140 +- subtask: Grasp the blue towel and put it in the left basket + subtask_index: 141 +- subtask: Pick up the red bull and put it in the right basket + subtask_index: 142 +- subtask: Place the lime in the center of the table + subtask_index: 143 +- subtask: Grasp the mint candy and put it in the right basket + subtask_index: 144 +- subtask: Place the soap in the center of the table + subtask_index: 145 +- subtask: Grasp the red marker and put it in the left basket + subtask_index: 146 +- subtask: Pick up the purple garbage bag and put it in the left basket + subtask_index: 147 +- subtask: Place the detergent in the center of the table + subtask_index: 148 +- subtask: Pick up the bath ball and put it in the left basket + subtask_index: 149 +- subtask: Grasp the gray towel and put it in the left basket + subtask_index: 150 +- subtask: Grasp the yellow marker and put it in the left basket + subtask_index: 151 +- subtask: Grasp the pink towel and put it in the left basket + subtask_index: 152 +- subtask: End + subtask_index: 153 +- subtask: Place the shampoo in the center of the table + subtask_index: 154 +- subtask: Grasp the green lemon and put it in the right basket + subtask_index: 155 +- subtask: Grasp the round chewing gum and put it in the right basket + subtask_index: 156 +- subtask: 'null' + subtask_index: 157 atomic_actions: - grasp - pick - place -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -261,13 +420,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -275,8 +431,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 224 total_frames: 251221 fps: 30 @@ -363,11 +518,9 @@ data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_classify_object_green_tableclot |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:223 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -635,7 +788,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -643,7 +796,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or 
feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -670,496 +822,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_classify_object_green_tablecloth - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: commercial_convenience - level2: supermarket - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - on the green table,place the food in the right basket with the right gripper, - and place the non food items in the left basket with the left gripper. 
- sub_tasks: - - subtask: Grasp the round bread and put it in the right basket - subtask_index: 0 - - subtask: Grasp the can and put it in the right basket - subtask_index: 1 - - subtask: Pick up the compass and put it in the left basket - subtask_index: 2 - - subtask: Grasp the soft cleanser and put it in the left basket - subtask_index: 3 - - subtask: Pick up the orange and put it in the right basket - subtask_index: 4 - - subtask: Pick up the round chewing gum and put it in the right basket - subtask_index: 5 - - subtask: Grasp the playing cards and put it in the left basket - subtask_index: 6 - - subtask: Grasp the purple garbage bag and put it in the left basket - subtask_index: 7 - - subtask: Place the back scratcher in the center of the table - subtask_index: 8 - - subtask: Place the eggplant in the center of the table - subtask_index: 9 - - subtask: Pick up the square chewing gum and put it in the right basket - subtask_index: 10 - - subtask: Pick up the washing liquid and put it in the left basket - subtask_index: 11 - - subtask: Grasp the red pot and put it in the left basket - subtask_index: 12 - - subtask: Grasp the grape and put it in the right basket - subtask_index: 13 - - subtask: Pick up the tape and put it in the left basket - subtask_index: 14 - - subtask: Pick up the shampoo and put it in the left basket - subtask_index: 15 - - subtask: Pick up the playing cards and put it in the left basket - subtask_index: 16 - - subtask: Place the orange in the center of the table - subtask_index: 17 - - subtask: Grasp the eggplant and put it in the right basket - subtask_index: 18 - - subtask: Pick up the banana and put it in the right basket - subtask_index: 19 - - subtask: Grasp the washing liquid and put it in the left basket - subtask_index: 20 - - subtask: Grasp the shampoo and put it in the left basket - subtask_index: 21 - - subtask: Place the pink bowl in the center of the table - subtask_index: 22 - - subtask: Place the compass in the center of the 
table - subtask_index: 23 - - subtask: Place the peach in the center of the table - subtask_index: 24 - - subtask: Place the banana in the center of the table - subtask_index: 25 - - subtask: Place the tape in the center of the table - subtask_index: 26 - - subtask: Pick up the round bread and put it in the left basket - subtask_index: 27 - - subtask: Pick up the tea cup and put it in the left basket - subtask_index: 28 - - subtask: Pick up the round bread and put it in the right basket - subtask_index: 29 - - subtask: Grasp the banana and put it in the right basket - subtask_index: 30 - - subtask: Grasp the pear and put it in the right basket - subtask_index: 31 - - subtask: Place the lemon and put it in the center of the table - subtask_index: 32 - - subtask: Grasp the compass and put it in the right basket - subtask_index: 33 - - subtask: Pick up the can and put it in the right basket - subtask_index: 34 - - subtask: Place the red pot in the center of the table - subtask_index: 35 - - subtask: Pick up the yellow marker and put it in the left basket - subtask_index: 36 - - subtask: Grasp the yellow cake and put it in the right basket - subtask_index: 37 - - subtask: Grasp the long bread and put it in the right basket - subtask_index: 38 - - subtask: Grasp the canned cola and put it in the right basket - subtask_index: 39 - - subtask: Grasp the glasses case and put it in the left basket - subtask_index: 40 - - subtask: Pick up the lime and put it in the right basket - subtask_index: 41 - - subtask: Pick up the pear and put it in the right basket - subtask_index: 42 - - subtask: Grasp the green chewing gum and put it in the right basket - subtask_index: 43 - - subtask: Pick up the croissant and put it in the right basket - subtask_index: 44 - - subtask: Pick up the chinese cabbage and put it in the right basket - subtask_index: 45 - - subtask: Place the fruit fudge in the center of the table - subtask_index: 46 - - subtask: Grasp the white eraser and put it in the 
left basket - subtask_index: 47 - - subtask: Place the yellow marker in the center of the table - subtask_index: 48 - - subtask: Place the round chewing gum in the center of the table - subtask_index: 49 - - subtask: Pick up the canned cola and put it in the right basket - subtask_index: 50 - - subtask: Pick up the red pot and put it in the left basket - subtask_index: 51 - - subtask: Place the green lemon in the center of the table - subtask_index: 52 - - subtask: Grasp the pink marker and put it in the left basket - subtask_index: 53 - - subtask: Place the washing liquid in the center of the table - subtask_index: 54 - - subtask: Abnormal - subtask_index: 55 - - subtask: Grasp the peach and put it in the right basket - subtask_index: 56 - - subtask: Grasp the soap and put it in the left basket - subtask_index: 57 - - subtask: Place the Red Bull in the center of the table - subtask_index: 58 - - subtask: Place the mango in the center of the table - subtask_index: 59 - - subtask: Grasp the fruit fudge and put it in the right basket - subtask_index: 60 - - subtask: Grasp the bath ball and put it in the left basket - subtask_index: 61 - - subtask: Grasp the red bull and put it in the right basket - subtask_index: 62 - - subtask: Pick up the lemon and put it in the right basket - subtask_index: 63 - - subtask: Grasp the chinese cabbage and put it in the right basket - subtask_index: 64 - - subtask: Grasp the pink pot and put it in the left basket - subtask_index: 65 - - subtask: Grasp the croissant and put it in the right basket - subtask_index: 66 - - subtask: Grasp the soda water and put it in the right basket - subtask_index: 67 - - subtask: Pick up the egg yolk pastry and put it in the right basket - subtask_index: 68 - - subtask: Grasp the spoon and put it in the left basket - subtask_index: 69 - - subtask: Grasp the blue cup and put it in the left basket - subtask_index: 70 - - subtask: Place the blue towel in the center of the table - subtask_index: 71 - - 
subtask: Place the yellow cake in the center of the table - subtask_index: 72 - - subtask: Place the grey cup in the center of the table - subtask_index: 73 - - subtask: Grasp the tape and put it in the left basket - subtask_index: 74 - - subtask: Place the square chewing gum in the center of the table - subtask_index: 75 - - subtask: Pick up the milk and put it in the right basket - subtask_index: 76 - - subtask: Grasp the back scratcher and put it in the right basket - subtask_index: 77 - - subtask: Grasp the yogurt and put it in the right basket - subtask_index: 78 - - subtask: Pick up the yellow cake and put it in the right basket - subtask_index: 79 - - subtask: Pick up the blue bowl and put it in the left basket - subtask_index: 80 - - subtask: Grasp the lemon and put it in the right basket - subtask_index: 81 - - subtask: Grasp the back scratcher and put it in the left basket - subtask_index: 82 - - subtask: Grasp the orange and put it in the right basket - subtask_index: 83 - - subtask: Pick up the soda water and put it in the right basket - subtask_index: 84 - - subtask: Grasp the red pot and put it in the right basket - subtask_index: 85 - - subtask: Grasp the milk and put it in the right basket - subtask_index: 86 - - subtask: Pick up the blue cup and put it in the left basket - subtask_index: 87 - - subtask: Pick up the mango and put it in the right basket - subtask_index: 88 - - subtask: Place the pink marker in the center of the table - subtask_index: 89 - - subtask: Grasp the grey cup and put it in the left basket - subtask_index: 90 - - subtask: Pick up the long bread and put it in the right basket - subtask_index: 91 - - subtask: Pick up the soap and put it in the left basket - subtask_index: 92 - - subtask: Grasp the peach and put it in the left basket - subtask_index: 93 - - subtask: Pick up the pink pot and put it in the left basket - subtask_index: 94 - - subtask: Grasp the sausage and put it in the right basket - subtask_index: 95 - - subtask: 
Pick up the yogurt and put it in the right basket - subtask_index: 96 - - subtask: Place the bath ball in the center of the table - subtask_index: 97 - - subtask: Grasp the square chewing gum and put it in the right basket - subtask_index: 98 - - subtask: Grasp the ad milk and put it in the right basket - subtask_index: 99 - - subtask: Place the pink pot in the center of the table - subtask_index: 100 - - subtask: Grasp the compass and put it in the left basket - subtask_index: 101 - - subtask: Place the glasses case in the center of the table - subtask_index: 102 - - subtask: Place the gray towel in the center of the table - subtask_index: 103 - - subtask: Pick up the back scratcher and put it in the left basket - subtask_index: 104 - - subtask: Place the purple garbage bag in the center of the table - subtask_index: 105 - - subtask: Place the blue cup in the center of the table - subtask_index: 106 - - subtask: Pick up the blue towel and put it in the left basket - subtask_index: 107 - - subtask: Place the pear in the center of the table - subtask_index: 108 - - subtask: Place the soft cleanser in the center of the table - subtask_index: 109 - - subtask: Grasp the egg yolk pastry and put it in the right basket - subtask_index: 110 - - subtask: Pick up the spoon and put it in the left basket - subtask_index: 111 - - subtask: Place the sausage in the center of the table - subtask_index: 112 - - subtask: Place the green chewing gum in the center of the table - subtask_index: 113 - - subtask: Pick up the peach and put it in the right basket - subtask_index: 114 - - subtask: Place the can in the center of the table - subtask_index: 115 - - subtask: Grasp the mango and put it in the right basket - subtask_index: 116 - - subtask: Place the canned cola in the center of the table - subtask_index: 117 - - subtask: Pick up the gray towel and put it in the left basket - subtask_index: 118 - - subtask: Place the ad milk in the center of the table - subtask_index: 119 - - 
subtask: Pick up the sausage and put it in the right basket - subtask_index: 120 - - subtask: Grasp the hard cleanser and put it in the left basket - subtask_index: 121 - - subtask: Pick up the pink marker and put it in the left basket - subtask_index: 122 - - subtask: Grasp the detergent and put it in the left basket - subtask_index: 123 - - subtask: Pick up the pink bowl and put it in the left basket - subtask_index: 124 - - subtask: Grasp the Red Bull and put it in the right basket - subtask_index: 125 - - subtask: Pick up the ad milk and put it in the right basket - subtask_index: 126 - - subtask: Place the long bread in the center of the table - subtask_index: 127 - - subtask: Pick up the soft cleanser and put it in the left basket - subtask_index: 128 - - subtask: Place the croissant in the center of the table - subtask_index: 129 - - subtask: Grasp the lime and put it in the right basket - subtask_index: 130 - - subtask: Place the round bread in the center of the table - subtask_index: 131 - - subtask: Pick up the green chewing gum and put it in the right basket - subtask_index: 132 - - subtask: Grasp the pink bowl and put it in the left basket - subtask_index: 133 - - subtask: Place the playing cards in the center of the table - subtask_index: 134 - - subtask: Pick up the glasses case and put it in the left basket - subtask_index: 135 - - subtask: Pick up the fruit fudge and put it in the right basket - subtask_index: 136 - - subtask: Pick up the eggplant and put it in the right basket - subtask_index: 137 - - subtask: Place the white eraser in the center of the table - subtask_index: 138 - - subtask: Place the yogurt in the center of the table - subtask_index: 139 - - subtask: Pick up the detergent and put it in the left basket - subtask_index: 140 - - subtask: Grasp the blue towel and put it in the left basket - subtask_index: 141 - - subtask: Pick up the red bull and put it in the right basket - subtask_index: 142 - - subtask: Place the lime in the 
center of the table - subtask_index: 143 - - subtask: Grasp the mint candy and put it in the right basket - subtask_index: 144 - - subtask: Place the soap in the center of the table - subtask_index: 145 - - subtask: Grasp the red marker and put it in the left basket - subtask_index: 146 - - subtask: Pick up the purple garbage bag and put it in the left basket - subtask_index: 147 - - subtask: Place the detergent in the center of the table - subtask_index: 148 - - subtask: Pick up the bath ball and put it in the left basket - subtask_index: 149 - - subtask: Grasp the gray towel and put it in the left basket - subtask_index: 150 - - subtask: Grasp the yellow marker and put it in the left basket - subtask_index: 151 - - subtask: Grasp the pink towel and put it in the left basket - subtask_index: 152 - - subtask: End - subtask_index: 153 - - subtask: Place the shampoo in the center of the table - subtask_index: 154 - - subtask: Grasp the green lemon and put it in the right basket - subtask_index: 155 - - subtask: Grasp the round chewing gum and put it in the right basket - subtask_index: 156 - - subtask: 'null' - subtask_index: 157 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 251221 - dataset_size: 25.62 GB - data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_classify_object_green_tablecloth_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (212 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. 
- support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null 
-base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 100K-1M diff --git a/dataset_info/Galaxea_R1_Lite_classify_object_six.yaml b/dataset_info/Galaxea_R1_Lite_classify_object_six.yaml index b45641aa4ea44c2dab7471b0e2555ffb9d883fb0..c704497d3ecfd5404f7f84d34c5598a4d5d99f2a 100644 --- a/dataset_info/Galaxea_R1_Lite_classify_object_six.yaml +++ b/dataset_info/Galaxea_R1_Lite_classify_object_six.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: brown_basket level1: baskets level2: brown_basket @@ -69,97 +69,171 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: place the food in the right basket with the right gripper, and place - the non food items in the left basket with the left gripper. 
+task_instruction: +- place the food in the right basket with the right gripper, and place the non food + items in the left basket with the left gripper. sub_tasks: -- Grasp the round bread and put it in the right basket -- Grasp the small basket containing bread and put it in the right basket -- Grasp the lemon and put it in the right basket -- Grasp the egg yolk pastry and put it in the right basket -- Grasp the can and put it in the right basket -- Grasp the apple and put it in the right basket -- Grasp the long bread and put it in the right basket -- Grasp the canned cola and put it in the right basket -- Grasp the back scratcher and put it in the left basket -- Grasp the glasses case and put it in the left basket -- Grasp the peeler and put it in the left basket -- Grasp the cookie and put it in the right basket -- Grasp the yellow duck and put it in the right basket -- Grasp the cleaning agent and put it in the left basket -- Grasp the soft cleanser and put it in the left basket -- Grasp the waffle and put it in the right basket -- Grasp the grey towel and put it in the left basket -- Grasp the orange and put it in the right basket -- Grasp the brown towel and put it in the left basket -- Grasp the pen container and put it in the left basket -- Grasp the hard cleanser and put it in the left basket -- Grasp the canned cola and put it in the left basket -- Grasp the triangle cake and put it in the right basket -- Grasp the shower sphere and put it in the left basket -- Grasp the rubiks cube and put it in the right basket -- Grasp the broom and put it in the left basket -- Grasp the white eraser and put it in the left basket -- Grasp the milk and put it in the right basket -- Grasp the lime and put it in the right basket -- Grasp the brown towel and put it in the right basket -- Grasp the duck toys and put it in the left basket -- Grasp the power strip and put it in the left basket -- Grasp the red duck and put it in the left basket -- Grasp the shampoo and put 
it in the left basket -- Grasp the yellow duck and put it in the left basket -- Grasp the peach and put it in the right basket -- Abnormal -- Grasp the tea cup and put it in the left basket -- Grasp the pink marker and put it in the left basket -- Grasp the brush and put it in the left basket -- Grasp the washing liquid and put it in the left basket -- Place the rubiks cube in the center of the table -- Grasp the soap and put it in the left basket -- Grasp the grey towel and put it in the right basket -- Grasp the blue marker and put it in the left basket -- Grasp the black glass cup and put it in the left basket -- Grasp the bath ball and put it in the left basket -- Grasp the coke and put it in the right basket -- Grasp the potato chips and put it in the right basket -- Grasp the ballpoint pen and put it in the left basket -- Grasp the rubiks cube and put it in the left basket -- Grasp the square chewing gum and put it in the right basket -- Grasp the glasses case and put it in the right basket -- Grasp the banana and put it in the right basket -- Grasp the ad milk and put it in the right basket -- Grasp the soda water and put it in the right basket -- Grasp the peach doll and put it in the right basket -- Grasp the spoon and put it in the left basket -- Grasp the blue marker and put it in the right basket -- Grasp the blue cup and put it in the left basket -- Grasp the compass and put it in the left basket -- Grasp the chocolate and put it in the right basket -- Grasp the compass and put it in the right basket -- End -- Grasp the tape and put it in the left basket -- Grasp the peeler and put it in the right basket -- Grasp the yogurt and put it in the right basket -- Grasp the green lemon and put it in the right basket -- Grasp the black marker and put it in the left basket -- Grasp the round chewing gum and put it in the right basket -- Grasp the bread slice and put it in the right basket -- Grasp the square chewing gum and put it in the left basket -- 'null' 
+- subtask: Grasp the round bread and put it in the right basket + subtask_index: 0 +- subtask: Grasp the small basket containing bread and put it in the right basket + subtask_index: 1 +- subtask: Grasp the lemon and put it in the right basket + subtask_index: 2 +- subtask: Grasp the egg yolk pastry and put it in the right basket + subtask_index: 3 +- subtask: Grasp the can and put it in the right basket + subtask_index: 4 +- subtask: Grasp the apple and put it in the right basket + subtask_index: 5 +- subtask: Grasp the long bread and put it in the right basket + subtask_index: 6 +- subtask: Grasp the canned cola and put it in the right basket + subtask_index: 7 +- subtask: Grasp the back scratcher and put it in the left basket + subtask_index: 8 +- subtask: Grasp the glasses case and put it in the left basket + subtask_index: 9 +- subtask: Grasp the peeler and put it in the left basket + subtask_index: 10 +- subtask: Grasp the cookie and put it in the right basket + subtask_index: 11 +- subtask: Grasp the yellow duck and put it in the right basket + subtask_index: 12 +- subtask: Grasp the cleaning agent and put it in the left basket + subtask_index: 13 +- subtask: Grasp the soft cleanser and put it in the left basket + subtask_index: 14 +- subtask: Grasp the waffle and put it in the right basket + subtask_index: 15 +- subtask: Grasp the grey towel and put it in the left basket + subtask_index: 16 +- subtask: Grasp the orange and put it in the right basket + subtask_index: 17 +- subtask: Grasp the brown towel and put it in the left basket + subtask_index: 18 +- subtask: Grasp the pen container and put it in the left basket + subtask_index: 19 +- subtask: Grasp the hard cleanser and put it in the left basket + subtask_index: 20 +- subtask: Grasp the canned cola and put it in the left basket + subtask_index: 21 +- subtask: Grasp the triangle cake and put it in the right basket + subtask_index: 22 +- subtask: Grasp the shower sphere and put it in the left basket + 
subtask_index: 23 +- subtask: Grasp the rubiks cube and put it in the right basket + subtask_index: 24 +- subtask: Grasp the broom and put it in the left basket + subtask_index: 25 +- subtask: Grasp the white eraser and put it in the left basket + subtask_index: 26 +- subtask: Grasp the milk and put it in the right basket + subtask_index: 27 +- subtask: Grasp the lime and put it in the right basket + subtask_index: 28 +- subtask: Grasp the brown towel and put it in the right basket + subtask_index: 29 +- subtask: Grasp the duck toys and put it in the left basket + subtask_index: 30 +- subtask: Grasp the power strip and put it in the left basket + subtask_index: 31 +- subtask: Grasp the red duck and put it in the left basket + subtask_index: 32 +- subtask: Grasp the shampoo and put it in the left basket + subtask_index: 33 +- subtask: Grasp the yellow duck and put it in the left basket + subtask_index: 34 +- subtask: Grasp the peach and put it in the right basket + subtask_index: 35 +- subtask: Abnormal + subtask_index: 36 +- subtask: Grasp the tea cup and put it in the left basket + subtask_index: 37 +- subtask: Grasp the pink marker and put it in the left basket + subtask_index: 38 +- subtask: Grasp the brush and put it in the left basket + subtask_index: 39 +- subtask: Grasp the washing liquid and put it in the left basket + subtask_index: 40 +- subtask: Place the rubiks cube in the center of the table + subtask_index: 41 +- subtask: Grasp the soap and put it in the left basket + subtask_index: 42 +- subtask: Grasp the grey towel and put it in the right basket + subtask_index: 43 +- subtask: Grasp the blue marker and put it in the left basket + subtask_index: 44 +- subtask: Grasp the black glass cup and put it in the left basket + subtask_index: 45 +- subtask: Grasp the bath ball and put it in the left basket + subtask_index: 46 +- subtask: Grasp the coke and put it in the right basket + subtask_index: 47 +- subtask: Grasp the potato chips and put it in the right 
basket + subtask_index: 48 +- subtask: Grasp the ballpoint pen and put it in the left basket + subtask_index: 49 +- subtask: Grasp the rubiks cube and put it in the left basket + subtask_index: 50 +- subtask: Grasp the square chewing gum and put it in the right basket + subtask_index: 51 +- subtask: Grasp the glasses case and put it in the right basket + subtask_index: 52 +- subtask: Grasp the banana and put it in the right basket + subtask_index: 53 +- subtask: Grasp the ad milk and put it in the right basket + subtask_index: 54 +- subtask: Grasp the soda water and put it in the right basket + subtask_index: 55 +- subtask: Grasp the peach doll and put it in the right basket + subtask_index: 56 +- subtask: Grasp the spoon and put it in the left basket + subtask_index: 57 +- subtask: Grasp the blue marker and put it in the right basket + subtask_index: 58 +- subtask: Grasp the blue cup and put it in the left basket + subtask_index: 59 +- subtask: Grasp the compass and put it in the left basket + subtask_index: 60 +- subtask: Grasp the chocolate and put it in the right basket + subtask_index: 61 +- subtask: Grasp the compass and put it in the right basket + subtask_index: 62 +- subtask: End + subtask_index: 63 +- subtask: Grasp the tape and put it in the left basket + subtask_index: 64 +- subtask: Grasp the peeler and put it in the right basket + subtask_index: 65 +- subtask: Grasp the yogurt and put it in the right basket + subtask_index: 66 +- subtask: Grasp the green lemon and put it in the right basket + subtask_index: 67 +- subtask: Grasp the black marker and put it in the left basket + subtask_index: 68 +- subtask: Grasp the round chewing gum and put it in the right basket + subtask_index: 69 +- subtask: Grasp the bread slice and put it in the right basket + subtask_index: 70 +- subtask: Grasp the square chewing gum and put it in the left basket + subtask_index: 71 +- subtask: 'null' + subtask_index: 72 atomic_actions: - grasp - pick - place -robot_name: 
Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -170,13 +244,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -184,8 +255,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 105 total_frames: 86401 fps: 30 @@ -272,11 +342,9 @@ data_structure: 'Galaxea_R1_Lite_classify_object_six_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:104 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -544,7 +612,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -552,7 +620,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: 
https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -579,326 +646,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_classify_object_six - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: commercial_convenience - level2: supermarket - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - place the food in the right basket with the right gripper, and place the non food - items in the left basket with the left gripper. 
- sub_tasks: - - subtask: Grasp the round bread and put it in the right basket - subtask_index: 0 - - subtask: Grasp the small basket containing bread and put it in the right basket - subtask_index: 1 - - subtask: Grasp the lemon and put it in the right basket - subtask_index: 2 - - subtask: Grasp the egg yolk pastry and put it in the right basket - subtask_index: 3 - - subtask: Grasp the can and put it in the right basket - subtask_index: 4 - - subtask: Grasp the apple and put it in the right basket - subtask_index: 5 - - subtask: Grasp the long bread and put it in the right basket - subtask_index: 6 - - subtask: Grasp the canned cola and put it in the right basket - subtask_index: 7 - - subtask: Grasp the back scratcher and put it in the left basket - subtask_index: 8 - - subtask: Grasp the glasses case and put it in the left basket - subtask_index: 9 - - subtask: Grasp the peeler and put it in the left basket - subtask_index: 10 - - subtask: Grasp the cookie and put it in the right basket - subtask_index: 11 - - subtask: Grasp the yellow duck and put it in the right basket - subtask_index: 12 - - subtask: Grasp the cleaning agent and put it in the left basket - subtask_index: 13 - - subtask: Grasp the soft cleanser and put it in the left basket - subtask_index: 14 - - subtask: Grasp the waffle and put it in the right basket - subtask_index: 15 - - subtask: Grasp the grey towel and put it in the left basket - subtask_index: 16 - - subtask: Grasp the orange and put it in the right basket - subtask_index: 17 - - subtask: Grasp the brown towel and put it in the left basket - subtask_index: 18 - - subtask: Grasp the pen container and put it in the left basket - subtask_index: 19 - - subtask: Grasp the hard cleanser and put it in the left basket - subtask_index: 20 - - subtask: Grasp the canned cola and put it in the left basket - subtask_index: 21 - - subtask: Grasp the triangle cake and put it in the right basket - subtask_index: 22 - - subtask: Grasp the shower 
sphere and put it in the left basket - subtask_index: 23 - - subtask: Grasp the rubiks cube and put it in the right basket - subtask_index: 24 - - subtask: Grasp the broom and put it in the left basket - subtask_index: 25 - - subtask: Grasp the white eraser and put it in the left basket - subtask_index: 26 - - subtask: Grasp the milk and put it in the right basket - subtask_index: 27 - - subtask: Grasp the lime and put it in the right basket - subtask_index: 28 - - subtask: Grasp the brown towel and put it in the right basket - subtask_index: 29 - - subtask: Grasp the duck toys and put it in the left basket - subtask_index: 30 - - subtask: Grasp the power strip and put it in the left basket - subtask_index: 31 - - subtask: Grasp the red duck and put it in the left basket - subtask_index: 32 - - subtask: Grasp the shampoo and put it in the left basket - subtask_index: 33 - - subtask: Grasp the yellow duck and put it in the left basket - subtask_index: 34 - - subtask: Grasp the peach and put it in the right basket - subtask_index: 35 - - subtask: Abnormal - subtask_index: 36 - - subtask: Grasp the tea cup and put it in the left basket - subtask_index: 37 - - subtask: Grasp the pink marker and put it in the left basket - subtask_index: 38 - - subtask: Grasp the brush and put it in the left basket - subtask_index: 39 - - subtask: Grasp the washing liquid and put it in the left basket - subtask_index: 40 - - subtask: Place the rubiks cube in the center of the table - subtask_index: 41 - - subtask: Grasp the soap and put it in the left basket - subtask_index: 42 - - subtask: Grasp the grey towel and put it in the right basket - subtask_index: 43 - - subtask: Grasp the blue marker and put it in the left basket - subtask_index: 44 - - subtask: Grasp the black glass cup and put it in the left basket - subtask_index: 45 - - subtask: Grasp the bath ball and put it in the left basket - subtask_index: 46 - - subtask: Grasp the coke and put it in the right basket - 
subtask_index: 47 - - subtask: Grasp the potato chips and put it in the right basket - subtask_index: 48 - - subtask: Grasp the ballpoint pen and put it in the left basket - subtask_index: 49 - - subtask: Grasp the rubiks cube and put it in the left basket - subtask_index: 50 - - subtask: Grasp the square chewing gum and put it in the right basket - subtask_index: 51 - - subtask: Grasp the glasses case and put it in the right basket - subtask_index: 52 - - subtask: Grasp the banana and put it in the right basket - subtask_index: 53 - - subtask: Grasp the ad milk and put it in the right basket - subtask_index: 54 - - subtask: Grasp the soda water and put it in the right basket - subtask_index: 55 - - subtask: Grasp the peach doll and put it in the right basket - subtask_index: 56 - - subtask: Grasp the spoon and put it in the left basket - subtask_index: 57 - - subtask: Grasp the blue marker and put it in the right basket - subtask_index: 58 - - subtask: Grasp the blue cup and put it in the left basket - subtask_index: 59 - - subtask: Grasp the compass and put it in the left basket - subtask_index: 60 - - subtask: Grasp the chocolate and put it in the right basket - subtask_index: 61 - - subtask: Grasp the compass and put it in the right basket - subtask_index: 62 - - subtask: End - subtask_index: 63 - - subtask: Grasp the tape and put it in the left basket - subtask_index: 64 - - subtask: Grasp the peeler and put it in the right basket - subtask_index: 65 - - subtask: Grasp the yogurt and put it in the right basket - subtask_index: 66 - - subtask: Grasp the green lemon and put it in the right basket - subtask_index: 67 - - subtask: Grasp the black marker and put it in the left basket - subtask_index: 68 - - subtask: Grasp the round chewing gum and put it in the right basket - subtask_index: 69 - - subtask: Grasp the bread slice and put it in the right basket - subtask_index: 70 - - subtask: Grasp the square chewing gum and put it in the left basket - subtask_index: 
71 - - subtask: 'null' - subtask_index: 72 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 86401 - dataset_size: 5.25 GB - data_structure: 'Galaxea_R1_Lite_classify_object_six_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (93 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. 
- homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Galaxea_R1_Lite_classify_object_three.yaml b/dataset_info/Galaxea_R1_Lite_classify_object_three.yaml index 281b6bdade9fe9e437c0e9dabd180f676c2c246d..59356c9bb156ae5614e16d4cc48dab67ff402b09 100644 --- a/dataset_info/Galaxea_R1_Lite_classify_object_three.yaml +++ b/dataset_info/Galaxea_R1_Lite_classify_object_three.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: brown_basket level1: baskets level2: brown_basket @@ -69,88 +69,153 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. 
-task_result: 'null' -task_instruction: place the food in the right basket with the right gripper, and place - the non food items in the left basket with the left gripper. +task_instruction: +- place the food in the right basket with the right gripper, and place the non food + items in the left basket with the left gripper. sub_tasks: -- Grasp the rubiks cube and put it in the left basket -- Place the tape in the center of the table -- Grasp the soft cleanser and put it in the left basket -- Grasp the back scratcher and put it in the left basket -- Grasp the apple and put it in the right basket -- Grasp the yellow marker and put it in the left basket -- End -- Grasp the white eraser and put it in the left basket -- Grasp the power strip and put it in the left basket -- Grasp the square chewing gum and put it in the right basket -- Grasp the cleaning agent and put it in the left basket -- Grasp the blue marker pen and put it in the right basket -- Grasp the soda water and put it in the right basket -- Grasp the spoon and put it in the left basket -- Grasp the duck toys and put it in the left basket -- Grasp the blue marker pen and put it in the left basket -- Grasp the shampoo and put it in the left basket -- Grasp the triangle cake and put it in the right basket -- Grasp the brown plate and put it in the left basket -- Grasp the cookie and put it in the right basket -- Grasp the yellow cake and put it in the right basket -- Grasp the shower sphere and put it in the left basket -- Grasp the orange and put it in the right basket -- Grasp the compass and put it in the left basket -- Grasp the round bread and put it in the right basket -- Grasp the lemon and put it in the right basket -- Grasp the egg yolk pastry and put it in the right basket -- Grasp the soap and put it in the left basket -- Grasp the washing liquid and put it in the left basket -- Grasp the hard cleanser and put it in the left basket -- Grasp the milk and put it in the right basket -- Grasp the 
black marker and put it in the left basket -- Grasp the banana and put it in the right basket -- Grasp the black glass cup and put it in the left basket -- Grasp the blue marker and put it in the right basket -- Grasp the bath ball and put it in the left basket -- Abnormal -- Grasp the peeler and put it in the left basket -- Grasp the brown towel and put it in the left basket -- Grasp the peach and put it in the right basket -- Grasp the tea cup and put it in the left basket -- Grasp the brush and put it in the left basket -- Grasp the chocolate and put it in the right basket -- Grasp the grey towel and put it in the left basket -- Place the peach doll in the center of the table -- Grasp the rubiks cube and put it in the right basket -- Grasp the tape and put it in the left basket -- Grasp the bread slice and put it in the right basket -- Grasp the glasses case and put it in the left basket -- Grasp the soda water and put it in the left basket -- Grasp the peach doll and put it in the right basket -- Grasp the blue cup and put it in the left basket -- Grasp the spoon and put it in the right basket -- Grasp the pen container and put it in the left basket -- Grasp the red duck and put it in the left basket -- Grasp the glasses case and put it in the right basket -- Grasp the long bread and put it in the right basket -- Grasp the yogurt and put it in the right basket -- Grasp the potato chips and put it in the right basket -- Grasp the can and put it in the right basket -- Grasp the long bread and put it in the left basket -- Grasp the yellow duck and put it in the left basket -- Grasp the coke and put it in the right basket -- 'null' +- subtask: Grasp the rubiks cube and put it in the left basket + subtask_index: 0 +- subtask: Place the tape in the center of the table + subtask_index: 1 +- subtask: Grasp the soft cleanser and put it in the left basket + subtask_index: 2 +- subtask: Grasp the back scratcher and put it in the left basket + subtask_index: 3 +- subtask: 
Grasp the apple and put it in the right basket + subtask_index: 4 +- subtask: Grasp the yellow marker and put it in the left basket + subtask_index: 5 +- subtask: End + subtask_index: 6 +- subtask: Grasp the white eraser and put it in the left basket + subtask_index: 7 +- subtask: Grasp the power strip and put it in the left basket + subtask_index: 8 +- subtask: Grasp the square chewing gum and put it in the right basket + subtask_index: 9 +- subtask: Grasp the cleaning agent and put it in the left basket + subtask_index: 10 +- subtask: Grasp the blue marker pen and put it in the right basket + subtask_index: 11 +- subtask: Grasp the soda water and put it in the right basket + subtask_index: 12 +- subtask: Grasp the spoon and put it in the left basket + subtask_index: 13 +- subtask: Grasp the duck toys and put it in the left basket + subtask_index: 14 +- subtask: Grasp the blue marker pen and put it in the left basket + subtask_index: 15 +- subtask: Grasp the shampoo and put it in the left basket + subtask_index: 16 +- subtask: Grasp the triangle cake and put it in the right basket + subtask_index: 17 +- subtask: Grasp the brown plate and put it in the left basket + subtask_index: 18 +- subtask: Grasp the cookie and put it in the right basket + subtask_index: 19 +- subtask: Grasp the yellow cake and put it in the right basket + subtask_index: 20 +- subtask: Grasp the shower sphere and put it in the left basket + subtask_index: 21 +- subtask: Grasp the orange and put it in the right basket + subtask_index: 22 +- subtask: Grasp the compass and put it in the left basket + subtask_index: 23 +- subtask: Grasp the round bread and put it in the right basket + subtask_index: 24 +- subtask: Grasp the lemon and put it in the right basket + subtask_index: 25 +- subtask: Grasp the egg yolk pastry and put it in the right basket + subtask_index: 26 +- subtask: Grasp the soap and put it in the left basket + subtask_index: 27 +- subtask: Grasp the washing liquid and put it in the 
left basket + subtask_index: 28 +- subtask: Grasp the hard cleanser and put it in the left basket + subtask_index: 29 +- subtask: Grasp the milk and put it in the right basket + subtask_index: 30 +- subtask: Grasp the black marker and put it in the left basket + subtask_index: 31 +- subtask: Grasp the banana and put it in the right basket + subtask_index: 32 +- subtask: Grasp the black glass cup and put it in the left basket + subtask_index: 33 +- subtask: Grasp the blue marker and put it in the right basket + subtask_index: 34 +- subtask: Grasp the bath ball and put it in the left basket + subtask_index: 35 +- subtask: Abnormal + subtask_index: 36 +- subtask: Grasp the peeler and put it in the left basket + subtask_index: 37 +- subtask: Grasp the brown towel and put it in the left basket + subtask_index: 38 +- subtask: Grasp the peach and put it in the right basket + subtask_index: 39 +- subtask: Grasp the tea cup and put it in the left basket + subtask_index: 40 +- subtask: Grasp the brush and put it in the left basket + subtask_index: 41 +- subtask: Grasp the chocolate and put it in the right basket + subtask_index: 42 +- subtask: Grasp the grey towel and put it in the left basket + subtask_index: 43 +- subtask: Place the peach doll in the center of the table + subtask_index: 44 +- subtask: Grasp the rubiks cube and put it in the right basket + subtask_index: 45 +- subtask: Grasp the tape and put it in the left basket + subtask_index: 46 +- subtask: Grasp the bread slice and put it in the right basket + subtask_index: 47 +- subtask: Grasp the glasses case and put it in the left basket + subtask_index: 48 +- subtask: Grasp the soda water and put it in the left basket + subtask_index: 49 +- subtask: Grasp the peach doll and put it in the right basket + subtask_index: 50 +- subtask: Grasp the blue cup and put it in the left basket + subtask_index: 51 +- subtask: Grasp the spoon and put it in the right basket + subtask_index: 52 +- subtask: Grasp the pen container 
and put it in the left basket + subtask_index: 53 +- subtask: Grasp the red duck and put it in the left basket + subtask_index: 54 +- subtask: Grasp the glasses case and put it in the right basket + subtask_index: 55 +- subtask: Grasp the long bread and put it in the right basket + subtask_index: 56 +- subtask: Grasp the yogurt and put it in the right basket + subtask_index: 57 +- subtask: Grasp the potato chips and put it in the right basket + subtask_index: 58 +- subtask: Grasp the can and put it in the right basket + subtask_index: 59 +- subtask: Grasp the long bread and put it in the left basket + subtask_index: 60 +- subtask: Grasp the yellow duck and put it in the left basket + subtask_index: 61 +- subtask: Grasp the coke and put it in the right basket + subtask_index: 62 +- subtask: 'null' + subtask_index: 63 atomic_actions: - grasp - pick - place -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -161,13 +226,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -175,8 +237,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 197 total_frames: 134891 fps: 30 @@ -263,11 +324,9 @@ data_structure: 'Galaxea_R1_Lite_classify_object_three_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:196 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -535,7 +594,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -543,7 +602,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback 
regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -570,308 +628,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_classify_object_three - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: commercial_convenience - level2: supermarket - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - place the food in the right basket with the right gripper, and place the non food - items in the left basket with the left gripper. 
- sub_tasks: - - subtask: Grasp the rubiks cube and put it in the left basket - subtask_index: 0 - - subtask: Place the tape in the center of the table - subtask_index: 1 - - subtask: Grasp the soft cleanser and put it in the left basket - subtask_index: 2 - - subtask: Grasp the back scratcher and put it in the left basket - subtask_index: 3 - - subtask: Grasp the apple and put it in the right basket - subtask_index: 4 - - subtask: Grasp the yellow marker and put it in the left basket - subtask_index: 5 - - subtask: End - subtask_index: 6 - - subtask: Grasp the white eraser and put it in the left basket - subtask_index: 7 - - subtask: Grasp the power strip and put it in the left basket - subtask_index: 8 - - subtask: Grasp the square chewing gum and put it in the right basket - subtask_index: 9 - - subtask: Grasp the cleaning agent and put it in the left basket - subtask_index: 10 - - subtask: Grasp the blue marker pen and put it in the right basket - subtask_index: 11 - - subtask: Grasp the soda water and put it in the right basket - subtask_index: 12 - - subtask: Grasp the spoon and put it in the left basket - subtask_index: 13 - - subtask: Grasp the duck toys and put it in the left basket - subtask_index: 14 - - subtask: Grasp the blue marker pen and put it in the left basket - subtask_index: 15 - - subtask: Grasp the shampoo and put it in the left basket - subtask_index: 16 - - subtask: Grasp the triangle cake and put it in the right basket - subtask_index: 17 - - subtask: Grasp the brown plate and put it in the left basket - subtask_index: 18 - - subtask: Grasp the cookie and put it in the right basket - subtask_index: 19 - - subtask: Grasp the yellow cake and put it in the right basket - subtask_index: 20 - - subtask: Grasp the shower sphere and put it in the left basket - subtask_index: 21 - - subtask: Grasp the orange and put it in the right basket - subtask_index: 22 - - subtask: Grasp the compass and put it in the left basket - subtask_index: 23 - - 
subtask: Grasp the round bread and put it in the right basket - subtask_index: 24 - - subtask: Grasp the lemon and put it in the right basket - subtask_index: 25 - - subtask: Grasp the egg yolk pastry and put it in the right basket - subtask_index: 26 - - subtask: Grasp the soap and put it in the left basket - subtask_index: 27 - - subtask: Grasp the washing liquid and put it in the left basket - subtask_index: 28 - - subtask: Grasp the hard cleanser and put it in the left basket - subtask_index: 29 - - subtask: Grasp the milk and put it in the right basket - subtask_index: 30 - - subtask: Grasp the black marker and put it in the left basket - subtask_index: 31 - - subtask: Grasp the banana and put it in the right basket - subtask_index: 32 - - subtask: Grasp the black glass cup and put it in the left basket - subtask_index: 33 - - subtask: Grasp the blue marker and put it in the right basket - subtask_index: 34 - - subtask: Grasp the bath ball and put it in the left basket - subtask_index: 35 - - subtask: Abnormal - subtask_index: 36 - - subtask: Grasp the peeler and put it in the left basket - subtask_index: 37 - - subtask: Grasp the brown towel and put it in the left basket - subtask_index: 38 - - subtask: Grasp the peach and put it in the right basket - subtask_index: 39 - - subtask: Grasp the tea cup and put it in the left basket - subtask_index: 40 - - subtask: Grasp the brush and put it in the left basket - subtask_index: 41 - - subtask: Grasp the chocolate and put it in the right basket - subtask_index: 42 - - subtask: Grasp the grey towel and put it in the left basket - subtask_index: 43 - - subtask: Place the peach doll in the center of the table - subtask_index: 44 - - subtask: Grasp the rubiks cube and put it in the right basket - subtask_index: 45 - - subtask: Grasp the tape and put it in the left basket - subtask_index: 46 - - subtask: Grasp the bread slice and put it in the right basket - subtask_index: 47 - - subtask: Grasp the glasses case and put 
it in the left basket - subtask_index: 48 - - subtask: Grasp the soda water and put it in the left basket - subtask_index: 49 - - subtask: Grasp the peach doll and put it in the right basket - subtask_index: 50 - - subtask: Grasp the blue cup and put it in the left basket - subtask_index: 51 - - subtask: Grasp the spoon and put it in the right basket - subtask_index: 52 - - subtask: Grasp the pen container and put it in the left basket - subtask_index: 53 - - subtask: Grasp the red duck and put it in the left basket - subtask_index: 54 - - subtask: Grasp the glasses case and put it in the right basket - subtask_index: 55 - - subtask: Grasp the long bread and put it in the right basket - subtask_index: 56 - - subtask: Grasp the yogurt and put it in the right basket - subtask_index: 57 - - subtask: Grasp the potato chips and put it in the right basket - subtask_index: 58 - - subtask: Grasp the can and put it in the right basket - subtask_index: 59 - - subtask: Grasp the long bread and put it in the left basket - subtask_index: 60 - - subtask: Grasp the yellow duck and put it in the left basket - subtask_index: 61 - - subtask: Grasp the coke and put it in the right basket - subtask_index: 62 - - subtask: 'null' - subtask_index: 63 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 134891 - dataset_size: 7.32 GB - data_structure: 'Galaxea_R1_Lite_classify_object_three_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (185 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. 
- support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null 
-base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 100K-1M diff --git a/dataset_info/Galaxea_R1_Lite_fold_towel_twice.yaml b/dataset_info/Galaxea_R1_Lite_fold_towel_twice.yaml index 2b0460085db6d3e94fea509f547cd8a55b6f3adb..34d6f71d823de1ccd3d9e0f8284d5e7b5dcea4b2 100644 --- a/dataset_info/Galaxea_R1_Lite_fold_towel_twice.yaml +++ b/dataset_info/Galaxea_R1_Lite_fold_towel_twice.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: gray_square_towel level1: daily_necessities level2: gray_square_towel @@ -45,44 +45,64 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: Use the gripper to fold the yellow towel and grey towel in half - twice,then put the yellow towel on the grey towel. +task_instruction: +- Use the gripper to fold the yellow towel and grey towel in half twice,then put the + yellow towel on the grey towel. 
sub_tasks: -- Press the gray towel with the left gripper -- Fold the yellow towel from left to right with left gripper -- Press the gray towel with the right gripper -- Fold the yellow towel upwards with the right gripper -- Fold the gray towel from left to right with left gripper -- Press the yellow towel with the right gripper -- Fold the gray towel from right to left with right gripper -- End -- Place the yellow towel on the gray towel with the left gripper -- Fold the yellow towel upwards with the left gripper -- Place the grey towel on the yellow towel with the left gripper -- Place the grey towel on the yellow towel with the right gripper -- Fold the gray towel upwards with the right gripper -- Press the yellow towel with the left gripper -- Move the position of the yellow towel with the left gripper -- Place the yellow towel on the gray towel with the right gripper -- Fold the gray towel upwards with the left gripper -- Fold the yellow towel from right to left with right gripper -- 'null' +- subtask: Press the gray towel with the left gripper + subtask_index: 0 +- subtask: Fold the yellow towel from left to right with left gripper + subtask_index: 1 +- subtask: Press the gray towel with the right gripper + subtask_index: 2 +- subtask: Fold the yellow towel upwards with the right gripper + subtask_index: 3 +- subtask: Fold the gray towel from left to right with left gripper + subtask_index: 4 +- subtask: Press the yellow towel with the right gripper + subtask_index: 5 +- subtask: Fold the gray towel from right to left with right gripper + subtask_index: 6 +- subtask: End + subtask_index: 7 +- subtask: Place the yellow towel on the gray towel with the left gripper + subtask_index: 8 +- subtask: Fold the yellow towel upwards with the left gripper + subtask_index: 9 +- subtask: Place the grey towel on the yellow towel with the left gripper + subtask_index: 10 +- subtask: Place the grey towel on the yellow towel with the right gripper + subtask_index: 11 +- 
subtask: Fold the gray towel upwards with the right gripper + subtask_index: 12 +- subtask: Press the yellow towel with the left gripper + subtask_index: 13 +- subtask: Move the position of the yellow towel with the left gripper + subtask_index: 14 +- subtask: Place the yellow towel on the gray towel with the right gripper + subtask_index: 15 +- subtask: Fold the gray towel upwards with the left gripper + subtask_index: 16 +- subtask: Fold the yellow towel from right to left with right gripper + subtask_index: 17 +- subtask: 'null' + subtask_index: 18 atomic_actions: - grasp - pick - place - fold -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -93,13 +113,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -107,8 +124,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 46 total_frames: 50022 fps: 30 @@ -195,11 +211,9 @@ data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_fold_towel_twice_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: 
train: 0:45 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -467,7 +481,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -475,7 +489,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -502,219 +515,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. 
- extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_fold_towel_twice - dataset_uuid: eb818363-8a30-492c-8639-78589399913b - scene_type: - level1: household - level2: bedroom - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - Use the gripper to fold the yellow towel and grey towel in half twice,then put - the yellow towel on the grey towel. - sub_tasks: - - subtask: Press the gray towel with the left gripper - subtask_index: 0 - - subtask: Fold the yellow towel from left to right with left gripper - subtask_index: 1 - - subtask: Press the gray towel with the right gripper - subtask_index: 2 - - subtask: Fold the yellow towel upwards with the right gripper - subtask_index: 3 - - subtask: Fold the gray towel from left to right with left gripper - subtask_index: 4 - - subtask: Press the yellow towel with the right gripper - subtask_index: 5 - - subtask: Fold the gray towel from right to left with right gripper - subtask_index: 6 - - subtask: End - subtask_index: 7 - - subtask: Place the yellow towel on the gray towel with the left gripper - subtask_index: 8 - - subtask: Fold the yellow towel upwards with the left gripper - subtask_index: 9 - - subtask: Place the grey towel on the yellow towel with the left gripper - subtask_index: 10 - - subtask: Place the grey towel on the yellow towel with the right gripper - subtask_index: 11 - - subtask: Fold the gray towel upwards with the right gripper - subtask_index: 12 - - subtask: Press the yellow towel with the left gripper - subtask_index: 13 - - subtask: Move the position of the yellow towel with the left gripper - subtask_index: 14 - - subtask: Place the yellow towel on the gray towel with the right gripper - subtask_index: 15 - - 
subtask: Fold the gray towel upwards with the left gripper - subtask_index: 16 - - subtask: Fold the yellow towel from right to left with right gripper - subtask_index: 17 - - subtask: 'null' - subtask_index: 18 - atomic_actions: - - grasp - - pick - - place - - fold - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 50022 - dataset_size: 2.14 GB - data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_fold_towel_twice_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(34 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, 
Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Galaxea_R1_Lite_mix_blue_yellow_large_test_tube.yaml b/dataset_info/Galaxea_R1_Lite_mix_blue_yellow_large_test_tube.yaml index 86d33abbe5c187850b765ecd3b3c584fa7302322..360ff9d3ad10656552be673faf349eb41a4eabf3 100644 --- a/dataset_info/Galaxea_R1_Lite_mix_blue_yellow_large_test_tube.yaml +++ b/dataset_info/Galaxea_R1_Lite_mix_blue_yellow_large_test_tube.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: blue_pigment level1: materials level2: blue_pigment @@ -69,37 +69,48 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the test tube with yellow pigment and the test tube with - blue pigment by grippers and pour them into the beaker. +task_instruction: +- pick up the test tube with yellow pigment and the test tube with blue pigment by + grippers and pour them into the beaker. sub_tasks: -- Grasp the yellow reagent with the right gripper -- Pour the yellow reagent into the graduated cylinder and place the test tube into - the paper cup -- Pour the yellow reagent into the graduated cylinder with the right gripper -- Pour the blue reagent into the graduated cylinder and place the test tube into the - paper cup -- Pour the blue reagent into the graduated cylinder with the left gripper -- End -- Place the test tube into the paper cup with the right gripper -- Place the test tube into the paper cup with the left gripper -- Grasp the blue reagent with the left gripper -- 'null' +- subtask: Grasp the yellow reagent with the right gripper + subtask_index: 0 +- subtask: Pour the yellow reagent into the graduated cylinder and place the test + tube into the paper cup + subtask_index: 1 +- subtask: Pour the yellow reagent into the graduated cylinder with the right gripper + subtask_index: 2 +- subtask: Pour the blue reagent into the graduated cylinder and place the test tube + into the paper cup + subtask_index: 3 +- subtask: Pour the blue reagent into the graduated cylinder with the left 
gripper + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: Place the test tube into the paper cup with the right gripper + subtask_index: 6 +- subtask: Place the test tube into the paper cup with the left gripper + subtask_index: 7 +- subtask: Grasp the blue reagent with the left gripper + subtask_index: 8 +- subtask: 'null' + subtask_index: 9 atomic_actions: - grasp - pick - place - pour -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -110,13 +121,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -124,8 +132,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 51 total_frames: 29341 fps: 30 @@ -212,11 +219,9 @@ data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_mix_blue_yellow_large_test_tube |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:50 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: 
observation.images.cam_head_left_rgb: dtype: video shape: @@ -484,7 +489,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -492,7 +497,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -519,203 +523,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_mix_blue_yellow_large_test_tube - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: office_workspace - level2: office - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. 
- task_instruction: - - pick up the test tube with yellow pigment and the test tube with blue pigment - by grippers and pour them into the beaker. - sub_tasks: - - subtask: Grasp the yellow reagent with the right gripper - subtask_index: 0 - - subtask: Pour the yellow reagent into the graduated cylinder and place the test - tube into the paper cup - subtask_index: 1 - - subtask: Pour the yellow reagent into the graduated cylinder with the right gripper - subtask_index: 2 - - subtask: Pour the blue reagent into the graduated cylinder and place the test - tube into the paper cup - subtask_index: 3 - - subtask: Pour the blue reagent into the graduated cylinder with the left gripper - subtask_index: 4 - - subtask: End - subtask_index: 5 - - subtask: Place the test tube into the paper cup with the right gripper - subtask_index: 6 - - subtask: Place the test tube into the paper cup with the left gripper - subtask_index: 7 - - subtask: Grasp the blue reagent with the left gripper - subtask_index: 8 - - subtask: 'null' - subtask_index: 9 - atomic_actions: - - grasp - - pick - - place - - pour - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 29341 - dataset_size: 1.59 GB - data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_mix_blue_yellow_large_test_tube_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (39 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. 
- support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null 
-base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Galaxea_R1_Lite_mix_blue_yellow_left_large_test_tube.yaml b/dataset_info/Galaxea_R1_Lite_mix_blue_yellow_left_large_test_tube.yaml index fd07ef2a9b53fb44de26f1817e3d24fcc851d87e..ef174ee9a4186ad9f27d9ecd713d29c8d04be797 100644 --- a/dataset_info/Galaxea_R1_Lite_mix_blue_yellow_left_large_test_tube.yaml +++ b/dataset_info/Galaxea_R1_Lite_mix_blue_yellow_left_large_test_tube.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: blue_pigment level1: materials level2: blue_pigment @@ -69,32 +69,40 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the test tube with yellow pigment and the test tube with - blue pigment by grippers and pour them into the beaker. 
+task_instruction: +- pick up the test tube with yellow pigment and the test tube with blue pigment by + grippers and pour them into the beaker. sub_tasks: -- Pour the blue reagent into the graduated cylinder with the left gripper -- Pour the yellow reagent into the graduated cylinder with the left gripper -- Grasp the yellow reagent with the left gripper -- Grasp the blue reagent with the left gripper -- End -- Place the test tube into the bowl with the left gripper -- 'null' +- subtask: Pour the blue reagent into the graduated cylinder with the left gripper + subtask_index: 0 +- subtask: Pour the yellow reagent into the graduated cylinder with the left gripper + subtask_index: 1 +- subtask: Grasp the yellow reagent with the left gripper + subtask_index: 2 +- subtask: Grasp the blue reagent with the left gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: Place the test tube into the bowl with the left gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - pick - place - pour -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -105,13 +113,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -119,8 +124,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 45568 fps: 30 @@ -207,11 +211,9 @@ data_structure: 'Galaxea_R1_Lite_mix_blue_yellow_left_large_test_tube_qced_hardl |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -479,7 +481,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -487,7 +489,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or 
feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -514,195 +515,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_mix_blue_yellow_left_large_test_tube - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: office_workspace - level2: office - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the test tube with yellow pigment and the test tube with blue pigment - by grippers and pour them into the beaker. 
- sub_tasks: - - subtask: Pour the blue reagent into the graduated cylinder with the left gripper - subtask_index: 0 - - subtask: Pour the yellow reagent into the graduated cylinder with the left gripper - subtask_index: 1 - - subtask: Grasp the yellow reagent with the left gripper - subtask_index: 2 - - subtask: Grasp the blue reagent with the left gripper - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: Place the test tube into the bowl with the left gripper - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - pick - - place - - pour - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 45568 - dataset_size: 1.54 GB - data_structure: 'Galaxea_R1_Lite_mix_blue_yellow_left_large_test_tube_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(38 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, 
Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Galaxea_R1_Lite_mix_blue_yellow_left_small_test_tube.yaml b/dataset_info/Galaxea_R1_Lite_mix_blue_yellow_left_small_test_tube.yaml index 6d25f441661d84e82f4160c645ee1837037e623d..e2f6811bb14aa5cb3afa5fe4b0c20a5e4268ab52 100644 --- a/dataset_info/Galaxea_R1_Lite_mix_blue_yellow_left_small_test_tube.yaml +++ b/dataset_info/Galaxea_R1_Lite_mix_blue_yellow_left_small_test_tube.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: blue_pigment level1: materials level2: blue_pigment @@ -69,33 +69,42 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the test tube with yellow pigment and the test tube with - blue pigment by grippers and pour them into the beaker. +task_instruction: +- pick up the test tube with yellow pigment and the test tube with blue pigment by + grippers and pour them into the beaker. sub_tasks: -- Pour the blue reagent into the graduated cylinder with the left gripper -- Pour the yellow reagent into the graduated cylinder with the left gripper -- Grasp the yellow reagent with the left gripper -- Grasp the blue reagent with the left gripper -- End -- Place the test tube into the brown cup with the left gripper -- Grasp the red reagent with the right gripper -- 'null' +- subtask: Pour the blue reagent into the graduated cylinder with the left gripper + subtask_index: 0 +- subtask: Pour the yellow reagent into the graduated cylinder with the left gripper + subtask_index: 1 +- subtask: Grasp the yellow reagent with the left gripper + subtask_index: 2 +- subtask: Grasp the blue reagent with the left gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: Place the test tube into the brown cup with the left gripper + subtask_index: 5 +- subtask: Grasp the red reagent with the right gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - pick - place - pour -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: 
two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -106,13 +115,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -120,8 +126,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 33650 fps: 30 @@ -208,11 +213,9 @@ data_structure: 'Galaxea_R1_Lite_mix_blue_yellow_left_small_test_tube_qced_hardl |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -480,7 +483,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -488,7 +491,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 
repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -515,197 +517,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_mix_blue_yellow_left_small_test_tube - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: office_workspace - level2: office - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the test tube with yellow pigment and the test tube with blue pigment - by grippers and pour them into the beaker. 
- sub_tasks: - - subtask: Pour the blue reagent into the graduated cylinder with the left gripper - subtask_index: 0 - - subtask: Pour the yellow reagent into the graduated cylinder with the left gripper - subtask_index: 1 - - subtask: Grasp the yellow reagent with the left gripper - subtask_index: 2 - - subtask: Grasp the blue reagent with the left gripper - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: Place the test tube into the brown cup with the left gripper - subtask_index: 5 - - subtask: Grasp the red reagent with the right gripper - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - pick - - place - - pour - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 33650 - dataset_size: 1.50 GB - data_structure: 'Galaxea_R1_Lite_mix_blue_yellow_left_small_test_tube_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(38 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, 
Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Galaxea_R1_Lite_mix_blue_yellow_right.yaml b/dataset_info/Galaxea_R1_Lite_mix_blue_yellow_right.yaml index db98b3dc7850b31bbf8e9a5304147076b995a651..173f54dabad5b0445cf1578fccf2fa7f89d6c8cf 100644 --- a/dataset_info/Galaxea_R1_Lite_mix_blue_yellow_right.yaml +++ b/dataset_info/Galaxea_R1_Lite_mix_blue_yellow_right.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: blue_pigment level1: materials level2: blue_pigment @@ -69,42 +69,60 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the test tube with yellow pigment and the test tube with - blue pigment by grippers and pour them into the beaker. +task_instruction: +- pick up the test tube with yellow pigment and the test tube with blue pigment by + grippers and pour them into the beaker. sub_tasks: -- Abnormal -- Place the test tube into the bowl with right gripper -- Grasp the red reagent with the left gripper -- Pour the blue reagent into the graduated cylinder with right gripper -- Pick up the test tube containing the yellow reagent with right gripper -- Grasp the yellow reagent with the right gripper -- Grasp the red reagent with the right gripper -- Place the test tube into the bowl with the right gripper -- Pour the yellow reagent into the graduated cylinder with the right gripper -- End -- Pour the blue reagent into the graduated cylinder with the right gripper -- Pour the yellow reagent into the graduated cylinder with right gripper -- Pick up the test tube containing the blue reagent with right gripper -- Pour the red reagent into the graduated cylinder with the right gripper -- end -- Grasp the blue reagent with the right gripper -- 'null' +- subtask: Abnormal + subtask_index: 0 +- subtask: Place the test tube into the bowl with right gripper + subtask_index: 1 +- subtask: Grasp the red reagent with the left gripper + subtask_index: 2 +- subtask: Pour the blue reagent into the 
graduated cylinder with right gripper + subtask_index: 3 +- subtask: Pick up the test tube containing the yellow reagent with right gripper + subtask_index: 4 +- subtask: Grasp the yellow reagent with the right gripper + subtask_index: 5 +- subtask: Grasp the red reagent with the right gripper + subtask_index: 6 +- subtask: Place the test tube into the bowl with the right gripper + subtask_index: 7 +- subtask: Pour the yellow reagent into the graduated cylinder with the right gripper + subtask_index: 8 +- subtask: End + subtask_index: 9 +- subtask: Pour the blue reagent into the graduated cylinder with the right gripper + subtask_index: 10 +- subtask: Pour the yellow reagent into the graduated cylinder with right gripper + subtask_index: 11 +- subtask: Pick up the test tube containing the blue reagent with right gripper + subtask_index: 12 +- subtask: Pour the red reagent into the graduated cylinder with the right gripper + subtask_index: 13 +- subtask: end + subtask_index: 14 +- subtask: Grasp the blue reagent with the right gripper + subtask_index: 15 +- subtask: 'null' + subtask_index: 16 atomic_actions: - grasp - pick - place - pour -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -115,13 +133,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -129,8 +144,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 32657 fps: 30 @@ -217,11 +231,9 @@ data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_mix_blue_yellow_right_qced_hard |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -489,7 +501,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -497,7 +509,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or 
feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -524,215 +535,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_mix_blue_yellow_right - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: office_workspace - level2: office - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the test tube with yellow pigment and the test tube with blue pigment - by grippers and pour them into the beaker. 
- sub_tasks: - - subtask: Abnormal - subtask_index: 0 - - subtask: Place the test tube into the bowl with right gripper - subtask_index: 1 - - subtask: Grasp the red reagent with the left gripper - subtask_index: 2 - - subtask: Pour the blue reagent into the graduated cylinder with right gripper - subtask_index: 3 - - subtask: Pick up the test tube containing the yellow reagent with right gripper - subtask_index: 4 - - subtask: Grasp the yellow reagent with the right gripper - subtask_index: 5 - - subtask: Grasp the red reagent with the right gripper - subtask_index: 6 - - subtask: Place the test tube into the bowl with the right gripper - subtask_index: 7 - - subtask: Pour the yellow reagent into the graduated cylinder with the right gripper - subtask_index: 8 - - subtask: End - subtask_index: 9 - - subtask: Pour the blue reagent into the graduated cylinder with the right gripper - subtask_index: 10 - - subtask: Pour the yellow reagent into the graduated cylinder with right gripper - subtask_index: 11 - - subtask: Pick up the test tube containing the blue reagent with right gripper - subtask_index: 12 - - subtask: Pour the red reagent into the graduated cylinder with the right gripper - subtask_index: 13 - - subtask: end - subtask_index: 14 - - subtask: Grasp the blue reagent with the right gripper - subtask_index: 15 - - subtask: 'null' - subtask_index: 16 - atomic_actions: - - grasp - - pick - - place - - pour - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 32657 - dataset_size: 665.69 MB - data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_mix_blue_yellow_right_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (38 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. 
- support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null 
-base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Galaxea_R1_Lite_mix_color.yaml b/dataset_info/Galaxea_R1_Lite_mix_color.yaml index 0a5df85d6c69988171ebf591ff794b5d8766a1d8..4136a5cf3fa00dbcb342f3fc28a162803cb66e3e 100644 --- a/dataset_info/Galaxea_R1_Lite_mix_color.yaml +++ b/dataset_info/Galaxea_R1_Lite_mix_color.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: blue_pigment level1: materials level2: blue_pigment @@ -69,45 +69,62 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the test tube with red pigment the test tube with blue pigment - and the test tube with white pigment by grippers and pour them into the beaker. 
+task_instruction: +- pick up the test tube with red pigment the test tube with blue pigment and the test + tube with white pigment by grippers and pour them into the beaker. sub_tasks: -- Pour the red reagent into the graduated cylinder and place the test tube into the - paper cup -- Pour the orange reagent into the graduated cylinder and place the test tube into - the paper cup -- Grasp the red reagent with the left gripper -- Pour the white reagent into the graduated cylinder with the right gripper -- Grasp the red reagent with the right gripper -- Pour the blue reagent into the graduated cylinder and place the test tube into the - paper cup -- Pour the blue reagent into the graduated cylinder with the left gripper -- Pour the red reagent into the graduated cylinder with the left gripper -- End -- Place the test tube into the paper cup with the right gripper -- Place the test tube into the paper cup with the left gripper -- Grasp the blue reagent with the left gripper -- Pour the red reagent into the graduated cylinder with the right gripper -- Grasp the white reagent with the right gripper -- Pour the white reagent into the graduated cylinder and place the test tube into - the paper cup -- 'null' +- subtask: Pour the red reagent into the graduated cylinder and place the test tube + into the paper cup + subtask_index: 0 +- subtask: Pour the orange reagent into the graduated cylinder and place the test + tube into the paper cup + subtask_index: 1 +- subtask: Grasp the red reagent with the left gripper + subtask_index: 2 +- subtask: Pour the white reagent into the graduated cylinder with the right gripper + subtask_index: 3 +- subtask: Grasp the red reagent with the right gripper + subtask_index: 4 +- subtask: Pour the blue reagent into the graduated cylinder and place the test tube + into the paper cup + subtask_index: 5 +- subtask: Pour the blue reagent into the graduated cylinder with the left gripper + subtask_index: 6 +- subtask: Pour the red reagent into the 
graduated cylinder with the left gripper + subtask_index: 7 +- subtask: End + subtask_index: 8 +- subtask: Place the test tube into the paper cup with the right gripper + subtask_index: 9 +- subtask: Place the test tube into the paper cup with the left gripper + subtask_index: 10 +- subtask: Grasp the blue reagent with the left gripper + subtask_index: 11 +- subtask: Pour the red reagent into the graduated cylinder with the right gripper + subtask_index: 12 +- subtask: Grasp the white reagent with the right gripper + subtask_index: 13 +- subtask: Pour the white reagent into the graduated cylinder and place the test tube + into the paper cup + subtask_index: 14 +- subtask: 'null' + subtask_index: 15 atomic_actions: - grasp - pick - place - pour -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -118,13 +135,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -132,8 +146,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 79584 fps: 30 @@ -220,11 
+233,9 @@ data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_mix_color_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -492,7 +503,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -500,7 +511,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -527,217 +537,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. 
- extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_mix_color - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: office_workspace - level2: office - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the test tube with red pigment the test tube with blue pigment and the - test tube with white pigment by grippers and pour them into the beaker. - sub_tasks: - - subtask: Pour the red reagent into the graduated cylinder and place the test tube - into the paper cup - subtask_index: 0 - - subtask: Pour the orange reagent into the graduated cylinder and place the test - tube into the paper cup - subtask_index: 1 - - subtask: Grasp the red reagent with the left gripper - subtask_index: 2 - - subtask: Pour the white reagent into the graduated cylinder with the right gripper - subtask_index: 3 - - subtask: Grasp the red reagent with the right gripper - subtask_index: 4 - - subtask: Pour the blue reagent into the graduated cylinder and place the test - tube into the paper cup - subtask_index: 5 - - subtask: Pour the blue reagent into the graduated cylinder with the left gripper - subtask_index: 6 - - subtask: Pour the red reagent into the graduated cylinder with the left gripper - subtask_index: 7 - - subtask: End - subtask_index: 8 - - subtask: Place the test tube into the paper cup with the right gripper - subtask_index: 9 - - subtask: Place the test tube into the paper cup with the left gripper - subtask_index: 10 - - subtask: Grasp the blue reagent with the left gripper - subtask_index: 11 - - subtask: Pour the red reagent into the graduated cylinder with the right gripper - subtask_index: 12 - - subtask: Grasp the white reagent with the right 
gripper - subtask_index: 13 - - subtask: Pour the white reagent into the graduated cylinder and place the test - tube into the paper cup - subtask_index: 14 - - subtask: 'null' - subtask_index: 15 - atomic_actions: - - grasp - - pick - - place - - pour - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 79584 - dataset_size: 2.99 GB - data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_mix_color_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(38 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, 
Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Galaxea_R1_Lite_mix_color_large_test_tube.yaml b/dataset_info/Galaxea_R1_Lite_mix_color_large_test_tube.yaml index 52585d2526623db4c3537707ac9592009494052b..245c084aab462f11e75a91037940497033790230 100644 --- a/dataset_info/Galaxea_R1_Lite_mix_color_large_test_tube.yaml +++ b/dataset_info/Galaxea_R1_Lite_mix_color_large_test_tube.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: red_pigment level1: materials level2: red_pigment @@ -69,37 +69,50 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up a test tube with pigment on left test tube rack and a test - tube with pigment on right test tube rack by grippers and pour them into the beaker. +task_instruction: +- pick up a test tube with pigment on left test tube rack and a test tube with pigment + on right test tube rack by grippers and pour them into the beaker. sub_tasks: -- Place the test tube into the paper cup with the left gripper -- Pour the blue reagent into the graduated cylinder with the left gripper -- Pour the red reagent into the graduated cylinder with the left gripper -- Pour the red reagent into the graduated cylinder with the right gripper -- Grasp the blue reagent with the left gripper -- End -- Place the test tube into the paper cup with the right gripper -- Grasp the yellow reagent with the right gripper -- Pour the yellow reagent into the graduated cylinder with the right gripper -- Grasp the red reagent with the left gripper -- Grasp the red reagent with the right gripper -- 'null' +- subtask: Place the test tube into the paper cup with the left gripper + subtask_index: 0 +- subtask: Pour the blue reagent into the graduated cylinder with the left gripper + subtask_index: 1 +- subtask: Pour the red reagent into the graduated cylinder with the left gripper + subtask_index: 2 +- subtask: Pour the red reagent into the graduated cylinder with the right gripper + subtask_index: 3 +- subtask: 
Grasp the blue reagent with the left gripper + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: Place the test tube into the paper cup with the right gripper + subtask_index: 6 +- subtask: Grasp the yellow reagent with the right gripper + subtask_index: 7 +- subtask: Pour the yellow reagent into the graduated cylinder with the right gripper + subtask_index: 8 +- subtask: Grasp the red reagent with the left gripper + subtask_index: 9 +- subtask: Grasp the red reagent with the right gripper + subtask_index: 10 +- subtask: 'null' + subtask_index: 11 atomic_actions: - grasp - pick - place - pour -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -110,13 +123,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -124,8 +134,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 121 total_frames: 131656 fps: 30 @@ -212,11 +221,9 @@ data_structure: 'Galaxea_R1_Lite_mix_color_large_test_tube_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: 
train: 0:120 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -484,7 +491,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -492,7 +499,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -519,205 +525,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. 
- extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_mix_color_large_test_tube - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: office_workspace - level2: office - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up a test tube with pigment on left test tube rack and a test tube with pigment - on right test tube rack by grippers and pour them into the beaker. - sub_tasks: - - subtask: Place the test tube into the paper cup with the left gripper - subtask_index: 0 - - subtask: Pour the blue reagent into the graduated cylinder with the left gripper - subtask_index: 1 - - subtask: Pour the red reagent into the graduated cylinder with the left gripper - subtask_index: 2 - - subtask: Pour the red reagent into the graduated cylinder with the right gripper - subtask_index: 3 - - subtask: Grasp the blue reagent with the left gripper - subtask_index: 4 - - subtask: End - subtask_index: 5 - - subtask: Place the test tube into the paper cup with the right gripper - subtask_index: 6 - - subtask: Grasp the yellow reagent with the right gripper - subtask_index: 7 - - subtask: Pour the yellow reagent into the graduated cylinder with the right gripper - subtask_index: 8 - - subtask: Grasp the red reagent with the left gripper - subtask_index: 9 - - subtask: Grasp the red reagent with the right gripper - subtask_index: 10 - - subtask: 'null' - subtask_index: 11 - atomic_actions: - - grasp - - pick - - place - - pour - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 131656 - dataset_size: 4.70 GB - data_structure: 'Galaxea_R1_Lite_mix_color_large_test_tube_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (109 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. 
- support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null 
-base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 100K-1M diff --git a/dataset_info/Galaxea_R1_Lite_mix_color_small_test_tube.yaml b/dataset_info/Galaxea_R1_Lite_mix_color_small_test_tube.yaml index e4e38b2e7066134a6cac54b7a7cf7df8cc4bdb8f..322509cacd9d2c21d931b4d40c77d263e5c3267c 100644 --- a/dataset_info/Galaxea_R1_Lite_mix_color_small_test_tube.yaml +++ b/dataset_info/Galaxea_R1_Lite_mix_color_small_test_tube.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: blue_pigment level1: materials level2: blue_pigment @@ -63,41 +63,58 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the test tube with red pigment and the test tube with blue - pigment by grippers and pour them into the container. 
+task_instruction: +- pick up the test tube with red pigment and the test tube with blue pigment by grippers + and pour them into the container. sub_tasks: -- Pour the blue reagent into the graduated cylinder with the left gripper -- Pour the red reagent into the graduated cylinder with the left gripper -- Pour the yellow reagent into the graduated cylinder with the left gripper -- Pour the red reagent into the graduated cylinder with the right gripper -- Grasp the yellow reagent with the left gripper -- Grasp the blue reagent with the left gripper -- End -- Pour the blue reagent into the graduated cylinder with the right gripper -- Place the test tube into the brown cup with the left gripper -- Grasp the yellow reagent with the right gripper -- Pour the yellow reagent into the graduated cylinder with the right gripper -- Grasp the blue reagent with the right gripper -- Place the test tube into the brown cup with the right gripper -- Grasp the red reagent with the left gripper -- Grasp the red reagent with the right gripper -- 'null' +- subtask: Pour the blue reagent into the graduated cylinder with the left gripper + subtask_index: 0 +- subtask: Pour the red reagent into the graduated cylinder with the left gripper + subtask_index: 1 +- subtask: Pour the yellow reagent into the graduated cylinder with the left gripper + subtask_index: 2 +- subtask: Pour the red reagent into the graduated cylinder with the right gripper + subtask_index: 3 +- subtask: Grasp the yellow reagent with the left gripper + subtask_index: 4 +- subtask: Grasp the blue reagent with the left gripper + subtask_index: 5 +- subtask: End + subtask_index: 6 +- subtask: Pour the blue reagent into the graduated cylinder with the right gripper + subtask_index: 7 +- subtask: Place the test tube into the brown cup with the left gripper + subtask_index: 8 +- subtask: Grasp the yellow reagent with the right gripper + subtask_index: 9 +- subtask: Pour the yellow reagent into the graduated cylinder with the 
right gripper + subtask_index: 10 +- subtask: Grasp the blue reagent with the right gripper + subtask_index: 11 +- subtask: Place the test tube into the brown cup with the right gripper + subtask_index: 12 +- subtask: Grasp the red reagent with the left gripper + subtask_index: 13 +- subtask: Grasp the red reagent with the right gripper + subtask_index: 14 +- subtask: 'null' + subtask_index: 15 atomic_actions: - grasp - pick - place - pour -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -108,13 +125,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -122,8 +136,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 203 total_frames: 147521 fps: 30 @@ -210,11 +223,9 @@ data_structure: 'Galaxea_R1_Lite_mix_color_small_test_tube_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:202 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 
+features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -482,7 +493,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -490,7 +501,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -517,213 +527,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_mix_color_small_test_tube - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: office_workspace - level2: office - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. 
- objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the test tube with red pigment and the test tube with blue pigment by - grippers and pour them into the container. - sub_tasks: - - subtask: Pour the blue reagent into the graduated cylinder with the left gripper - subtask_index: 0 - - subtask: Pour the red reagent into the graduated cylinder with the left gripper - subtask_index: 1 - - subtask: Pour the yellow reagent into the graduated cylinder with the left gripper - subtask_index: 2 - - subtask: Pour the red reagent into the graduated cylinder with the right gripper - subtask_index: 3 - - subtask: Grasp the yellow reagent with the left gripper - subtask_index: 4 - - subtask: Grasp the blue reagent with the left gripper - subtask_index: 5 - - subtask: End - subtask_index: 6 - - subtask: Pour the blue reagent into the graduated cylinder with the right gripper - subtask_index: 7 - - subtask: Place the test tube into the brown cup with the left gripper - subtask_index: 8 - - subtask: Grasp the yellow reagent with the right gripper - subtask_index: 9 - - subtask: Pour the yellow reagent into the graduated cylinder with the right gripper - subtask_index: 10 - - subtask: Grasp the blue reagent with the right gripper - subtask_index: 11 - - subtask: Place the test tube into the brown cup with the right gripper - subtask_index: 12 - - subtask: Grasp the red reagent with the left gripper - subtask_index: 13 - - subtask: Grasp the red reagent with the right gripper - subtask_index: 14 - - subtask: 'null' - subtask_index: 15 - atomic_actions: - - grasp - - pick - - place - - pour - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 147521 - dataset_size: 6.71 GB - data_structure: 'Galaxea_R1_Lite_mix_color_small_test_tube_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (191 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. 
- support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null 
-base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 100K-1M diff --git a/dataset_info/Galaxea_R1_Lite_mix_red_blue_large_test_tube.yaml b/dataset_info/Galaxea_R1_Lite_mix_red_blue_large_test_tube.yaml index 05c194af727eef114c4d166613c466903eaa19f3..91aa6f2194b155cd61839a32f0f1387421a44486 100644 --- a/dataset_info/Galaxea_R1_Lite_mix_red_blue_large_test_tube.yaml +++ b/dataset_info/Galaxea_R1_Lite_mix_red_blue_large_test_tube.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: blue_pigment level1: materials level2: blue_pigment @@ -69,37 +69,48 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the test tube with red pigment and the test tube with blue - pigment by grippers and pour them into the beaker. 
+task_instruction: +- pick up the test tube with red pigment and the test tube with blue pigment by grippers + and pour them into the beaker. sub_tasks: -- Pour the red reagent into the graduated cylinder and place the test tube into the - paper cup -- Grasp the red reagent with the right gripper -- Pour the blue reagent into the graduated cylinder and place the test tube into the - paper cup -- Pour the blue reagent into the graduated cylinder with the left gripper -- End -- Place the test tube into the paper cup with the right gripper -- Place the test tube into the paper cup with the left gripper -- Grasp the blue reagent with the left gripper -- Pour the red reagent into the graduated cylinder with the right gripper -- 'null' +- subtask: Pour the red reagent into the graduated cylinder and place the test tube + into the paper cup + subtask_index: 0 +- subtask: Grasp the red reagent with the right gripper + subtask_index: 1 +- subtask: Pour the blue reagent into the graduated cylinder and place the test tube + into the paper cup + subtask_index: 2 +- subtask: Pour the blue reagent into the graduated cylinder with the left gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: Place the test tube into the paper cup with the right gripper + subtask_index: 5 +- subtask: Place the test tube into the paper cup with the left gripper + subtask_index: 6 +- subtask: Grasp the blue reagent with the left gripper + subtask_index: 7 +- subtask: Pour the red reagent into the graduated cylinder with the right gripper + subtask_index: 8 +- subtask: 'null' + subtask_index: 9 atomic_actions: - grasp - pick - place - pour -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -110,13 +121,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -124,8 +132,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 28433 fps: 30 @@ -212,11 +219,9 @@ data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_mix_red_blue_large_test_tube_qc |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -484,7 +489,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -492,7 +497,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or 
feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -519,203 +523,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_mix_red_blue_large_test_tube - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: office_workspace - level2: office - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the test tube with red pigment and the test tube with blue pigment by - grippers and pour them into the beaker. 
- sub_tasks: - - subtask: Pour the red reagent into the graduated cylinder and place the test tube - into the paper cup - subtask_index: 0 - - subtask: Grasp the red reagent with the right gripper - subtask_index: 1 - - subtask: Pour the blue reagent into the graduated cylinder and place the test - tube into the paper cup - subtask_index: 2 - - subtask: Pour the blue reagent into the graduated cylinder with the left gripper - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: Place the test tube into the paper cup with the right gripper - subtask_index: 5 - - subtask: Place the test tube into the paper cup with the left gripper - subtask_index: 6 - - subtask: Grasp the blue reagent with the left gripper - subtask_index: 7 - - subtask: Pour the red reagent into the graduated cylinder with the right gripper - subtask_index: 8 - - subtask: 'null' - subtask_index: 9 - atomic_actions: - - grasp - - pick - - place - - pour - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 28433 - dataset_size: 1.57 GB - data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_mix_red_blue_large_test_tube_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (38 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. 
- support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null 
-base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Galaxea_R1_Lite_mix_red_blue_left_large_test_tube.yaml b/dataset_info/Galaxea_R1_Lite_mix_red_blue_left_large_test_tube.yaml index 0ab52f11796107c66fde336b893d7ea75d691134..808905cc3259ba3be5d7934d84563f647b710b6b 100644 --- a/dataset_info/Galaxea_R1_Lite_mix_red_blue_left_large_test_tube.yaml +++ b/dataset_info/Galaxea_R1_Lite_mix_red_blue_left_large_test_tube.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: blue_pigment level1: materials level2: blue_pigment @@ -69,32 +69,40 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the test tube with red pigment and the test tube with blue - pigment by grippers and pour them into the container. 
+task_instruction: +- pick up the test tube with red pigment and the test tube with blue pigment by grippers + and pour them into the container. sub_tasks: -- Pour the blue reagent into the graduated cylinder with the left gripper -- Pour the red reagent into the graduated cylinder with the left gripper -- Grasp the blue reagent with the left gripper -- End -- Place the test tube into the bowl with the left gripper -- Grasp the red reagent with the left gripper -- 'null' +- subtask: Pour the blue reagent into the graduated cylinder with the left gripper + subtask_index: 0 +- subtask: Pour the red reagent into the graduated cylinder with the left gripper + subtask_index: 1 +- subtask: Grasp the blue reagent with the left gripper + subtask_index: 2 +- subtask: End + subtask_index: 3 +- subtask: Place the test tube into the bowl with the left gripper + subtask_index: 4 +- subtask: Grasp the red reagent with the left gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - pick - place - pour -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -105,13 +113,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -119,8 +124,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 46733 fps: 30 @@ -207,11 +211,9 @@ data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_mix_red_blue_left_large_test_tu |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -479,7 +481,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -487,7 +489,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or 
feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -514,195 +515,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_mix_red_blue_left_large_test_tube - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: office_workspace - level2: office - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the test tube with red pigment and the test tube with blue pigment by - grippers and pour them into the container. 
- sub_tasks: - - subtask: Pour the blue reagent into the graduated cylinder with the left gripper - subtask_index: 0 - - subtask: Pour the red reagent into the graduated cylinder with the left gripper - subtask_index: 1 - - subtask: Grasp the blue reagent with the left gripper - subtask_index: 2 - - subtask: End - subtask_index: 3 - - subtask: Place the test tube into the bowl with the left gripper - subtask_index: 4 - - subtask: Grasp the red reagent with the left gripper - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - pick - - place - - pour - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 46733 - dataset_size: 1.57 GB - data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_mix_red_blue_left_large_test_tube_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(38 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, 
Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Galaxea_R1_Lite_mix_red_blue_right.yaml b/dataset_info/Galaxea_R1_Lite_mix_red_blue_right.yaml index bb56fe8b0e04d44b82fedf4a600b6c713206a2f0..07c1dcc404e39f0eff7012451ba8894ba91d35fb 100644 --- a/dataset_info/Galaxea_R1_Lite_mix_red_blue_right.yaml +++ b/dataset_info/Galaxea_R1_Lite_mix_red_blue_right.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: red_pigment level1: materials level2: red_pigment @@ -69,40 +69,56 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the test tube with red pigment and the test tube with blue - pigment by grippers and pour them into the container. +task_instruction: +- pick up the test tube with red pigment and the test tube with blue pigment by grippers + and pour them into the container. sub_tasks: -- Pour the red reagent into the graduated cylinder with right gripper -- Pour the red reagent into the graduated cylinder with the right gripper -- Pick up the test tube containing the red reagent with right gripper -- End -- Place the test tube into the bowl with right gripper -- Pour the blue reagent into the graduated cylinder with the right gripper -- Pick up the test tube containing the blue reagent with right gripper -- Pour the blue reagent into the graduated cylinder with right gripper -- Grasp the blue reagent with the right gripper -- Grasp the yellow reagent with the right gripper -- Pour the yellow reagent into the graduated cylinder with the right gripper -- end -- Place the test tube into the bowl with the right gripper -- Grasp the red reagent with the right gripper -- 'null' +- subtask: Pour the red reagent into the graduated cylinder with right gripper + subtask_index: 0 +- subtask: Pour the red reagent into the graduated cylinder with the right gripper + subtask_index: 1 +- subtask: Pick up the test tube containing the red reagent with right gripper + subtask_index: 2 +- subtask: 
End + subtask_index: 3 +- subtask: Place the test tube into the bowl with right gripper + subtask_index: 4 +- subtask: Pour the blue reagent into the graduated cylinder with the right gripper + subtask_index: 5 +- subtask: Pick up the test tube containing the blue reagent with right gripper + subtask_index: 6 +- subtask: Pour the blue reagent into the graduated cylinder with right gripper + subtask_index: 7 +- subtask: Grasp the blue reagent with the right gripper + subtask_index: 8 +- subtask: Grasp the yellow reagent with the right gripper + subtask_index: 9 +- subtask: Pour the yellow reagent into the graduated cylinder with the right gripper + subtask_index: 10 +- subtask: end + subtask_index: 11 +- subtask: Place the test tube into the bowl with the right gripper + subtask_index: 12 +- subtask: Grasp the red reagent with the right gripper + subtask_index: 13 +- subtask: 'null' + subtask_index: 14 atomic_actions: - grasp - pick - place - pour -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -113,13 +129,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -127,8 +140,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 32072 fps: 30 @@ -215,11 +227,9 @@ data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_mix_red_blue_right_qced_hardlin |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -487,7 +497,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -495,7 +505,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or 
feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -522,211 +531,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_mix_red_blue_right - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: office_workspace - level2: office - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the test tube with red pigment and the test tube with blue pigment by - grippers and pour them into the container. 
- sub_tasks: - - subtask: Pour the red reagent into the graduated cylinder with right gripper - subtask_index: 0 - - subtask: Pour the red reagent into the graduated cylinder with the right gripper - subtask_index: 1 - - subtask: Pick up the test tube containing the red reagent with right gripper - subtask_index: 2 - - subtask: End - subtask_index: 3 - - subtask: Place the test tube into the bowl with right gripper - subtask_index: 4 - - subtask: Pour the blue reagent into the graduated cylinder with the right gripper - subtask_index: 5 - - subtask: Pick up the test tube containing the blue reagent with right gripper - subtask_index: 6 - - subtask: Pour the blue reagent into the graduated cylinder with right gripper - subtask_index: 7 - - subtask: Grasp the blue reagent with the right gripper - subtask_index: 8 - - subtask: Grasp the yellow reagent with the right gripper - subtask_index: 9 - - subtask: Pour the yellow reagent into the graduated cylinder with the right gripper - subtask_index: 10 - - subtask: end - subtask_index: 11 - - subtask: Place the test tube into the bowl with the right gripper - subtask_index: 12 - - subtask: Grasp the red reagent with the right gripper - subtask_index: 13 - - subtask: 'null' - subtask_index: 14 - atomic_actions: - - grasp - - pick - - place - - pour - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 32072 - dataset_size: 647.52 MB - data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_mix_red_blue_right_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (38 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. 
- support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null 
-base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Galaxea_R1_Lite_mix_red_yellow_large_test_tube.yaml b/dataset_info/Galaxea_R1_Lite_mix_red_yellow_large_test_tube.yaml index 63de8b66141bff796e7050be1e5a44817ddcc1f9..7ef4d70c1a9a346a298b496b73817102fcf8ee29 100644 --- a/dataset_info/Galaxea_R1_Lite_mix_red_yellow_large_test_tube.yaml +++ b/dataset_info/Galaxea_R1_Lite_mix_red_yellow_large_test_tube.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: blue_pigment level1: materials level2: blue_pigment @@ -69,37 +69,48 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the test tube with yellow pigment and the test tube with - red pigment by grippers and pour them into the beaker. 
+task_instruction: +- pick up the test tube with yellow pigment and the test tube with red pigment by + grippers and pour them into the beaker. sub_tasks: -- Pour the red reagent into the graduated cylinder and place the test tube into the - paper cup -- Grasp the red reagent with the left gripper -- Grasp the yellow reagent with the right gripper -- Pour the yellow reagent into the graduated cylinder and place the test tube into - the paper cup -- Pour the yellow reagent into the graduated cylinder with the right gripper -- Pour the red reagent into the graduated cylinder with the left gripper -- End -- Place the test tube into the paper cup with the right gripper -- Place the test tube into the paper cup with the left gripper -- 'null' +- subtask: Pour the red reagent into the graduated cylinder and place the test tube + into the paper cup + subtask_index: 0 +- subtask: Grasp the red reagent with the left gripper + subtask_index: 1 +- subtask: Grasp the yellow reagent with the right gripper + subtask_index: 2 +- subtask: Pour the yellow reagent into the graduated cylinder and place the test + tube into the paper cup + subtask_index: 3 +- subtask: Pour the yellow reagent into the graduated cylinder with the right gripper + subtask_index: 4 +- subtask: Pour the red reagent into the graduated cylinder with the left gripper + subtask_index: 5 +- subtask: End + subtask_index: 6 +- subtask: Place the test tube into the paper cup with the right gripper + subtask_index: 7 +- subtask: Place the test tube into the paper cup with the left gripper + subtask_index: 8 +- subtask: 'null' + subtask_index: 9 atomic_actions: - grasp - pick - place - pour -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -110,13 +121,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -124,8 +132,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 28289 fps: 30 @@ -212,11 +219,9 @@ data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_mix_red_yellow_large_test_tube_ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -484,7 +489,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -492,7 +497,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or 
feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -519,203 +523,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_mix_red_yellow_large_test_tube - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: office&workspace - level2: office - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the test tube with yellow pigment and the test tube with red pigment by - grippers and pour them into the beaker. 
- sub_tasks: - - subtask: Pour the red reagent into the graduated cylinder and place the test tube - into the paper cup - subtask_index: 0 - - subtask: Grasp the red reagent with the left gripper - subtask_index: 1 - - subtask: Grasp the yellow reagent with the right gripper - subtask_index: 2 - - subtask: Pour the yellow reagent into the graduated cylinder and place the test - tube into the paper cup - subtask_index: 3 - - subtask: Pour the yellow reagent into the graduated cylinder with the right gripper - subtask_index: 4 - - subtask: Pour the red reagent into the graduated cylinder with the left gripper - subtask_index: 5 - - subtask: End - subtask_index: 6 - - subtask: Place the test tube into the paper cup with the right gripper - subtask_index: 7 - - subtask: Place the test tube into the paper cup with the left gripper - subtask_index: 8 - - subtask: 'null' - subtask_index: 9 - atomic_actions: - - grasp - - pick - - place - - pour - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 28289 - dataset_size: 1.58 GB - data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_mix_red_yellow_large_test_tube_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (38 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. 
- support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null 
-base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Galaxea_R1_Lite_mix_red_yellow_left_large_test_tube.yaml b/dataset_info/Galaxea_R1_Lite_mix_red_yellow_left_large_test_tube.yaml index 062da383a4a4817d51a17da620fd802607d6af6a..20662f5708abdfcc8ab7f883033cf83624dd0ff0 100644 --- a/dataset_info/Galaxea_R1_Lite_mix_red_yellow_left_large_test_tube.yaml +++ b/dataset_info/Galaxea_R1_Lite_mix_red_yellow_left_large_test_tube.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: blue_pigment level1: materials level2: blue_pigment @@ -69,32 +69,40 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the test tube with yellow pigment and the test tube with - red pigment by grippers and pour them into the beaker. 
+task_instruction: +- pick up the test tube with yellow pigment and the test tube with red pigment by + grippers and pour them into the beaker. sub_tasks: -- Grasp the red reagent with the left gripper -- Pour the red reagent into the graduated cylinder with the left gripper -- End -- Place the test tube into the pink bowl with the left gripper -- Grasp the yellow reagent with the left gripper -- Pour the yellow reagent into the graduated cylinder with the left gripper -- 'null' +- subtask: Grasp the red reagent with the left gripper + subtask_index: 0 +- subtask: Pour the red reagent into the graduated cylinder with the left gripper + subtask_index: 1 +- subtask: End + subtask_index: 2 +- subtask: Place the test tube into the pink bowl with the left gripper + subtask_index: 3 +- subtask: Grasp the yellow reagent with the left gripper + subtask_index: 4 +- subtask: Pour the yellow reagent into the graduated cylinder with the left gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - pick - place - pour -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -105,13 +113,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -119,8 +124,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 100 total_frames: 89688 fps: 30 @@ -207,11 +211,9 @@ data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_mix_red_yellow_left_large_test_ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:99 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -479,7 +481,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -487,7 +489,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or 
feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -514,195 +515,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_mix_red_yellow_left_large_test_tube - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: office_workspace - level2: office - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the test tube with yellow pigment and the test tube with red pigment by - grippers and pour them into the beaker. 
- sub_tasks: - - subtask: Grasp the red reagent with the left gripper - subtask_index: 0 - - subtask: Pour the red reagent into the graduated cylinder with the left gripper - subtask_index: 1 - - subtask: End - subtask_index: 2 - - subtask: Place the test tube into the pink bowl with the left gripper - subtask_index: 3 - - subtask: Grasp the yellow reagent with the left gripper - subtask_index: 4 - - subtask: Pour the yellow reagent into the graduated cylinder with the left gripper - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - pick - - place - - pour - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 89688 - dataset_size: 2.93 GB - data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_mix_red_yellow_left_large_test_tube_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(88 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, 
Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Galaxea_R1_Lite_mix_red_yellow_right.yaml b/dataset_info/Galaxea_R1_Lite_mix_red_yellow_right.yaml index 9b6e379ee5670b8b3b862e596eeac2b33f3af0f5..d481025b6913b9de04dffedd64519f072564b8fd 100644 --- a/dataset_info/Galaxea_R1_Lite_mix_red_yellow_right.yaml +++ b/dataset_info/Galaxea_R1_Lite_mix_red_yellow_right.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: blue_pigment level1: materials level2: blue_pigment @@ -69,39 +69,54 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: pick up the test tube with yellow pigment and the test tube with - red pigment by grippers and pour them into the beaker. +task_instruction: +- pick up the test tube with yellow pigment and the test tube with red pigment by + grippers and pour them into the beaker. sub_tasks: -- Place the test tube into the bowl with right gripper -- Pick up the test tube containing the yellow reagent with right gripper -- Place the test tube into the pink bowl with the right gripper -- Grasp the yellow reagent with the right gripper -- Grasp the red reagent with the right gripper -- Pick up the test tube containing the red reagent with right gripper -- Place the test tube into the bowl with the right gripper -- Pour the yellow reagent into the graduated cylinder with the right gripper -- Pour the red reagent into the graduated cylinder with right gripper -- End -- Pour the yellow reagent into the graduated cylinder with right gripper -- Pour the red reagent into the graduated cylinder with the right gripper -- end -- 'null' +- subtask: Place the test tube into the bowl with right gripper + subtask_index: 0 +- subtask: Pick up the test tube containing the yellow reagent with right gripper + subtask_index: 1 +- subtask: Place the test tube into the pink bowl with the right gripper + subtask_index: 2 +- subtask: Grasp the yellow reagent with the right gripper + subtask_index: 3 +- subtask: 
Grasp the red reagent with the right gripper + subtask_index: 4 +- subtask: Pick up the test tube containing the red reagent with right gripper + subtask_index: 5 +- subtask: Place the test tube into the bowl with the right gripper + subtask_index: 6 +- subtask: Pour the yellow reagent into the graduated cylinder with the right gripper + subtask_index: 7 +- subtask: Pour the red reagent into the graduated cylinder with right gripper + subtask_index: 8 +- subtask: End + subtask_index: 9 +- subtask: Pour the yellow reagent into the graduated cylinder with right gripper + subtask_index: 10 +- subtask: Pour the red reagent into the graduated cylinder with the right gripper + subtask_index: 11 +- subtask: end + subtask_index: 12 +- subtask: 'null' + subtask_index: 13 atomic_actions: - grasp - pick - place - pour -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -112,13 +127,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -126,8 +138,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 99 total_frames: 61864 fps: 30 @@ -214,11 +225,9 @@ data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_mix_red_yellow_right_qced_hardl |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:98 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -486,7 +495,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -494,7 +503,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or 
feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -521,209 +529,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_mix_red_yellow_right - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: office_workspace - level2: office - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - pick up the test tube with yellow pigment and the test tube with red pigment by - grippers and pour them into the beaker. 
- sub_tasks: - - subtask: Place the test tube into the bowl with right gripper - subtask_index: 0 - - subtask: Pick up the test tube containing the yellow reagent with right gripper - subtask_index: 1 - - subtask: Place the test tube into the pink bowl with the right gripper - subtask_index: 2 - - subtask: Grasp the yellow reagent with the right gripper - subtask_index: 3 - - subtask: Grasp the red reagent with the right gripper - subtask_index: 4 - - subtask: Pick up the test tube containing the red reagent with right gripper - subtask_index: 5 - - subtask: Place the test tube into the bowl with the right gripper - subtask_index: 6 - - subtask: Pour the yellow reagent into the graduated cylinder with the right gripper - subtask_index: 7 - - subtask: Pour the red reagent into the graduated cylinder with right gripper - subtask_index: 8 - - subtask: End - subtask_index: 9 - - subtask: Pour the yellow reagent into the graduated cylinder with right gripper - subtask_index: 10 - - subtask: Pour the red reagent into the graduated cylinder with the right gripper - subtask_index: 11 - - subtask: end - subtask_index: 12 - - subtask: 'null' - subtask_index: 13 - atomic_actions: - - grasp - - pick - - place - - pour - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 61864 - dataset_size: 1.22 GB - data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_mix_red_yellow_right_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (87 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. 
- support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null 
-base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Galaxea_R1_Lite_move_mouse.yaml b/dataset_info/Galaxea_R1_Lite_move_mouse.yaml index 6e12f2a62356f0c6aa1492d8c4659a629c4febd0..380850ed30b29bffd350adf6878d17b9aadac1e6 100644 --- a/dataset_info/Galaxea_R1_Lite_move_mouse.yaml +++ b/dataset_info/Galaxea_R1_Lite_move_mouse.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: mouse level1: computer_peripherals level2: mouse @@ -45,28 +45,33 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use the left gripper to place the mouse on the left side of the - table onto the mouse pad on the right side. +task_instruction: +- use the left gripper to place the mouse on the left side of the table onto the mouse + pad on the right side. 
sub_tasks: -- Grasp the mouse with the left gripper -- Place the mouse on the mouse pad with the left gripper -- End -- 'null' +- subtask: Grasp the mouse with the left gripper + subtask_index: 0 +- subtask: Place the mouse on the mouse pad with the left gripper + subtask_index: 1 +- subtask: End + subtask_index: 2 +- subtask: 'null' + subtask_index: 3 atomic_actions: - grasp - pick - place -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -77,13 +82,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -91,8 +93,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 30 total_frames: 8927 fps: 30 @@ -179,11 +180,9 @@ data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_move_mouse_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:29 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: 
video shape: @@ -451,7 +450,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -459,7 +458,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -486,188 +484,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_move_mouse - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: office_workspace - level2: office - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. 
- task_instruction: - - use the left gripper to place the mouse on the left side of the table onto the - mouse pad on the right side. - sub_tasks: - - subtask: Grasp the mouse with the left gripper - subtask_index: 0 - - subtask: Place the mouse on the mouse pad with the left gripper - subtask_index: 1 - - subtask: End - subtask_index: 2 - - subtask: 'null' - subtask_index: 3 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 8927 - dataset_size: 219.01 MB - data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_move_mouse_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(18 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, 
Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 0-10K diff --git a/dataset_info/Galaxea_R1_Lite_pour_liquid_mrable_bar_counter.yaml b/dataset_info/Galaxea_R1_Lite_pour_liquid_mrable_bar_counter.yaml index 11b73ec68488f8273dd50c7946252f1385e47adf..4ffcb825466ab0908f04af8d89447856dd20e84a 100644 --- a/dataset_info/Galaxea_R1_Lite_pour_liquid_mrable_bar_counter.yaml +++ b/dataset_info/Galaxea_R1_Lite_pour_liquid_mrable_bar_counter.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: marble_bar_counter level1: furniture level2: marble_bar_counter @@ -63,60 +63,95 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use a gripper to pick up the cup and pour the liquid into a bowl - or tray. +task_instruction: +- use a gripper to pick up the cup and pour the liquid into a bowl or tray. sub_tasks: -- Pour the orange juice into the pink bowl with left gripper -- Pour the black tea into the pink bowl with the right gripper -- Grasp the glass of black tea with the right gripper -- Pour the orange juice into the blue basin with right gripper -- Left gripper -- Pour the tea into the pink bowl with left gripper -- Grasp the glass of orange juice with the right gripper -- Pour the tea into the pink bowl with right gripper -- Pick up blue cup filled with tea with right gripper -- Pour the orange juice into the pink bowl with the right gripper -- Pour the orange juice into the pink bowl with the left gripper -- Pour the orange juice into the blue basin with left gripper -- Pour the orange juice into the green bowl with the right gripper -- Place blue cup with tea on the table with right gripper -- Pour the orange juice into the green bowl with the left gripper -- Pour the orange juice into the pink bowl with right gripper -- Place blue cup with orange juice on the table with right gripper -- Pour the black tea into the green bowl with the right gripper -- Pick up blue cup filled with orange juice with right gripper -- Place blue cup with orange juice on the table with left gripper -- 
Grasp the glass of black tea with the left gripper -- Pour the black tea into the green bowl with the left gripper -- Pour the tea into the blue basin with right gripper -- Pick up blue cup filled with tea with left gripper -- Pour the tea into the blue basin with left gripper -- Pour the black tea into the pink bowl with the left gripper -- Grasp the glass of orange juice with the left gripper -- Place the glass cup with the right gripper -- Pick up blue cup filled with orange juice with left gripper -- End -- Place blue cup with shrimp on the table with right gripper -- Right gripper -- Place blue cup with tea on the table with left gripper -- Place the glass cup with the left gripper -- 'null' +- subtask: Pour the orange juice into the pink bowl with left gripper + subtask_index: 0 +- subtask: Pour the black tea into the pink bowl with the right gripper + subtask_index: 1 +- subtask: Grasp the glass of black tea with the right gripper + subtask_index: 2 +- subtask: Pour the orange juice into the blue basin with right gripper + subtask_index: 3 +- subtask: Left gripper + subtask_index: 4 +- subtask: Pour the tea into the pink bowl with left gripper + subtask_index: 5 +- subtask: Grasp the glass of orange juice with the right gripper + subtask_index: 6 +- subtask: Pour the tea into the pink bowl with right gripper + subtask_index: 7 +- subtask: Pick up blue cup filled with tea with right gripper + subtask_index: 8 +- subtask: Pour the orange juice into the pink bowl with the right gripper + subtask_index: 9 +- subtask: Pour the orange juice into the pink bowl with the left gripper + subtask_index: 10 +- subtask: Pour the orange juice into the blue basin with left gripper + subtask_index: 11 +- subtask: Pour the orange juice into the green bowl with the right gripper + subtask_index: 12 +- subtask: Place blue cup with tea on the table with right gripper + subtask_index: 13 +- subtask: Pour the orange juice into the green bowl with the left gripper + subtask_index: 
14 +- subtask: Pour the orange juice into the pink bowl with right gripper + subtask_index: 15 +- subtask: Place blue cup with orange juice on the table with right gripper + subtask_index: 16 +- subtask: Pour the black tea into the green bowl with the right gripper + subtask_index: 17 +- subtask: Pick up blue cup filled with orange juice with right gripper + subtask_index: 18 +- subtask: Place blue cup with orange juice on the table with left gripper + subtask_index: 19 +- subtask: Grasp the glass of black tea with the left gripper + subtask_index: 20 +- subtask: Pour the black tea into the green bowl with the left gripper + subtask_index: 21 +- subtask: Pour the tea into the blue basin with right gripper + subtask_index: 22 +- subtask: Pick up blue cup filled with tea with left gripper + subtask_index: 23 +- subtask: Pour the tea into the blue basin with left gripper + subtask_index: 24 +- subtask: Pour the black tea into the pink bowl with the left gripper + subtask_index: 25 +- subtask: Grasp the glass of orange juice with the left gripper + subtask_index: 26 +- subtask: Place the glass cup with the right gripper + subtask_index: 27 +- subtask: Pick up blue cup filled with orange juice with left gripper + subtask_index: 28 +- subtask: End + subtask_index: 29 +- subtask: Place blue cup with shrimp on the table with right gripper + subtask_index: 30 +- subtask: Right gripper + subtask_index: 31 +- subtask: Place blue cup with tea on the table with left gripper + subtask_index: 32 +- subtask: Place the glass cup with the left gripper + subtask_index: 33 +- subtask: 'null' + subtask_index: 34 atomic_actions: - grasp - pick - place - pour -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -127,13 +162,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -141,8 +173,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 100 total_frames: 43652 fps: 30 @@ -229,11 +260,9 @@ data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_pour_liquid_mrable_bar_counter_ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:99 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -501,7 +530,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -509,7 +538,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or 
feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -536,250 +564,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_pour_liquid_mrable_bar_counter - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: househhold - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use a gripper to pick up the cup and pour the liquid into a bowl or tray. 
- sub_tasks: - - subtask: Pour the orange juice into the pink bowl with left gripper - subtask_index: 0 - - subtask: Pour the black tea into the pink bowl with the right gripper - subtask_index: 1 - - subtask: Grasp the glass of black tea with the right gripper - subtask_index: 2 - - subtask: Pour the orange juice into the blue basin with right gripper - subtask_index: 3 - - subtask: Left gripper - subtask_index: 4 - - subtask: Pour the tea into the pink bowl with left gripper - subtask_index: 5 - - subtask: Grasp the glass of orange juice with the right gripper - subtask_index: 6 - - subtask: Pour the tea into the pink bowl with right gripper - subtask_index: 7 - - subtask: Pick up blue cup filled with tea with right gripper - subtask_index: 8 - - subtask: Pour the orange juice into the pink bowl with the right gripper - subtask_index: 9 - - subtask: Pour the orange juice into the pink bowl with the left gripper - subtask_index: 10 - - subtask: Pour the orange juice into the blue basin with left gripper - subtask_index: 11 - - subtask: Pour the orange juice into the green bowl with the right gripper - subtask_index: 12 - - subtask: Place blue cup with tea on the table with right gripper - subtask_index: 13 - - subtask: Pour the orange juice into the green bowl with the left gripper - subtask_index: 14 - - subtask: Pour the orange juice into the pink bowl with right gripper - subtask_index: 15 - - subtask: Place blue cup with orange juice on the table with right gripper - subtask_index: 16 - - subtask: Pour the black tea into the green bowl with the right gripper - subtask_index: 17 - - subtask: Pick up blue cup filled with orange juice with right gripper - subtask_index: 18 - - subtask: Place blue cup with orange juice on the table with left gripper - subtask_index: 19 - - subtask: Grasp the glass of black tea with the left gripper - subtask_index: 20 - - subtask: Pour the black tea into the green bowl with the left gripper - subtask_index: 21 - - subtask: Pour 
the tea into the blue basin with right gripper - subtask_index: 22 - - subtask: Pick up blue cup filled with tea with left gripper - subtask_index: 23 - - subtask: Pour the tea into the blue basin with left gripper - subtask_index: 24 - - subtask: Pour the black tea into the pink bowl with the left gripper - subtask_index: 25 - - subtask: Grasp the glass of orange juice with the left gripper - subtask_index: 26 - - subtask: Place the glass cup with the right gripper - subtask_index: 27 - - subtask: Pick up blue cup filled with orange juice with left gripper - subtask_index: 28 - - subtask: End - subtask_index: 29 - - subtask: Place blue cup with shrimp on the table with right gripper - subtask_index: 30 - - subtask: Right gripper - subtask_index: 31 - - subtask: Place blue cup with tea on the table with left gripper - subtask_index: 32 - - subtask: Place the glass cup with the left gripper - subtask_index: 33 - - subtask: 'null' - subtask_index: 34 - atomic_actions: - - grasp - - pick - - place - - pour - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 43652 - dataset_size: 1.70 GB - data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_pour_liquid_mrable_bar_counter_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (88 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. 
- support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null 
-base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Galaxea_R1_Lite_pour_powder.yaml b/dataset_info/Galaxea_R1_Lite_pour_powder.yaml new file mode 100644 index 0000000000000000000000000000000000000000..8412c432cf3ff26ab0526dd5bcc24da98542d54d --- /dev/null +++ b/dataset_info/Galaxea_R1_Lite_pour_powder.yaml @@ -0,0 +1,531 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Galaxea_R1_Lite_pour_powder +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: ousehhold + level2: kitchen + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. 
+objects: +- object_name: white_table + level1: furniture + level2: white_table + level3: null + level4: null + level5: null +- object_name: plastic_cup + level1: cups + level2: plastic_cup + level3: null + level4: null + level5: null +- object_name: green_dish + level1: plates + level2: green_dish + level3: null + level4: null + level5: null +- object_name: pink_bowl + level1: plastic_bowls + level2: pink_bowl + level3: null + level4: null + level5: null +- object_name: powder + level1: materials + level2: powder + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- use a gripper to pick up the cup and pour the powder into a bowl or tray. +sub_tasks: +- subtask: Pour the powder into the green bowl with the left gripper + subtask_index: 0 +- subtask: Pour the powder into the pink bowl with the right gripper + subtask_index: 1 +- subtask: Pour the milk powder into the blue basin with left gripper + subtask_index: 2 +- subtask: Place the cup on the table with the right gripper + subtask_index: 3 +- subtask: Pick up blue cup filled with milk powder with left gripper + subtask_index: 4 +- subtask: Pour the milk powder into the pink bowl with left gripper + subtask_index: 5 +- subtask: Place blue cup with coffee powder on the table with left gripper + subtask_index: 6 +- subtask: End + subtask_index: 7 +- subtask: Grasp the glass of powder with the right gripper + subtask_index: 8 +- subtask: Grasp the glass of powder with the left gripper + subtask_index: 9 +- subtask: Place the cup on the table with the left gripper + subtask_index: 10 +- subtask: Place blue cup with milk powder on the table with left gripper + subtask_index: 11 +- subtask: Pour the powder into the pink bowl with the left gripper + subtask_index: 12 +- subtask: Pour the powder into the green bowl with the right gripper + subtask_index: 13 +- subtask: Left gripper + 
subtask_index: 14 +- subtask: 'null' + subtask_index: 15 +atomic_actions: +- grasp +- pick +- place +- pour +robot_name: +- Galaxea_R1_Lite +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_left_rgb +- cam_head_right_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, + pix_fmt=yuv420p + cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, + pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=360x640x3, resolution=640x360, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=360x640x3, resolution=640x360, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: end_rotation_dim +end_translation_dim: end_translation_dim +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 30 + total_frames: 14053 + fps: 30 + total_tasks: 16 + total_videos: 120 + total_chunks: 1 + chunks_size: 1000 + state_dim: 14 + action_dim: 14 + camera_views: 4 + dataset_size: 375.56 MB +frame_num: 14053 +dataset_size: 375.56 MB +data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_pour_powder_qced_hardlink/ + + |-- annotations + + | |-- eef_acc_mag_annotation.jsonl + + | |-- eef_direction_annotation.jsonl + + | |-- eef_velocity_annotation.jsonl + + | |-- gripper_activity_annotation.jsonl + + | |-- gripper_mode_annotation.jsonl + + | |-- scene_annotations.jsonl + + | `-- subtask_annotations.jsonl + + |-- data + + | `-- chunk-000 + + | |-- episode_000000.parquet + + | |-- episode_000001.parquet + + | |-- episode_000002.parquet + + | |-- 
episode_000003.parquet + + | |-- episode_000004.parquet + + | |-- episode_000005.parquet + + | |-- episode_000006.parquet + + | |-- episode_000007.parquet + + | |-- episode_000008.parquet + + | |-- episode_000009.parquet + + | |-- episode_000010.parquet + + | `-- episode_000011.parquet + + | `-- ... (18 more entries) + + |-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- videos + + | `-- chunk-000 + + | |-- observation.images.cam_head_left_rgb + + | |-- observation.images.cam_head_right_rgb + + | |-- observation.images.cam_left_wrist_rgb + + | `-- observation.images.cam_right_wrist_rgb + + |-- info.yaml + + `-- README.md' +splits: + train: 0:29 +features: + observation.images.cam_head_left_rgb: + dtype: video + shape: + - 720 + - 1280 + - 3 + names: + - height + - width + - channels + info: + video.height: 720 + video.width: 1280 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_head_right_rgb: + dtype: video + shape: + - 720 + - 1280 + - 3 + names: + - height + - width + - channels + info: + video.height: 720 + video.width: 1280 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 360 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 360 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 360 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 360 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 
14 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_gripper_open + - right_gripper_open + action: + dtype: float32 + shape: + - 14 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_gripper_open + - right_gripper_open + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: 
int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + shape: + - 2 + dtype: float32 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + shape: + - 2 + dtype: float32 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + shape: + - 2 + dtype: int32 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + shape: + - 2 + dtype: int32 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + shape: + - 2 + dtype: int32 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + shape: + - 2 + dtype: int32 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} +video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 diff --git a/dataset_info/Galaxea_R1_Lite_pour_powder_marble_bar_counter.yaml b/dataset_info/Galaxea_R1_Lite_pour_powder_marble_bar_counter.yaml index 
600b7f4e7fdb3221014df8070d41d5cc3f0c9a12..e219793bad3343dca2950c7349b84423fca6c3dc 100644 --- a/dataset_info/Galaxea_R1_Lite_pour_powder_marble_bar_counter.yaml +++ b/dataset_info/Galaxea_R1_Lite_pour_powder_marble_bar_counter.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: marble_bar_counter level1: furniture level2: marble_bar_counter @@ -63,53 +63,81 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use a gripper to pick up the cup and pour the powder into a bowl - or tray. +task_instruction: +- use a gripper to pick up the cup and pour the powder into a bowl or tray. 
sub_tasks: -- Pour the coffee powder into the pink bowl with right gripper -- Pour the powder into the pink bowl with the right gripper -- Pick up blue cup filled with milk powder with left gripper -- Pour the milk powder into the pink bowl with left gripper -- Place the glass cup down with the right gripper -- Place blue cup with milk powder on the table with left gripper -- Pour the powder into the pink bowl with the left gripper -- Pour the coffee powder into the pink bowl with left gripper -- Left gripper -- Pick up blue cup filled with coffee powder with right gripper -- Pick up blue cup filled with milk powder with right gripper -- Pick up blue cup filled with coffee powder with left gripper -- Place blue cup with coffee powder on the table with right gripper -- Pour the powder into the green bowl with the left gripper -- Pour the coffee powder into the blue basin with right gripper -- Pour the coffee powder into the blue basin with left gripper -- Grasp the glass of powder with the right gripper -- Place the glass cup down with the left gripper -- Pour the milk powder into the pink bowl with right gripper -- Pour the powder into the green bowl with the right gripper -- Pour the milk powder into the blue basin with left gripper -- Place blue cup with milk powder on the table with right gripper -- Place blue cup with coffee powder on the table with left gripper -- Pour the milk powder into the blue basin with right gripper -- End -- Grasp the glass of powder with the left gripper -- Right gripper -- 'null' +- subtask: Pour the coffee powder into the pink bowl with right gripper + subtask_index: 0 +- subtask: Pour the powder into the pink bowl with the right gripper + subtask_index: 1 +- subtask: Pick up blue cup filled with milk powder with left gripper + subtask_index: 2 +- subtask: Pour the milk powder into the pink bowl with left gripper + subtask_index: 3 +- subtask: Place the glass cup down with the right gripper + subtask_index: 4 +- subtask: Place blue 
cup with milk powder on the table with left gripper + subtask_index: 5 +- subtask: Pour the powder into the pink bowl with the left gripper + subtask_index: 6 +- subtask: Pour the coffee powder into the pink bowl with left gripper + subtask_index: 7 +- subtask: Left gripper + subtask_index: 8 +- subtask: Pick up blue cup filled with coffee powder with right gripper + subtask_index: 9 +- subtask: Pick up blue cup filled with milk powder with right gripper + subtask_index: 10 +- subtask: Pick up blue cup filled with coffee powder with left gripper + subtask_index: 11 +- subtask: Place blue cup with coffee powder on the table with right gripper + subtask_index: 12 +- subtask: Pour the powder into the green bowl with the left gripper + subtask_index: 13 +- subtask: Pour the coffee powder into the blue basin with right gripper + subtask_index: 14 +- subtask: Pour the coffee powder into the blue basin with left gripper + subtask_index: 15 +- subtask: Grasp the glass of powder with the right gripper + subtask_index: 16 +- subtask: Place the glass cup down with the left gripper + subtask_index: 17 +- subtask: Pour the milk powder into the pink bowl with right gripper + subtask_index: 18 +- subtask: Pour the powder into the green bowl with the right gripper + subtask_index: 19 +- subtask: Pour the milk powder into the blue basin with left gripper + subtask_index: 20 +- subtask: Place blue cup with milk powder on the table with right gripper + subtask_index: 21 +- subtask: Place blue cup with coffee powder on the table with left gripper + subtask_index: 22 +- subtask: Pour the milk powder into the blue basin with right gripper + subtask_index: 23 +- subtask: End + subtask_index: 24 +- subtask: Grasp the glass of powder with the left gripper + subtask_index: 25 +- subtask: Right gripper + subtask_index: 26 +- subtask: 'null' + subtask_index: 27 atomic_actions: - grasp - pick - place - pour -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: 
two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -120,13 +148,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -134,8 +159,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 100 total_frames: 39829 fps: 30 @@ -222,11 +246,9 @@ data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_pour_powder_marble_bar_counter_ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:99 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -494,7 +516,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -502,7 +524,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 
repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -529,236 +550,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_pour_powder_marble_bar_counter - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: househhold - level2: itchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use a gripper to pick up the cup and pour the powder into a bowl or tray. 
- sub_tasks: - - subtask: Pour the coffee powder into the pink bowl with right gripper - subtask_index: 0 - - subtask: Pour the powder into the pink bowl with the right gripper - subtask_index: 1 - - subtask: Pick up blue cup filled with milk powder with left gripper - subtask_index: 2 - - subtask: Pour the milk powder into the pink bowl with left gripper - subtask_index: 3 - - subtask: Place the glass cup down with the right gripper - subtask_index: 4 - - subtask: Place blue cup with milk powder on the table with left gripper - subtask_index: 5 - - subtask: Pour the powder into the pink bowl with the left gripper - subtask_index: 6 - - subtask: Pour the coffee powder into the pink bowl with left gripper - subtask_index: 7 - - subtask: Left gripper - subtask_index: 8 - - subtask: Pick up blue cup filled with coffee powder with right gripper - subtask_index: 9 - - subtask: Pick up blue cup filled with milk powder with right gripper - subtask_index: 10 - - subtask: Pick up blue cup filled with coffee powder with left gripper - subtask_index: 11 - - subtask: Place blue cup with coffee powder on the table with right gripper - subtask_index: 12 - - subtask: Pour the powder into the green bowl with the left gripper - subtask_index: 13 - - subtask: Pour the coffee powder into the blue basin with right gripper - subtask_index: 14 - - subtask: Pour the coffee powder into the blue basin with left gripper - subtask_index: 15 - - subtask: Grasp the glass of powder with the right gripper - subtask_index: 16 - - subtask: Place the glass cup down with the left gripper - subtask_index: 17 - - subtask: Pour the milk powder into the pink bowl with right gripper - subtask_index: 18 - - subtask: Pour the powder into the green bowl with the right gripper - subtask_index: 19 - - subtask: Pour the milk powder into the blue basin with left gripper - subtask_index: 20 - - subtask: Place blue cup with milk powder on the table with right gripper - subtask_index: 21 - - subtask: Place blue 
cup with coffee powder on the table with left gripper - subtask_index: 22 - - subtask: Pour the milk powder into the blue basin with right gripper - subtask_index: 23 - - subtask: End - subtask_index: 24 - - subtask: Grasp the glass of powder with the left gripper - subtask_index: 25 - - subtask: Right gripper - subtask_index: 26 - - subtask: 'null' - subtask_index: 27 - atomic_actions: - - grasp - - pick - - place - - pour - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 39829 - dataset_size: 1.58 GB - data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_pour_powder_marble_bar_counter_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(88 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, 
Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Galaxea_R1_Lite_pour_solid.yaml b/dataset_info/Galaxea_R1_Lite_pour_solid.yaml index 61bab04c54f200c7d674ebf6326a7c5943bbe47d..99a4bd51cf1fb2e4733bca3260041ab15066fb65 100644 --- a/dataset_info/Galaxea_R1_Lite_pour_solid.yaml +++ b/dataset_info/Galaxea_R1_Lite_pour_solid.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: white_table level1: furniture level2: white_table @@ -63,60 +63,95 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use a gripper to pick up the cup and pour the solid into a bowl - or tray. +task_instruction: +- use a gripper to pick up the cup and pour the solid into a bowl or tray. sub_tasks: -- Grasp the glass of shrimp with the left gripper -- Pick up blue cup filled with coffee beans with left gripper -- Place blue cup with coffee beans on the table with left gripper -- Pour the shrimp into the green bowl with the left gripper -- Pour the coffee beans into the pink bowl with the left gripper -- Pour the coffee beans into the pink bowl with the right gripper -- Left gripper -- Pour the shrimp into the green bowl with the right gripper -- Pick up blue cup filled with shrimp with left gripper -- Place blue cup with shrimp on the table with left gripper -- Pick up blue cup filled with coffee beans with right gripper -- Grasp the glass of coffee beans with the left gripper -- Pour the shrimp into the pink bowl with left gripper -- Pour the coffee beans into the green bowl with the left gripper -- Pour the coffee beans into the pink bowl with left gripper -- Grasp the glass of coffee beans with the right gripper -- Pour the coffee beans into the green plate with the right gripper -- Pour the shrimp into the pink bowl with the right gripper -- Grasp the glass of shrimp with the right gripper -- Pour the coffee beans into the green plate with the left gripper -- Place blue cup with coffee 
powder on the table with left gripper -- Place the glass cup with the right gripper -- Place blue cup with coffee beans on the table with right gripper -- End -- Pour the coffee beans into the green bowl with the right gripper -- Place blue cup with shrimp on the table with right gripper -- Pick up blue cup filled with shrimp with right gripper -- Pour the shrimp into the pink bowl with right gripper -- Pour the shrimp into the pink bowl with the left gripper -- Pour the shrimp into the blue basin with left gripper -- Pour the coffee beans into the blue basin with right gripper -- Right gripper -- Pour the shrimp into the blue basin with right gripper -- Place the glass cup with the left gripper -- 'null' +- subtask: Grasp the glass of shrimp with the left gripper + subtask_index: 0 +- subtask: Pick up blue cup filled with coffee beans with left gripper + subtask_index: 1 +- subtask: Place blue cup with coffee beans on the table with left gripper + subtask_index: 2 +- subtask: Pour the shrimp into the green bowl with the left gripper + subtask_index: 3 +- subtask: Pour the coffee beans into the pink bowl with the left gripper + subtask_index: 4 +- subtask: Pour the coffee beans into the pink bowl with the right gripper + subtask_index: 5 +- subtask: Left gripper + subtask_index: 6 +- subtask: Pour the shrimp into the green bowl with the right gripper + subtask_index: 7 +- subtask: Pick up blue cup filled with shrimp with left gripper + subtask_index: 8 +- subtask: Place blue cup with shrimp on the table with left gripper + subtask_index: 9 +- subtask: Pick up blue cup filled with coffee beans with right gripper + subtask_index: 10 +- subtask: Grasp the glass of coffee beans with the left gripper + subtask_index: 11 +- subtask: Pour the shrimp into the pink bowl with left gripper + subtask_index: 12 +- subtask: Pour the coffee beans into the green bowl with the left gripper + subtask_index: 13 +- subtask: Pour the coffee beans into the pink bowl with left gripper + 
subtask_index: 14 +- subtask: Grasp the glass of coffee beans with the right gripper + subtask_index: 15 +- subtask: Pour the coffee beans into the green plate with the right gripper + subtask_index: 16 +- subtask: Pour the shrimp into the pink bowl with the right gripper + subtask_index: 17 +- subtask: Grasp the glass of shrimp with the right gripper + subtask_index: 18 +- subtask: Pour the coffee beans into the green plate with the left gripper + subtask_index: 19 +- subtask: Place blue cup with coffee powder on the table with left gripper + subtask_index: 20 +- subtask: Place the glass cup with the right gripper + subtask_index: 21 +- subtask: Place blue cup with coffee beans on the table with right gripper + subtask_index: 22 +- subtask: End + subtask_index: 23 +- subtask: Pour the coffee beans into the green bowl with the right gripper + subtask_index: 24 +- subtask: Place blue cup with shrimp on the table with right gripper + subtask_index: 25 +- subtask: Pick up blue cup filled with shrimp with right gripper + subtask_index: 26 +- subtask: Pour the shrimp into the pink bowl with right gripper + subtask_index: 27 +- subtask: Pour the shrimp into the pink bowl with the left gripper + subtask_index: 28 +- subtask: Pour the shrimp into the blue basin with left gripper + subtask_index: 29 +- subtask: Pour the coffee beans into the blue basin with right gripper + subtask_index: 30 +- subtask: Right gripper + subtask_index: 31 +- subtask: Pour the shrimp into the blue basin with right gripper + subtask_index: 32 +- subtask: Place the glass cup with the left gripper + subtask_index: 33 +- subtask: 'null' + subtask_index: 34 atomic_actions: - grasp - pick - place - pour -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -127,13 +162,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -141,8 +173,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 39 total_frames: 16353 fps: 30 @@ -229,11 +260,9 @@ data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_pour_solid_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:38 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -501,7 +530,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -509,7 +538,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback 
regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -536,250 +564,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_pour_solid - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: ousehhold - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use a gripper to pick up the cup and pour the solid into a bowl or tray. 
- sub_tasks: - - subtask: Grasp the glass of shrimp with the left gripper - subtask_index: 0 - - subtask: Pick up blue cup filled with coffee beans with left gripper - subtask_index: 1 - - subtask: Place blue cup with coffee beans on the table with left gripper - subtask_index: 2 - - subtask: Pour the shrimp into the green bowl with the left gripper - subtask_index: 3 - - subtask: Pour the coffee beans into the pink bowl with the left gripper - subtask_index: 4 - - subtask: Pour the coffee beans into the pink bowl with the right gripper - subtask_index: 5 - - subtask: Left gripper - subtask_index: 6 - - subtask: Pour the shrimp into the green bowl with the right gripper - subtask_index: 7 - - subtask: Pick up blue cup filled with shrimp with left gripper - subtask_index: 8 - - subtask: Place blue cup with shrimp on the table with left gripper - subtask_index: 9 - - subtask: Pick up blue cup filled with coffee beans with right gripper - subtask_index: 10 - - subtask: Grasp the glass of coffee beans with the left gripper - subtask_index: 11 - - subtask: Pour the shrimp into the pink bowl with left gripper - subtask_index: 12 - - subtask: Pour the coffee beans into the green bowl with the left gripper - subtask_index: 13 - - subtask: Pour the coffee beans into the pink bowl with left gripper - subtask_index: 14 - - subtask: Grasp the glass of coffee beans with the right gripper - subtask_index: 15 - - subtask: Pour the coffee beans into the green plate with the right gripper - subtask_index: 16 - - subtask: Pour the shrimp into the pink bowl with the right gripper - subtask_index: 17 - - subtask: Grasp the glass of shrimp with the right gripper - subtask_index: 18 - - subtask: Pour the coffee beans into the green plate with the left gripper - subtask_index: 19 - - subtask: Place blue cup with coffee powder on the table with left gripper - subtask_index: 20 - - subtask: Place the glass cup with the right gripper - subtask_index: 21 - - subtask: Place blue cup with 
coffee beans on the table with right gripper - subtask_index: 22 - - subtask: End - subtask_index: 23 - - subtask: Pour the coffee beans into the green bowl with the right gripper - subtask_index: 24 - - subtask: Place blue cup with shrimp on the table with right gripper - subtask_index: 25 - - subtask: Pick up blue cup filled with shrimp with right gripper - subtask_index: 26 - - subtask: Pour the shrimp into the pink bowl with right gripper - subtask_index: 27 - - subtask: Pour the shrimp into the pink bowl with the left gripper - subtask_index: 28 - - subtask: Pour the shrimp into the blue basin with left gripper - subtask_index: 29 - - subtask: Pour the coffee beans into the blue basin with right gripper - subtask_index: 30 - - subtask: Right gripper - subtask_index: 31 - - subtask: Pour the shrimp into the blue basin with right gripper - subtask_index: 32 - - subtask: Place the glass cup with the left gripper - subtask_index: 33 - - subtask: 'null' - subtask_index: 34 - atomic_actions: - - grasp - - pick - - place - - pour - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 16353 - dataset_size: 459.77 MB - data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_pour_solid_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (27 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. 
- support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null 
-base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Galaxea_R1_Lite_pour_solid_marble_bar_counter.yaml b/dataset_info/Galaxea_R1_Lite_pour_solid_marble_bar_counter.yaml index 1a772d715a3ceef8875a011c81a6f0a94c34d047..f44a4f9a5fbbab5b118dd713773b0e41d82c4cc7 100644 --- a/dataset_info/Galaxea_R1_Lite_pour_solid_marble_bar_counter.yaml +++ b/dataset_info/Galaxea_R1_Lite_pour_solid_marble_bar_counter.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: marble_bar_counter level1: furniture level2: marble_bar_counter @@ -63,42 +63,59 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use a gripper to pick up the cup and pour the solid into a bowl - or tray. +task_instruction: +- use a gripper to pick up the cup and pour the solid into a bowl or tray. 
sub_tasks: -- Abnormal -- Grasp the glass of shrimp with the left gripper -- Pour the shrimp beans into the green bowl with the left gripper -- Pour the shrimp beans into the green bowl with the right gripper -- Grasp the glass of coffee beans with the right gripper -- Grasp the glass of coffee beans with the left gripper -- Place the glass cup with the right gripper -- Pour the shrimp into the pink bowl with the left gripper -- End -- Pour the coffee beans into the green bowl with the right gripper -- Pour the coffee beans into the pink bowl with the left gripper -- Pour the shrimp into the pink bowl with the right gripper -- Pour the coffee beans into the pink bowl with the right gripper -- Grasp the glass of shrimp with the right gripper -- Place the glass cup with the left gripper -- Pour the coffee beans into the green bowl with the left gripper -- 'null' +- subtask: Abnormal + subtask_index: 0 +- subtask: Grasp the glass of shrimp with the left gripper + subtask_index: 1 +- subtask: Pour the shrimp beans into the green bowl with the left gripper + subtask_index: 2 +- subtask: Pour the shrimp beans into the green bowl with the right gripper + subtask_index: 3 +- subtask: Grasp the glass of coffee beans with the right gripper + subtask_index: 4 +- subtask: Grasp the glass of coffee beans with the left gripper + subtask_index: 5 +- subtask: Place the glass cup with the right gripper + subtask_index: 6 +- subtask: Pour the shrimp into the pink bowl with the left gripper + subtask_index: 7 +- subtask: End + subtask_index: 8 +- subtask: Pour the coffee beans into the green bowl with the right gripper + subtask_index: 9 +- subtask: Pour the coffee beans into the pink bowl with the left gripper + subtask_index: 10 +- subtask: Pour the shrimp into the pink bowl with the right gripper + subtask_index: 11 +- subtask: Pour the coffee beans into the pink bowl with the right gripper + subtask_index: 12 +- subtask: Grasp the glass of shrimp with the right gripper + 
subtask_index: 13 +- subtask: Place the glass cup with the left gripper + subtask_index: 14 +- subtask: Pour the coffee beans into the green bowl with the left gripper + subtask_index: 15 +- subtask: 'null' + subtask_index: 16 atomic_actions: - grasp - pick - place - pour -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -109,13 +126,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -123,8 +137,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 85 total_frames: 31250 fps: 30 @@ -211,11 +224,9 @@ data_structure: 'Galaxea_R1_Lite_pour_solid_marble_bar_counter_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:84 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -483,7 +494,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: 
contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -491,7 +502,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -518,214 +528,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_pour_solid_marble_bar_counter - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: househhold - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use a gripper to pick up the cup and pour the solid into a bowl or tray. 
- sub_tasks: - - subtask: Abnormal - subtask_index: 0 - - subtask: Grasp the glass of shrimp with the left gripper - subtask_index: 1 - - subtask: Pour the shrimp beans into the green bowl with the left gripper - subtask_index: 2 - - subtask: Pour the shrimp beans into the green bowl with the right gripper - subtask_index: 3 - - subtask: Grasp the glass of coffee beans with the right gripper - subtask_index: 4 - - subtask: Grasp the glass of coffee beans with the left gripper - subtask_index: 5 - - subtask: Place the glass cup with the right gripper - subtask_index: 6 - - subtask: Pour the shrimp into the pink bowl with the left gripper - subtask_index: 7 - - subtask: End - subtask_index: 8 - - subtask: Pour the coffee beans into the green bowl with the right gripper - subtask_index: 9 - - subtask: Pour the coffee beans into the pink bowl with the left gripper - subtask_index: 10 - - subtask: Pour the shrimp into the pink bowl with the right gripper - subtask_index: 11 - - subtask: Pour the coffee beans into the pink bowl with the right gripper - subtask_index: 12 - - subtask: Grasp the glass of shrimp with the right gripper - subtask_index: 13 - - subtask: Place the glass cup with the left gripper - subtask_index: 14 - - subtask: Pour the coffee beans into the green bowl with the left gripper - subtask_index: 15 - - subtask: 'null' - subtask_index: 16 - atomic_actions: - - grasp - - pick - - place - - pour - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 31250 - dataset_size: 1.29 GB - data_structure: 'Galaxea_R1_Lite_pour_solid_marble_bar_counter_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (73 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. 
- support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null 
-base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Galaxea_R1_Lite_pour_water.yaml b/dataset_info/Galaxea_R1_Lite_pour_water.yaml index 7aa8525856cbab6dee0d1c06f465f604ebd91fd3..f35703ecc2e11753e0753dd3d60af0b9b53f3c34 100644 --- a/dataset_info/Galaxea_R1_Lite_pour_water.yaml +++ b/dataset_info/Galaxea_R1_Lite_pour_water.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: white_table level1: furniture level2: white_table @@ -51,70 +51,117 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use the gripper to pour the water into the other two cups table. +task_instruction: +- use the gripper to pour the water into the other two cups table. 
sub_tasks: -- Pick up the gray plastic cup with left gripper -- Pour water from blue cup to white cup with the left gripper -- Pour water from white cup to blue cup with the right gripper -- Grasp the green cup with the left gripper -- Place the blue plastic cup on the table with left gripper -- Pour water from blue cup to green cup with the right gripper -- Pour water from white cup to green cup with the right gripper -- Left gripper -- Pour water into the green plastic cup with right gripper -- Pour water from blue cup to green cup with the left gripper -- Grasp the blue cup with the left gripper -- Pick up the green plastic cup with right gripper -- Pour water into the blue plastic cup with left gripper -- Place the white cup with the right gripper -- Grasp the white cup with the right gripper -- Pick up the blue plastic cup with right gripper -- Place the white cup with the left gripper -- Place the blue cup with the right gripper -- Place the gray plastic cup on the table with left gripper -- Pick up the green plastic cup with left gripper -- Pour water into the blue plastic cup with right gripper -- Pick up the gray plastic cup with right gripper -- Grasp the green cup with the right gripper -- Pick up the blue plastic cup with left gripper -- Grasp the white cup with the left gripper -- Pour water from white cup to green cup with the left gripper -- Pour water from blue cup to white cup with the right gripper -- Place the blue plastic cup on the table with right gripper -- Place the gray plastic cup on the table with right gripper -- Pour water from white cup to blue cup with the left gripper -- Right gripper -- Place the green plastic cup on the table with left gripper -- Pour water from green cup to blue cup with the right gripper -- Place the green plastic cup on the table with right gripper -- Grasp the blue cup with the right gripper -- Pour water from green cup to blue cup with the left gripper -- Place the green cup with the right gripper -- Place the 
green cup with the left gripper -- End -- Pour water from green cup to white cup with the right gripper -- Place the blue cup with the left gripper -- Pour water from green cup to white cup with the left gripper -- Pour water into the green plastic cup with left gripper -- Pour water into the gray plastic cup with right gripper -- Pour water into the gray plastic cup with left gripper -- 'null' +- subtask: Pick up the gray plastic cup with left gripper + subtask_index: 0 +- subtask: Pour water from blue cup to white cup with the left gripper + subtask_index: 1 +- subtask: Pour water from white cup to blue cup with the right gripper + subtask_index: 2 +- subtask: Grasp the green cup with the left gripper + subtask_index: 3 +- subtask: Place the blue plastic cup on the table with left gripper + subtask_index: 4 +- subtask: Pour water from blue cup to green cup with the right gripper + subtask_index: 5 +- subtask: Pour water from white cup to green cup with the right gripper + subtask_index: 6 +- subtask: Left gripper + subtask_index: 7 +- subtask: Pour water into the green plastic cup with right gripper + subtask_index: 8 +- subtask: Pour water from blue cup to green cup with the left gripper + subtask_index: 9 +- subtask: Grasp the blue cup with the left gripper + subtask_index: 10 +- subtask: Pick up the green plastic cup with right gripper + subtask_index: 11 +- subtask: Pour water into the blue plastic cup with left gripper + subtask_index: 12 +- subtask: Place the white cup with the right gripper + subtask_index: 13 +- subtask: Grasp the white cup with the right gripper + subtask_index: 14 +- subtask: Pick up the blue plastic cup with right gripper + subtask_index: 15 +- subtask: Place the white cup with the left gripper + subtask_index: 16 +- subtask: Place the blue cup with the right gripper + subtask_index: 17 +- subtask: Place the gray plastic cup on the table with left gripper + subtask_index: 18 +- subtask: Pick up the green plastic cup with left gripper + 
subtask_index: 19 +- subtask: Pour water into the blue plastic cup with right gripper + subtask_index: 20 +- subtask: Pick up the gray plastic cup with right gripper + subtask_index: 21 +- subtask: Grasp the green cup with the right gripper + subtask_index: 22 +- subtask: Pick up the blue plastic cup with left gripper + subtask_index: 23 +- subtask: Grasp the white cup with the left gripper + subtask_index: 24 +- subtask: Pour water from white cup to green cup with the left gripper + subtask_index: 25 +- subtask: Pour water from blue cup to white cup with the right gripper + subtask_index: 26 +- subtask: Place the blue plastic cup on the table with right gripper + subtask_index: 27 +- subtask: Place the gray plastic cup on the table with right gripper + subtask_index: 28 +- subtask: Pour water from white cup to blue cup with the left gripper + subtask_index: 29 +- subtask: Right gripper + subtask_index: 30 +- subtask: Place the green plastic cup on the table with left gripper + subtask_index: 31 +- subtask: Pour water from green cup to blue cup with the right gripper + subtask_index: 32 +- subtask: Place the green plastic cup on the table with right gripper + subtask_index: 33 +- subtask: Grasp the blue cup with the right gripper + subtask_index: 34 +- subtask: Pour water from green cup to blue cup with the left gripper + subtask_index: 35 +- subtask: Place the green cup with the right gripper + subtask_index: 36 +- subtask: Place the green cup with the left gripper + subtask_index: 37 +- subtask: End + subtask_index: 38 +- subtask: Pour water from green cup to white cup with the right gripper + subtask_index: 39 +- subtask: Place the blue cup with the left gripper + subtask_index: 40 +- subtask: Pour water from green cup to white cup with the left gripper + subtask_index: 41 +- subtask: Pour water into the green plastic cup with left gripper + subtask_index: 42 +- subtask: Pour water into the gray plastic cup with right gripper + subtask_index: 43 +- subtask: Pour 
water into the gray plastic cup with left gripper + subtask_index: 44 +- subtask: 'null' + subtask_index: 45 atomic_actions: - grasp - pick - place - pour -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -125,13 +172,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -139,8 +183,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 100 total_frames: 43201 fps: 30 @@ -227,11 +270,9 @@ data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_pour_water_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:99 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -499,7 +540,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an 
extended format based on LeRobot and is @@ -507,7 +548,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -534,272 +574,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_pour_water - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use the gripper to pour the water into the other two cups table. 
- sub_tasks: - - subtask: Pick up the gray plastic cup with left gripper - subtask_index: 0 - - subtask: Pour water from blue cup to white cup with the left gripper - subtask_index: 1 - - subtask: Pour water from white cup to blue cup with the right gripper - subtask_index: 2 - - subtask: Grasp the green cup with the left gripper - subtask_index: 3 - - subtask: Place the blue plastic cup on the table with left gripper - subtask_index: 4 - - subtask: Pour water from blue cup to green cup with the right gripper - subtask_index: 5 - - subtask: Pour water from white cup to green cup with the right gripper - subtask_index: 6 - - subtask: Left gripper - subtask_index: 7 - - subtask: Pour water into the green plastic cup with right gripper - subtask_index: 8 - - subtask: Pour water from blue cup to green cup with the left gripper - subtask_index: 9 - - subtask: Grasp the blue cup with the left gripper - subtask_index: 10 - - subtask: Pick up the green plastic cup with right gripper - subtask_index: 11 - - subtask: Pour water into the blue plastic cup with left gripper - subtask_index: 12 - - subtask: Place the white cup with the right gripper - subtask_index: 13 - - subtask: Grasp the white cup with the right gripper - subtask_index: 14 - - subtask: Pick up the blue plastic cup with right gripper - subtask_index: 15 - - subtask: Place the white cup with the left gripper - subtask_index: 16 - - subtask: Place the blue cup with the right gripper - subtask_index: 17 - - subtask: Place the gray plastic cup on the table with left gripper - subtask_index: 18 - - subtask: Pick up the green plastic cup with left gripper - subtask_index: 19 - - subtask: Pour water into the blue plastic cup with right gripper - subtask_index: 20 - - subtask: Pick up the gray plastic cup with right gripper - subtask_index: 21 - - subtask: Grasp the green cup with the right gripper - subtask_index: 22 - - subtask: Pick up the blue plastic cup with left gripper - subtask_index: 23 - - subtask: Grasp 
the white cup with the left gripper - subtask_index: 24 - - subtask: Pour water from white cup to green cup with the left gripper - subtask_index: 25 - - subtask: Pour water from blue cup to white cup with the right gripper - subtask_index: 26 - - subtask: Place the blue plastic cup on the table with right gripper - subtask_index: 27 - - subtask: Place the gray plastic cup on the table with right gripper - subtask_index: 28 - - subtask: Pour water from white cup to blue cup with the left gripper - subtask_index: 29 - - subtask: Right gripper - subtask_index: 30 - - subtask: Place the green plastic cup on the table with left gripper - subtask_index: 31 - - subtask: Pour water from green cup to blue cup with the right gripper - subtask_index: 32 - - subtask: Place the green plastic cup on the table with right gripper - subtask_index: 33 - - subtask: Grasp the blue cup with the right gripper - subtask_index: 34 - - subtask: Pour water from green cup to blue cup with the left gripper - subtask_index: 35 - - subtask: Place the green cup with the right gripper - subtask_index: 36 - - subtask: Place the green cup with the left gripper - subtask_index: 37 - - subtask: End - subtask_index: 38 - - subtask: Pour water from green cup to white cup with the right gripper - subtask_index: 39 - - subtask: Place the blue cup with the left gripper - subtask_index: 40 - - subtask: Pour water from green cup to white cup with the left gripper - subtask_index: 41 - - subtask: Pour water into the green plastic cup with left gripper - subtask_index: 42 - - subtask: Pour water into the gray plastic cup with right gripper - subtask_index: 43 - - subtask: Pour water into the gray plastic cup with left gripper - subtask_index: 44 - - subtask: 'null' - subtask_index: 45 - atomic_actions: - - grasp - - pick - - place - - pour - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - 
type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 43201 - dataset_size: 932.69 MB - data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_pour_water_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (88 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. 
- support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null 
-base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Galaxea_R1_Lite_pour_water_black_tablecloth.yaml b/dataset_info/Galaxea_R1_Lite_pour_water_black_tablecloth.yaml index 908d93453c90bd494f2d87dc20a30970811350e1..748984fc31faa1e0ed611dad1e70ea516b95998e 100644 --- a/dataset_info/Galaxea_R1_Lite_pour_water_black_tablecloth.yaml +++ b/dataset_info/Galaxea_R1_Lite_pour_water_black_tablecloth.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: black_table_cloth level1: table_cloths level2: black_table_cloth @@ -51,69 +51,113 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use the gripper to pour the water into the other two cups on black - table. +task_instruction: +- use the gripper to pour the water into the other two cups on black table. 
sub_tasks: -- Pick up the gray plastic cup with left gripper -- Pour water from blue cup to white cup with the left gripper -- Pour water from white cup to blue cup with the right gripper -- Grasp the green cup with the left gripper -- Place the blue plastic cup on the table with left gripper -- Pour water from blue cup to green cup with the right gripper -- Pour water from white cup to green cup with the right gripper -- Left gripper -- Pick up the green plastic cup with right gripper -- Pour water from blue cup to green cup with the left gripper -- Grasp the blue cup with the left gripper -- Pour water into the green plastic cup with right gripper -- Pour water into the blue plastic cup with left gripper -- Place the white cup with the right gripper -- Grasp the white cup with the right gripper -- Pick up the blue plastic cup with right gripper -- Place the white cup with the left gripper -- Place the blue cup with the right gripper -- Place the gray plastic cup on the table with left gripper -- Pick up the green plastic cup with left gripper -- Pour water into the blue plastic cup with right gripper -- Grasp the green cup with the right gripper -- Pick up the blue plastic cup with left gripper -- Grasp the white cup with the left gripper -- Pour water from white cup to green cup with the left gripper -- Pour water from blue cup to white cup with the right gripper -- Place the blue plastic cup on the table with right gripper -- Pour water from white cup to blue cup with the left gripper -- Pour water into the green plastic cup with left gripper -- Pour water from green cup to blue cup with the right gripper -- Place the green plastic cup on the table with left gripper -- Place the green plastic cup on the table with right gripper -- Grasp the blue cup with the right gripper -- Place the green cup with the right gripper -- Pour water from green cup to blue cup with the left gripper -- Place the green cup with the left gripper -- End -- Place the blue cup with the 
left gripper -- Pour water from green cup to white cup with the left gripper -- Right gripper -- Pour water into the gray plastic cup with right gripper -- Pour water into the gray plastic cup with left gripper -- Pour water from green cup to white cup with the right gripper -- 'null' +- subtask: Pick up the gray plastic cup with left gripper + subtask_index: 0 +- subtask: Pour water from blue cup to white cup with the left gripper + subtask_index: 1 +- subtask: Pour water from white cup to blue cup with the right gripper + subtask_index: 2 +- subtask: Grasp the green cup with the left gripper + subtask_index: 3 +- subtask: Place the blue plastic cup on the table with left gripper + subtask_index: 4 +- subtask: Pour water from blue cup to green cup with the right gripper + subtask_index: 5 +- subtask: Pour water from white cup to green cup with the right gripper + subtask_index: 6 +- subtask: Left gripper + subtask_index: 7 +- subtask: Pick up the green plastic cup with right gripper + subtask_index: 8 +- subtask: Pour water from blue cup to green cup with the left gripper + subtask_index: 9 +- subtask: Grasp the blue cup with the left gripper + subtask_index: 10 +- subtask: Pour water into the green plastic cup with right gripper + subtask_index: 11 +- subtask: Pour water into the blue plastic cup with left gripper + subtask_index: 12 +- subtask: Place the white cup with the right gripper + subtask_index: 13 +- subtask: Grasp the white cup with the right gripper + subtask_index: 14 +- subtask: Pick up the blue plastic cup with right gripper + subtask_index: 15 +- subtask: Place the white cup with the left gripper + subtask_index: 16 +- subtask: Place the blue cup with the right gripper + subtask_index: 17 +- subtask: Place the gray plastic cup on the table with left gripper + subtask_index: 18 +- subtask: Pick up the green plastic cup with left gripper + subtask_index: 19 +- subtask: Pour water into the blue plastic cup with right gripper + subtask_index: 20 +- 
subtask: Grasp the green cup with the right gripper + subtask_index: 21 +- subtask: Pick up the blue plastic cup with left gripper + subtask_index: 22 +- subtask: Grasp the white cup with the left gripper + subtask_index: 23 +- subtask: Pour water from white cup to green cup with the left gripper + subtask_index: 24 +- subtask: Pour water from blue cup to white cup with the right gripper + subtask_index: 25 +- subtask: Place the blue plastic cup on the table with right gripper + subtask_index: 26 +- subtask: Pour water from white cup to blue cup with the left gripper + subtask_index: 27 +- subtask: Pour water into the green plastic cup with left gripper + subtask_index: 28 +- subtask: Pour water from green cup to blue cup with the right gripper + subtask_index: 29 +- subtask: Place the green plastic cup on the table with left gripper + subtask_index: 30 +- subtask: Place the green plastic cup on the table with right gripper + subtask_index: 31 +- subtask: Grasp the blue cup with the right gripper + subtask_index: 32 +- subtask: Place the green cup with the right gripper + subtask_index: 33 +- subtask: Pour water from green cup to blue cup with the left gripper + subtask_index: 34 +- subtask: Place the green cup with the left gripper + subtask_index: 35 +- subtask: End + subtask_index: 36 +- subtask: Place the blue cup with the left gripper + subtask_index: 37 +- subtask: Pour water from green cup to white cup with the left gripper + subtask_index: 38 +- subtask: Right gripper + subtask_index: 39 +- subtask: Pour water into the gray plastic cup with right gripper + subtask_index: 40 +- subtask: Pour water into the gray plastic cup with left gripper + subtask_index: 41 +- subtask: Pour water from green cup to white cup with the right gripper + subtask_index: 42 +- subtask: 'null' + subtask_index: 43 atomic_actions: - grasp - pick - place - pour -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some 
reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -124,13 +168,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -138,8 +179,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 98 total_frames: 42644 fps: 30 @@ -226,11 +266,9 @@ data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_pour_water_black_tablecloth_qce |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:97 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -498,7 +536,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -506,7 +544,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: 
https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -533,268 +570,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_pour_water_black_tablecloth - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use the gripper to pour the water into the other two cups on black table. 
- sub_tasks: - - subtask: Pick up the gray plastic cup with left gripper - subtask_index: 0 - - subtask: Pour water from blue cup to white cup with the left gripper - subtask_index: 1 - - subtask: Pour water from white cup to blue cup with the right gripper - subtask_index: 2 - - subtask: Grasp the green cup with the left gripper - subtask_index: 3 - - subtask: Place the blue plastic cup on the table with left gripper - subtask_index: 4 - - subtask: Pour water from blue cup to green cup with the right gripper - subtask_index: 5 - - subtask: Pour water from white cup to green cup with the right gripper - subtask_index: 6 - - subtask: Left gripper - subtask_index: 7 - - subtask: Pick up the green plastic cup with right gripper - subtask_index: 8 - - subtask: Pour water from blue cup to green cup with the left gripper - subtask_index: 9 - - subtask: Grasp the blue cup with the left gripper - subtask_index: 10 - - subtask: Pour water into the green plastic cup with right gripper - subtask_index: 11 - - subtask: Pour water into the blue plastic cup with left gripper - subtask_index: 12 - - subtask: Place the white cup with the right gripper - subtask_index: 13 - - subtask: Grasp the white cup with the right gripper - subtask_index: 14 - - subtask: Pick up the blue plastic cup with right gripper - subtask_index: 15 - - subtask: Place the white cup with the left gripper - subtask_index: 16 - - subtask: Place the blue cup with the right gripper - subtask_index: 17 - - subtask: Place the gray plastic cup on the table with left gripper - subtask_index: 18 - - subtask: Pick up the green plastic cup with left gripper - subtask_index: 19 - - subtask: Pour water into the blue plastic cup with right gripper - subtask_index: 20 - - subtask: Grasp the green cup with the right gripper - subtask_index: 21 - - subtask: Pick up the blue plastic cup with left gripper - subtask_index: 22 - - subtask: Grasp the white cup with the left gripper - subtask_index: 23 - - subtask: Pour water 
from white cup to green cup with the left gripper - subtask_index: 24 - - subtask: Pour water from blue cup to white cup with the right gripper - subtask_index: 25 - - subtask: Place the blue plastic cup on the table with right gripper - subtask_index: 26 - - subtask: Pour water from white cup to blue cup with the left gripper - subtask_index: 27 - - subtask: Pour water into the green plastic cup with left gripper - subtask_index: 28 - - subtask: Pour water from green cup to blue cup with the right gripper - subtask_index: 29 - - subtask: Place the green plastic cup on the table with left gripper - subtask_index: 30 - - subtask: Place the green plastic cup on the table with right gripper - subtask_index: 31 - - subtask: Grasp the blue cup with the right gripper - subtask_index: 32 - - subtask: Place the green cup with the right gripper - subtask_index: 33 - - subtask: Pour water from green cup to blue cup with the left gripper - subtask_index: 34 - - subtask: Place the green cup with the left gripper - subtask_index: 35 - - subtask: End - subtask_index: 36 - - subtask: Place the blue cup with the left gripper - subtask_index: 37 - - subtask: Pour water from green cup to white cup with the left gripper - subtask_index: 38 - - subtask: Right gripper - subtask_index: 39 - - subtask: Pour water into the gray plastic cup with right gripper - subtask_index: 40 - - subtask: Pour water into the gray plastic cup with left gripper - subtask_index: 41 - - subtask: Pour water from green cup to white cup with the right gripper - subtask_index: 42 - - subtask: 'null' - subtask_index: 43 - atomic_actions: - - grasp - - pick - - place - - pour - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 42644 - dataset_size: 1.43 GB - data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_pour_water_black_tablecloth_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (86 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. 
- support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null 
-base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Galaxea_R1_Lite_storage_object_blue_plate.yaml b/dataset_info/Galaxea_R1_Lite_storage_object_blue_plate.yaml index b6e05fb231e44999986477cf05de4aee28ecdb89..a5bd60f64feffae089c8d290b5db5484bb0e0fe1 100644 --- a/dataset_info/Galaxea_R1_Lite_storage_object_blue_plate.yaml +++ b/dataset_info/Galaxea_R1_Lite_storage_object_blue_plate.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: blue_plate level1: plates level2: blue_plate @@ -195,109 +195,196 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use a gripper to pick the target object and place on the blue plate. +task_instruction: +- use a gripper to pick the target object and place on the blue plate. 
sub_tasks: -- Place the shower sphere on the blue plate with the left gripper -- Place the round wooden block on the blue plate with the right gripper -- Grasp the blue pot with the left gripper -- Grasp the plugboard with the left gripper -- Place the soft facial cleanser on the blue plate with the right gripper -- Place the potato chips on the blue plate with the right gripper -- Grasp the Itchy scratcher with the right gripper -- Place the duck toy on the blue plate with the right gripper -- Grasp the potato chips with the right gripper -- Place the tin on the blue plate with the left gripper -- Grasp the banana with the left gripper -- Place the chocolate cake on the blue plate with the right gripper -- Place the blue cup on the blue plate with the right gripper -- Place the blue pot on the blue plate with the left gripper -- Place the tape on the blue plate with the left gripper -- Grasp the compasses with the right gripper -- Grasp the duck toy with the left gripper -- Place the blackboard erasure on the blue plate with the right gripper -- Place the tin on the blue plate with the right gripper -- Grasp the blue cup with the left gripper -- Grasp the square chewing gum with the left gripper -- Grasp the chocolate cake with the right gripper -- Grasp the shower sphere with the left gripper -- Place the compasses on the blue plate with the right gripper -- Grasp the plugboard with the right gripper -- Grasp the yogurt with the right gripper -- Place the round wooden block on the blue plate with the left gripper -- Place the banana on the blue plate with the left gripper -- Grasp the tin with the left gripper -- Grasp the brown towel with the left gripper -- Grasp the hard facial cleanser with the left gripper -- Place the Itchy scratcher on the blue plate with the right gripper -- Grasp the chocolate with the right gripper -- Grasp the peach with the right gripper -- Grasp the brown towel with the right gripper -- Place the coke on the blue plate with the left 
gripper -- Grasp the hard facial cleanser with the right gripper -- Place the compasses on the blue plate with the left gripper -- Place the hard facial cleanser on the blue plate with the left gripper -- Place the yogurt on the blue plate with the right gripper -- Grasp the peach with the left gripper -- Place the square wooden block on the blue plate with the right gripper -- Grasp the duck toy with the right gripper -- End -- Place the duck toy on the blue plate with the left gripper -- Grasp the coke with the left gripper -- Grasp the round wooden block with the left gripper -- Place the round bread on the blue plate with the left gripper -- Place the plugboard on the blue plate with the left gripper -- Grasp the compasses with the left gripper -- Place the green lemon on the blue plate with the left gripper -- Place the peach on the blue plate with the right gripper -- Place the chocolate on the blue plate with the right gripper -- Grasp the blue pot with the right gripper -- Grasp the round bread with the right gripper -- Grasp the chocolate cake with the left gripper -- Place the hard facial cleanser on the blue plate with the right gripper -- Place the brown towel on the blue plate with the right gripper -- Grasp the tin with the right gripper -- Place the blue cup on the blue plate with the left gripper -- Grasp the tape with the right gripper -- Grasp the coke with the right gripper -- Grasp the tape with the left gripper -- Grasp the shower sphere with the right gripper -- Place the banana on the blue plate with the right gripper -- Place the shower sphere on the blue plate with the right gripper -- Grasp the blackboard erasure with the right gripper -- Place the square chewing gum on the blue plate with the left gripper -- Place the plugboard on the blue plate with the right gripper -- Place the round bread on the blue plate with the right gripper -- Place the chocolate cake on the blue plate with the left gripper -- Grasp the round bread with the left 
gripper -- Place the brown towel on the blue plate with the left gripper -- Grasp the square wooden block with the left gripper -- Place the coke on the blue plate with the right gripper -- Grasp the blue cup with the right gripper -- Grasp the soft facial cleanser with the right gripper -- Place the tape on the blue plate with the right gripper -- Grasp the square wooden block with the right gripper -- Grasp the green lemon with the left gripper -- Place the square wooden block on the blue plate with the left gripper -- Place the peach on the blue plate with the left gripper -- Grasp the round wooden block with the right gripper -- Place the blue pot on the blue plate with the right gripper -- Grasp the banana with the right gripper -- 'null' +- subtask: Place the shower sphere on the blue plate with the left gripper + subtask_index: 0 +- subtask: Place the round wooden block on the blue plate with the right gripper + subtask_index: 1 +- subtask: Grasp the blue pot with the left gripper + subtask_index: 2 +- subtask: Grasp the plugboard with the left gripper + subtask_index: 3 +- subtask: Place the soft facial cleanser on the blue plate with the right gripper + subtask_index: 4 +- subtask: Place the potato chips on the blue plate with the right gripper + subtask_index: 5 +- subtask: Grasp the Itchy scratcher with the right gripper + subtask_index: 6 +- subtask: Place the duck toy on the blue plate with the right gripper + subtask_index: 7 +- subtask: Grasp the potato chips with the right gripper + subtask_index: 8 +- subtask: Place the tin on the blue plate with the left gripper + subtask_index: 9 +- subtask: Grasp the banana with the left gripper + subtask_index: 10 +- subtask: Place the chocolate cake on the blue plate with the right gripper + subtask_index: 11 +- subtask: Place the blue cup on the blue plate with the right gripper + subtask_index: 12 +- subtask: Place the blue pot on the blue plate with the left gripper + subtask_index: 13 +- subtask: Place the 
tape on the blue plate with the left gripper + subtask_index: 14 +- subtask: Grasp the compasses with the right gripper + subtask_index: 15 +- subtask: Grasp the duck toy with the left gripper + subtask_index: 16 +- subtask: Place the blackboard erasure on the blue plate with the right gripper + subtask_index: 17 +- subtask: Place the tin on the blue plate with the right gripper + subtask_index: 18 +- subtask: Grasp the blue cup with the left gripper + subtask_index: 19 +- subtask: Grasp the square chewing gum with the left gripper + subtask_index: 20 +- subtask: Grasp the chocolate cake with the right gripper + subtask_index: 21 +- subtask: Grasp the shower sphere with the left gripper + subtask_index: 22 +- subtask: Place the compasses on the blue plate with the right gripper + subtask_index: 23 +- subtask: Grasp the plugboard with the right gripper + subtask_index: 24 +- subtask: Grasp the yogurt with the right gripper + subtask_index: 25 +- subtask: Place the round wooden block on the blue plate with the left gripper + subtask_index: 26 +- subtask: Place the banana on the blue plate with the left gripper + subtask_index: 27 +- subtask: Grasp the tin with the left gripper + subtask_index: 28 +- subtask: Grasp the brown towel with the left gripper + subtask_index: 29 +- subtask: Grasp the hard facial cleanser with the left gripper + subtask_index: 30 +- subtask: Place the Itchy scratcher on the blue plate with the right gripper + subtask_index: 31 +- subtask: Grasp the chocolate with the right gripper + subtask_index: 32 +- subtask: Grasp the peach with the right gripper + subtask_index: 33 +- subtask: Grasp the brown towel with the right gripper + subtask_index: 34 +- subtask: Place the coke on the blue plate with the left gripper + subtask_index: 35 +- subtask: Grasp the hard facial cleanser with the right gripper + subtask_index: 36 +- subtask: Place the compasses on the blue plate with the left gripper + subtask_index: 37 +- subtask: Place the hard facial 
cleanser on the blue plate with the left gripper + subtask_index: 38 +- subtask: Place the yogurt on the blue plate with the right gripper + subtask_index: 39 +- subtask: Grasp the peach with the left gripper + subtask_index: 40 +- subtask: Place the square wooden block on the blue plate with the right gripper + subtask_index: 41 +- subtask: Grasp the duck toy with the right gripper + subtask_index: 42 +- subtask: End + subtask_index: 43 +- subtask: Place the duck toy on the blue plate with the left gripper + subtask_index: 44 +- subtask: Grasp the coke with the left gripper + subtask_index: 45 +- subtask: Grasp the round wooden block with the left gripper + subtask_index: 46 +- subtask: Place the round bread on the blue plate with the left gripper + subtask_index: 47 +- subtask: Place the plugboard on the blue plate with the left gripper + subtask_index: 48 +- subtask: Grasp the compasses with the left gripper + subtask_index: 49 +- subtask: Place the green lemon on the blue plate with the left gripper + subtask_index: 50 +- subtask: Place the peach on the blue plate with the right gripper + subtask_index: 51 +- subtask: Place the chocolate on the blue plate with the right gripper + subtask_index: 52 +- subtask: Grasp the blue pot with the right gripper + subtask_index: 53 +- subtask: Grasp the round bread with the right gripper + subtask_index: 54 +- subtask: Grasp the chocolate cake with the left gripper + subtask_index: 55 +- subtask: Place the hard facial cleanser on the blue plate with the right gripper + subtask_index: 56 +- subtask: Place the brown towel on the blue plate with the right gripper + subtask_index: 57 +- subtask: Grasp the tin with the right gripper + subtask_index: 58 +- subtask: Place the blue cup on the blue plate with the left gripper + subtask_index: 59 +- subtask: Grasp the tape with the right gripper + subtask_index: 60 +- subtask: Grasp the coke with the right gripper + subtask_index: 61 +- subtask: Grasp the tape with the left gripper 
+ subtask_index: 62 +- subtask: Grasp the shower sphere with the right gripper + subtask_index: 63 +- subtask: Place the banana on the blue plate with the right gripper + subtask_index: 64 +- subtask: Place the shower sphere on the blue plate with the right gripper + subtask_index: 65 +- subtask: Grasp the blackboard erasure with the right gripper + subtask_index: 66 +- subtask: Place the square chewing gum on the blue plate with the left gripper + subtask_index: 67 +- subtask: Place the plugboard on the blue plate with the right gripper + subtask_index: 68 +- subtask: Place the round bread on the blue plate with the right gripper + subtask_index: 69 +- subtask: Place the chocolate cake on the blue plate with the left gripper + subtask_index: 70 +- subtask: Grasp the round bread with the left gripper + subtask_index: 71 +- subtask: Place the brown towel on the blue plate with the left gripper + subtask_index: 72 +- subtask: Grasp the square wooden block with the left gripper + subtask_index: 73 +- subtask: Place the coke on the blue plate with the right gripper + subtask_index: 74 +- subtask: Grasp the blue cup with the right gripper + subtask_index: 75 +- subtask: Grasp the soft facial cleanser with the right gripper + subtask_index: 76 +- subtask: Place the tape on the blue plate with the right gripper + subtask_index: 77 +- subtask: Grasp the square wooden block with the right gripper + subtask_index: 78 +- subtask: Grasp the green lemon with the left gripper + subtask_index: 79 +- subtask: Place the square wooden block on the blue plate with the left gripper + subtask_index: 80 +- subtask: Place the peach on the blue plate with the left gripper + subtask_index: 81 +- subtask: Grasp the round wooden block with the right gripper + subtask_index: 82 +- subtask: Place the blue pot on the blue plate with the right gripper + subtask_index: 83 +- subtask: Grasp the banana with the right gripper + subtask_index: 84 +- subtask: 'null' + subtask_index: 85 atomic_actions: 
- grasp - pick - place -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -308,13 +395,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -322,8 +406,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 102 total_frames: 19084 fps: 30 @@ -410,11 +493,9 @@ data_structure: 'Galaxea_R1_Lite_storage_object_blue_plate_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:101 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -682,7 +763,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -690,7 +771,6 @@ dataset_description: This dataset uses an extended format based on 
LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -717,351 +797,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_storage_object_blue_plate - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use a gripper to pick the target object and place on the blue plate. 
- sub_tasks: - - subtask: Place the shower sphere on the blue plate with the left gripper - subtask_index: 0 - - subtask: Place the round wooden block on the blue plate with the right gripper - subtask_index: 1 - - subtask: Grasp the blue pot with the left gripper - subtask_index: 2 - - subtask: Grasp the plugboard with the left gripper - subtask_index: 3 - - subtask: Place the soft facial cleanser on the blue plate with the right gripper - subtask_index: 4 - - subtask: Place the potato chips on the blue plate with the right gripper - subtask_index: 5 - - subtask: Grasp the Itchy scratcher with the right gripper - subtask_index: 6 - - subtask: Place the duck toy on the blue plate with the right gripper - subtask_index: 7 - - subtask: Grasp the potato chips with the right gripper - subtask_index: 8 - - subtask: Place the tin on the blue plate with the left gripper - subtask_index: 9 - - subtask: Grasp the banana with the left gripper - subtask_index: 10 - - subtask: Place the chocolate cake on the blue plate with the right gripper - subtask_index: 11 - - subtask: Place the blue cup on the blue plate with the right gripper - subtask_index: 12 - - subtask: Place the blue pot on the blue plate with the left gripper - subtask_index: 13 - - subtask: Place the tape on the blue plate with the left gripper - subtask_index: 14 - - subtask: Grasp the compasses with the right gripper - subtask_index: 15 - - subtask: Grasp the duck toy with the left gripper - subtask_index: 16 - - subtask: Place the blackboard erasure on the blue plate with the right gripper - subtask_index: 17 - - subtask: Place the tin on the blue plate with the right gripper - subtask_index: 18 - - subtask: Grasp the blue cup with the left gripper - subtask_index: 19 - - subtask: Grasp the square chewing gum with the left gripper - subtask_index: 20 - - subtask: Grasp the chocolate cake with the right gripper - subtask_index: 21 - - subtask: Grasp the shower sphere with the left gripper - subtask_index: 22 - 
- subtask: Place the compasses on the blue plate with the right gripper - subtask_index: 23 - - subtask: Grasp the plugboard with the right gripper - subtask_index: 24 - - subtask: Grasp the yogurt with the right gripper - subtask_index: 25 - - subtask: Place the round wooden block on the blue plate with the left gripper - subtask_index: 26 - - subtask: Place the banana on the blue plate with the left gripper - subtask_index: 27 - - subtask: Grasp the tin with the left gripper - subtask_index: 28 - - subtask: Grasp the brown towel with the left gripper - subtask_index: 29 - - subtask: Grasp the hard facial cleanser with the left gripper - subtask_index: 30 - - subtask: Place the Itchy scratcher on the blue plate with the right gripper - subtask_index: 31 - - subtask: Grasp the chocolate with the right gripper - subtask_index: 32 - - subtask: Grasp the peach with the right gripper - subtask_index: 33 - - subtask: Grasp the brown towel with the right gripper - subtask_index: 34 - - subtask: Place the coke on the blue plate with the left gripper - subtask_index: 35 - - subtask: Grasp the hard facial cleanser with the right gripper - subtask_index: 36 - - subtask: Place the compasses on the blue plate with the left gripper - subtask_index: 37 - - subtask: Place the hard facial cleanser on the blue plate with the left gripper - subtask_index: 38 - - subtask: Place the yogurt on the blue plate with the right gripper - subtask_index: 39 - - subtask: Grasp the peach with the left gripper - subtask_index: 40 - - subtask: Place the square wooden block on the blue plate with the right gripper - subtask_index: 41 - - subtask: Grasp the duck toy with the right gripper - subtask_index: 42 - - subtask: End - subtask_index: 43 - - subtask: Place the duck toy on the blue plate with the left gripper - subtask_index: 44 - - subtask: Grasp the coke with the left gripper - subtask_index: 45 - - subtask: Grasp the round wooden block with the left gripper - subtask_index: 46 - - subtask: 
Place the round bread on the blue plate with the left gripper - subtask_index: 47 - - subtask: Place the plugboard on the blue plate with the left gripper - subtask_index: 48 - - subtask: Grasp the compasses with the left gripper - subtask_index: 49 - - subtask: Place the green lemon on the blue plate with the left gripper - subtask_index: 50 - - subtask: Place the peach on the blue plate with the right gripper - subtask_index: 51 - - subtask: Place the chocolate on the blue plate with the right gripper - subtask_index: 52 - - subtask: Grasp the blue pot with the right gripper - subtask_index: 53 - - subtask: Grasp the round bread with the right gripper - subtask_index: 54 - - subtask: Grasp the chocolate cake with the left gripper - subtask_index: 55 - - subtask: Place the hard facial cleanser on the blue plate with the right gripper - subtask_index: 56 - - subtask: Place the brown towel on the blue plate with the right gripper - subtask_index: 57 - - subtask: Grasp the tin with the right gripper - subtask_index: 58 - - subtask: Place the blue cup on the blue plate with the left gripper - subtask_index: 59 - - subtask: Grasp the tape with the right gripper - subtask_index: 60 - - subtask: Grasp the coke with the right gripper - subtask_index: 61 - - subtask: Grasp the tape with the left gripper - subtask_index: 62 - - subtask: Grasp the shower sphere with the right gripper - subtask_index: 63 - - subtask: Place the banana on the blue plate with the right gripper - subtask_index: 64 - - subtask: Place the shower sphere on the blue plate with the right gripper - subtask_index: 65 - - subtask: Grasp the blackboard erasure with the right gripper - subtask_index: 66 - - subtask: Place the square chewing gum on the blue plate with the left gripper - subtask_index: 67 - - subtask: Place the plugboard on the blue plate with the right gripper - subtask_index: 68 - - subtask: Place the round bread on the blue plate with the right gripper - subtask_index: 69 - - subtask: 
Place the chocolate cake on the blue plate with the left gripper - subtask_index: 70 - - subtask: Grasp the round bread with the left gripper - subtask_index: 71 - - subtask: Place the brown towel on the blue plate with the left gripper - subtask_index: 72 - - subtask: Grasp the square wooden block with the left gripper - subtask_index: 73 - - subtask: Place the coke on the blue plate with the right gripper - subtask_index: 74 - - subtask: Grasp the blue cup with the right gripper - subtask_index: 75 - - subtask: Grasp the soft facial cleanser with the right gripper - subtask_index: 76 - - subtask: Place the tape on the blue plate with the right gripper - subtask_index: 77 - - subtask: Grasp the square wooden block with the right gripper - subtask_index: 78 - - subtask: Grasp the green lemon with the left gripper - subtask_index: 79 - - subtask: Place the square wooden block on the blue plate with the left gripper - subtask_index: 80 - - subtask: Place the peach on the blue plate with the left gripper - subtask_index: 81 - - subtask: Grasp the round wooden block with the right gripper - subtask_index: 82 - - subtask: Place the blue pot on the blue plate with the right gripper - subtask_index: 83 - - subtask: Grasp the banana with the right gripper - subtask_index: 84 - - subtask: 'null' - subtask_index: 85 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 19084 - dataset_size: 740.73 MB - data_structure: 'Galaxea_R1_Lite_storage_object_blue_plate_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (90 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. 
- support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null 
-base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Galaxea_R1_Lite_storage_object_brown_basket.yaml b/dataset_info/Galaxea_R1_Lite_storage_object_brown_basket.yaml index 2e1beb5981c54d480c0395bcab8501acb658d9f7..cd2c10883b557ce0e380d736d01de0b4bf765765 100644 --- a/dataset_info/Galaxea_R1_Lite_storage_object_brown_basket.yaml +++ b/dataset_info/Galaxea_R1_Lite_storage_object_brown_basket.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: brown_basket level1: basket level2: brown_basket @@ -195,111 +195,200 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use a gripper to pick the target object and place on the brown basket. +task_instruction: +- use a gripper to pick the target object and place on the brown basket. 
sub_tasks: -- Grasp the blue pot with the left gripper -- Grasp the plugboard with the left gripper -- Place the potato chips into the basket with the left gripper -- Place the banana into the basket with the left gripper -- Place the coke into the basket with the left gripper -- Place the brown towel into the basket with the right gripper -- Place the peach slice into the basket with the right gripper -- Grasp the banana with the left gripper -- Place the round wooden block into the basket with the right gripper -- Place the duck toy into the basket with the right gripper -- Grasp the compasses with the right gripper -- Place the blackboard erasure into the basket with the left gripper -- Place the plugboard into the basket with the left gripper -- Place the square chewing gum into the basket with the right gripper -- Grasp the duck toy with the left gripper -- Grasp the cookie with the right gripper -- Grasp the blue cup with the left gripper -- Place the blackboard erasure into the basket with the right gripper -- Place the hard facial cleanser into the basket with the left gripper -- Place the round bread into the basket with the left gripper -- Place the square wooden block into the basket with the right gripper -- Grasp the soft facial cleanser with the left gripper -- Place the blue pot into the basket with the left gripper -- Grasp the peach slice with the right gripper -- Place the soft facial cleanser into the basket with the left gripper -- Grasp the square chewing gum with the left gripper -- Place the shower sphere into the basket with the left gripper -- Place the yogurt into the basket with the right gripper -- Grasp the chocolate cake with the right gripper -- Place the round wooden block into the basket with the left gripper -- Place the back scratcher into the basket with the left gripper -- Grasp the shower sphere with the left gripper -- Grasp the yogurt with the right gripper -- Grasp the tin with the left gripper -- Grasp the brown towel with 
the left gripper -- Grasp the hard facial cleanser with the left gripper -- Grasp the brown towel with the right gripper -- Grasp the back scratcher with the left gripper -- Place the hard facial cleanser into the basket with the right gripper -- Grasp the hard facial cleanser with the right gripper -- Place the green lemon into the basket with the right gripper -- Place the banana into the basket with the right gripper -- Place the compasses into the basket with the right gripper -- Place the compasses into the basket with the left gripper -- Grasp the green lemon with the right gripper -- Grasp the bread slice with the right gripper -- Place the duck toy into the basket with the left gripper -- Place the chocolate cake into the basket with the right gripper -- Place the shower sphere into the basket with the right gripper -- Grasp the potato chips with the left gripper -- Grasp the duck toy with the right gripper -- End -- Place the cookie into the basket with the right gripper -- Place the tape into the basket with the right gripper -- Grasp the blackboard erasure with the left gripper -- Place the bread slice into the basket with the left gripper -- Grasp the coke with the left gripper -- Grasp the round wooden block with the left gripper -- Place the tape into the basket with the left gripper -- Place the blue cup into the basket with the left gripper -- Grasp the compasses with the left gripper -- Grasp the blue pot with the right gripper -- Grasp the round bread with the right gripper -- Grasp the chocolate cake with the left gripper -- Place the blue pot into the basket with the right gripper -- Grasp the tin with the right gripper -- Place the tin into the basket with the left gripper -- Grasp the tape with the right gripper -- Place the square chewing gum into the basket with the left gripper -- Place the bread slice into the basket with the right gripper -- Grasp the tape with the left gripper -- Place the blue cup into the basket with the right gripper 
-- Grasp the square chewing gum with the right gripper -- Grasp the shower sphere with the right gripper -- Place the chocolate cake into the basket with the left gripper -- Grasp the bread slice with the left gripper -- Grasp the blackboard erasure with the right gripper -- Place the round bread into the basket with the right gripper -- Grasp the round bread with the left gripper -- Grasp the square wooden block with the left gripper -- Place the brown towel into the basket with the left gripper -- Grasp the blue cup with the right gripper -- Grasp the square wooden block with the right gripper -- Place the square wooden block into the basket with the left gripper -- Place the tin into the basket with the right gripper -- Grasp the round wooden block with the right gripper -- Grasp the banana with the right gripper -- 'null' +- subtask: Grasp the blue pot with the left gripper + subtask_index: 0 +- subtask: Grasp the plugboard with the left gripper + subtask_index: 1 +- subtask: Place the potato chips into the basket with the left gripper + subtask_index: 2 +- subtask: Place the banana into the basket with the left gripper + subtask_index: 3 +- subtask: Place the coke into the basket with the left gripper + subtask_index: 4 +- subtask: Place the brown towel into the basket with the right gripper + subtask_index: 5 +- subtask: Place the peach slice into the basket with the right gripper + subtask_index: 6 +- subtask: Grasp the banana with the left gripper + subtask_index: 7 +- subtask: Place the round wooden block into the basket with the right gripper + subtask_index: 8 +- subtask: Place the duck toy into the basket with the right gripper + subtask_index: 9 +- subtask: Grasp the compasses with the right gripper + subtask_index: 10 +- subtask: Place the blackboard erasure into the basket with the left gripper + subtask_index: 11 +- subtask: Place the plugboard into the basket with the left gripper + subtask_index: 12 +- subtask: Place the square chewing gum into 
the basket with the right gripper + subtask_index: 13 +- subtask: Grasp the duck toy with the left gripper + subtask_index: 14 +- subtask: Grasp the cookie with the right gripper + subtask_index: 15 +- subtask: Grasp the blue cup with the left gripper + subtask_index: 16 +- subtask: Place the blackboard erasure into the basket with the right gripper + subtask_index: 17 +- subtask: Place the hard facial cleanser into the basket with the left gripper + subtask_index: 18 +- subtask: Place the round bread into the basket with the left gripper + subtask_index: 19 +- subtask: Place the square wooden block into the basket with the right gripper + subtask_index: 20 +- subtask: Grasp the soft facial cleanser with the left gripper + subtask_index: 21 +- subtask: Place the blue pot into the basket with the left gripper + subtask_index: 22 +- subtask: Grasp the peach slice with the right gripper + subtask_index: 23 +- subtask: Place the soft facial cleanser into the basket with the left gripper + subtask_index: 24 +- subtask: Grasp the square chewing gum with the left gripper + subtask_index: 25 +- subtask: Place the shower sphere into the basket with the left gripper + subtask_index: 26 +- subtask: Place the yogurt into the basket with the right gripper + subtask_index: 27 +- subtask: Grasp the chocolate cake with the right gripper + subtask_index: 28 +- subtask: Place the round wooden block into the basket with the left gripper + subtask_index: 29 +- subtask: Place the back scratcher into the basket with the left gripper + subtask_index: 30 +- subtask: Grasp the shower sphere with the left gripper + subtask_index: 31 +- subtask: Grasp the yogurt with the right gripper + subtask_index: 32 +- subtask: Grasp the tin with the left gripper + subtask_index: 33 +- subtask: Grasp the brown towel with the left gripper + subtask_index: 34 +- subtask: Grasp the hard facial cleanser with the left gripper + subtask_index: 35 +- subtask: Grasp the brown towel with the right gripper + 
subtask_index: 36 +- subtask: Grasp the back scratcher with the left gripper + subtask_index: 37 +- subtask: Place the hard facial cleanser into the basket with the right gripper + subtask_index: 38 +- subtask: Grasp the hard facial cleanser with the right gripper + subtask_index: 39 +- subtask: Place the green lemon into the basket with the right gripper + subtask_index: 40 +- subtask: Place the banana into the basket with the right gripper + subtask_index: 41 +- subtask: Place the compasses into the basket with the right gripper + subtask_index: 42 +- subtask: Place the compasses into the basket with the left gripper + subtask_index: 43 +- subtask: Grasp the green lemon with the right gripper + subtask_index: 44 +- subtask: Grasp the bread slice with the right gripper + subtask_index: 45 +- subtask: Place the duck toy into the basket with the left gripper + subtask_index: 46 +- subtask: Place the chocolate cake into the basket with the right gripper + subtask_index: 47 +- subtask: Place the shower sphere into the basket with the right gripper + subtask_index: 48 +- subtask: Grasp the potato chips with the left gripper + subtask_index: 49 +- subtask: Grasp the duck toy with the right gripper + subtask_index: 50 +- subtask: End + subtask_index: 51 +- subtask: Place the cookie into the basket with the right gripper + subtask_index: 52 +- subtask: Place the tape into the basket with the right gripper + subtask_index: 53 +- subtask: Grasp the blackboard erasure with the left gripper + subtask_index: 54 +- subtask: Place the bread slice into the basket with the left gripper + subtask_index: 55 +- subtask: Grasp the coke with the left gripper + subtask_index: 56 +- subtask: Grasp the round wooden block with the left gripper + subtask_index: 57 +- subtask: Place the tape into the basket with the left gripper + subtask_index: 58 +- subtask: Place the blue cup into the basket with the left gripper + subtask_index: 59 +- subtask: Grasp the compasses with the left gripper + 
subtask_index: 60 +- subtask: Grasp the blue pot with the right gripper + subtask_index: 61 +- subtask: Grasp the round bread with the right gripper + subtask_index: 62 +- subtask: Grasp the chocolate cake with the left gripper + subtask_index: 63 +- subtask: Place the blue pot into the basket with the right gripper + subtask_index: 64 +- subtask: Grasp the tin with the right gripper + subtask_index: 65 +- subtask: Place the tin into the basket with the left gripper + subtask_index: 66 +- subtask: Grasp the tape with the right gripper + subtask_index: 67 +- subtask: Place the square chewing gum into the basket with the left gripper + subtask_index: 68 +- subtask: Place the bread slice into the basket with the right gripper + subtask_index: 69 +- subtask: Grasp the tape with the left gripper + subtask_index: 70 +- subtask: Place the blue cup into the basket with the right gripper + subtask_index: 71 +- subtask: Grasp the square chewing gum with the right gripper + subtask_index: 72 +- subtask: Grasp the shower sphere with the right gripper + subtask_index: 73 +- subtask: Place the chocolate cake into the basket with the left gripper + subtask_index: 74 +- subtask: Grasp the bread slice with the left gripper + subtask_index: 75 +- subtask: Grasp the blackboard erasure with the right gripper + subtask_index: 76 +- subtask: Place the round bread into the basket with the right gripper + subtask_index: 77 +- subtask: Grasp the round bread with the left gripper + subtask_index: 78 +- subtask: Grasp the square wooden block with the left gripper + subtask_index: 79 +- subtask: Place the brown towel into the basket with the left gripper + subtask_index: 80 +- subtask: Grasp the blue cup with the right gripper + subtask_index: 81 +- subtask: Grasp the square wooden block with the right gripper + subtask_index: 82 +- subtask: Place the square wooden block into the basket with the left gripper + subtask_index: 83 +- subtask: Place the tin into the basket with the right gripper 
+ subtask_index: 84 +- subtask: Grasp the round wooden block with the right gripper + subtask_index: 85 +- subtask: Grasp the banana with the right gripper + subtask_index: 86 +- subtask: 'null' + subtask_index: 87 atomic_actions: - grasp - pick - place -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -310,13 +399,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -324,8 +410,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 102 total_frames: 18209 fps: 30 @@ -412,11 +497,9 @@ data_structure: 'Galaxea_R1_Lite_storage_object_brown_basket_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:101 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -684,7 +767,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN 
Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -692,7 +775,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -719,355 +801,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_storage_object_brown_basket - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use a gripper to pick the target object and place on the brown basket. 
- sub_tasks: - - subtask: Grasp the blue pot with the left gripper - subtask_index: 0 - - subtask: Grasp the plugboard with the left gripper - subtask_index: 1 - - subtask: Place the potato chips into the basket with the left gripper - subtask_index: 2 - - subtask: Place the banana into the basket with the left gripper - subtask_index: 3 - - subtask: Place the coke into the basket with the left gripper - subtask_index: 4 - - subtask: Place the brown towel into the basket with the right gripper - subtask_index: 5 - - subtask: Place the peach slice into the basket with the right gripper - subtask_index: 6 - - subtask: Grasp the banana with the left gripper - subtask_index: 7 - - subtask: Place the round wooden block into the basket with the right gripper - subtask_index: 8 - - subtask: Place the duck toy into the basket with the right gripper - subtask_index: 9 - - subtask: Grasp the compasses with the right gripper - subtask_index: 10 - - subtask: Place the blackboard erasure into the basket with the left gripper - subtask_index: 11 - - subtask: Place the plugboard into the basket with the left gripper - subtask_index: 12 - - subtask: Place the square chewing gum into the basket with the right gripper - subtask_index: 13 - - subtask: Grasp the duck toy with the left gripper - subtask_index: 14 - - subtask: Grasp the cookie with the right gripper - subtask_index: 15 - - subtask: Grasp the blue cup with the left gripper - subtask_index: 16 - - subtask: Place the blackboard erasure into the basket with the right gripper - subtask_index: 17 - - subtask: Place the hard facial cleanser into the basket with the left gripper - subtask_index: 18 - - subtask: Place the round bread into the basket with the left gripper - subtask_index: 19 - - subtask: Place the square wooden block into the basket with the right gripper - subtask_index: 20 - - subtask: Grasp the soft facial cleanser with the left gripper - subtask_index: 21 - - subtask: Place the blue pot into the basket with 
the left gripper - subtask_index: 22 - - subtask: Grasp the peach slice with the right gripper - subtask_index: 23 - - subtask: Place the soft facial cleanser into the basket with the left gripper - subtask_index: 24 - - subtask: Grasp the square chewing gum with the left gripper - subtask_index: 25 - - subtask: Place the shower sphere into the basket with the left gripper - subtask_index: 26 - - subtask: Place the yogurt into the basket with the right gripper - subtask_index: 27 - - subtask: Grasp the chocolate cake with the right gripper - subtask_index: 28 - - subtask: Place the round wooden block into the basket with the left gripper - subtask_index: 29 - - subtask: Place the back scratcher into the basket with the left gripper - subtask_index: 30 - - subtask: Grasp the shower sphere with the left gripper - subtask_index: 31 - - subtask: Grasp the yogurt with the right gripper - subtask_index: 32 - - subtask: Grasp the tin with the left gripper - subtask_index: 33 - - subtask: Grasp the brown towel with the left gripper - subtask_index: 34 - - subtask: Grasp the hard facial cleanser with the left gripper - subtask_index: 35 - - subtask: Grasp the brown towel with the right gripper - subtask_index: 36 - - subtask: Grasp the back scratcher with the left gripper - subtask_index: 37 - - subtask: Place the hard facial cleanser into the basket with the right gripper - subtask_index: 38 - - subtask: Grasp the hard facial cleanser with the right gripper - subtask_index: 39 - - subtask: Place the green lemon into the basket with the right gripper - subtask_index: 40 - - subtask: Place the banana into the basket with the right gripper - subtask_index: 41 - - subtask: Place the compasses into the basket with the right gripper - subtask_index: 42 - - subtask: Place the compasses into the basket with the left gripper - subtask_index: 43 - - subtask: Grasp the green lemon with the right gripper - subtask_index: 44 - - subtask: Grasp the bread slice with the right gripper - 
subtask_index: 45 - - subtask: Place the duck toy into the basket with the left gripper - subtask_index: 46 - - subtask: Place the chocolate cake into the basket with the right gripper - subtask_index: 47 - - subtask: Place the shower sphere into the basket with the right gripper - subtask_index: 48 - - subtask: Grasp the potato chips with the left gripper - subtask_index: 49 - - subtask: Grasp the duck toy with the right gripper - subtask_index: 50 - - subtask: End - subtask_index: 51 - - subtask: Place the cookie into the basket with the right gripper - subtask_index: 52 - - subtask: Place the tape into the basket with the right gripper - subtask_index: 53 - - subtask: Grasp the blackboard erasure with the left gripper - subtask_index: 54 - - subtask: Place the bread slice into the basket with the left gripper - subtask_index: 55 - - subtask: Grasp the coke with the left gripper - subtask_index: 56 - - subtask: Grasp the round wooden block with the left gripper - subtask_index: 57 - - subtask: Place the tape into the basket with the left gripper - subtask_index: 58 - - subtask: Place the blue cup into the basket with the left gripper - subtask_index: 59 - - subtask: Grasp the compasses with the left gripper - subtask_index: 60 - - subtask: Grasp the blue pot with the right gripper - subtask_index: 61 - - subtask: Grasp the round bread with the right gripper - subtask_index: 62 - - subtask: Grasp the chocolate cake with the left gripper - subtask_index: 63 - - subtask: Place the blue pot into the basket with the right gripper - subtask_index: 64 - - subtask: Grasp the tin with the right gripper - subtask_index: 65 - - subtask: Place the tin into the basket with the left gripper - subtask_index: 66 - - subtask: Grasp the tape with the right gripper - subtask_index: 67 - - subtask: Place the square chewing gum into the basket with the left gripper - subtask_index: 68 - - subtask: Place the bread slice into the basket with the right gripper - subtask_index: 69 - - 
subtask: Grasp the tape with the left gripper - subtask_index: 70 - - subtask: Place the blue cup into the basket with the right gripper - subtask_index: 71 - - subtask: Grasp the square chewing gum with the right gripper - subtask_index: 72 - - subtask: Grasp the shower sphere with the right gripper - subtask_index: 73 - - subtask: Place the chocolate cake into the basket with the left gripper - subtask_index: 74 - - subtask: Grasp the bread slice with the left gripper - subtask_index: 75 - - subtask: Grasp the blackboard erasure with the right gripper - subtask_index: 76 - - subtask: Place the round bread into the basket with the right gripper - subtask_index: 77 - - subtask: Grasp the round bread with the left gripper - subtask_index: 78 - - subtask: Grasp the square wooden block with the left gripper - subtask_index: 79 - - subtask: Place the brown towel into the basket with the left gripper - subtask_index: 80 - - subtask: Grasp the blue cup with the right gripper - subtask_index: 81 - - subtask: Grasp the square wooden block with the right gripper - subtask_index: 82 - - subtask: Place the square wooden block into the basket with the left gripper - subtask_index: 83 - - subtask: Place the tin into the basket with the right gripper - subtask_index: 84 - - subtask: Grasp the round wooden block with the right gripper - subtask_index: 85 - - subtask: Grasp the banana with the right gripper - subtask_index: 86 - - subtask: 'null' - subtask_index: 87 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 18209 - dataset_size: 899.78 MB - data_structure: 'Galaxea_R1_Lite_storage_object_brown_basket_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (90 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. 
- support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null 
-base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Galaxea_R1_Lite_storage_object_brown_bowl.yaml b/dataset_info/Galaxea_R1_Lite_storage_object_brown_bowl.yaml index bfe7cb0b91167a6b1fedfc96576471d4a6cfd488..151ab9b5c791e78ab48054af392f2258eea0361d 100644 --- a/dataset_info/Galaxea_R1_Lite_storage_object_brown_bowl.yaml +++ b/dataset_info/Galaxea_R1_Lite_storage_object_brown_bowl.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: brown_bowl level1: plastic_bowl level2: brown_bowl @@ -195,107 +195,192 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use a gripper to pick the target object and place on the brown bowl. +task_instruction: +- use a gripper to pick the target object and place on the brown bowl. 
sub_tasks: -- Grasp the blue pot with the left gripper -- Place the coke in the bowl with the left gripper -- Grasp the plugboard with the left gripper -- Place the back scratcher in the bowl with the right gripper -- Place the plugboard in the bowl with the right gripper -- Place the compasses in the bowl with the right gripper -- Grasp the potato chips with the right gripper -- Grasp the banana with the left gripper -- Place the square chewing gum in the bowl with the right gripper -- Place the round wooden block in the bowl with the right gripper -- Place the tin in the bowl with the left gripper -- Grasp the compasses with the right gripper -- Grasp the duck toy with the left gripper -- Place the potato chips in the bowl with the left gripper -- Grasp the blue cup with the left gripper -- Place the chocolate cake in the bowl with the right gripper -- Grasp the back scratcher with the right gripper -- Place the square wooden block in the bowl with the right gripper -- Grasp the chocolate cake with the right gripper -- Grasp the shower sphere with the left gripper -- Grasp the plugboard with the right gripper -- Grasp the yogurt with the right gripper -- Grasp the tin with the left gripper -- Grasp the brown towel with the left gripper -- Place the square wooden block in the bowl with the left gripper -- Grasp the hard facial cleanser with the left gripper -- Grasp the brown towel with the right gripper -- Place the potato chips in the bowl with the right gripper -- Place the duck toy in the bowl with the left gripper -- Grasp the hard facial cleanser with the right gripper -- Place the green lemon in the bowl with the right gripper -- Grasp the peach with the left gripper -- Place the peach in the bowl with the left gripper -- Place the soft facial cleanser in the bowl with the right gripper -- Place the shower sphere in the bowl with the left gripper -- Place the banana in the bowl with the right gripper -- Place the shower sphere in the bowl with the right 
gripper -- Grasp the green lemon with the right gripper -- Place the brown towel in the bowl with the right gripper -- Place the blackboard erasure in the bowl with the left gripper -- Grasp the potato chips with the left gripper -- Grasp the duck toy with the right gripper -- End -- Place the blue cup in the bowl with the right gripper -- Grasp the blackboard erasure with the left gripper -- Grasp the coke with the left gripper -- Grasp the round wooden block with the left gripper -- Place the round wooden block in the bowl with the left gripper -- Place the banana in the bowl with the left gripper -- Place the chocolate cake in the bowl with the left gripper -- Place the tape in the bowl with the right gripper -- Grasp the compasses with the left gripper -- Place the chocolate in the bowl with the left gripper -- Grasp the blue pot with the right gripper -- Place the hard facial cleanser in the bowl with the left gripper -- Grasp the round bread with the right gripper -- Grasp the chocolate cake with the left gripper -- Place the hard facial cleanser in the bowl with the right gripper -- Place the round bread in the bowl with the right gripper -- Grasp the tape with the right gripper -- Place the tape in the bowl with the left gripper -- Grasp the coke with the right gripper -- Grasp the tape with the left gripper -- Place the duck toy in the bowl with the right gripper -- Grasp the square chewing gum with the right gripper -- Place the blue pot in the bowl with the left gripper -- Grasp the shower sphere with the right gripper -- Place the plugboard in the bowl with the left gripper -- Place the coke in the bowl with the right gripper -- Place the round bread in the bowl with the left gripper -- Place the blue cup in the bowl with the left gripper -- Place the blue pot in the bowl with the right gripper -- Grasp the round bread with the left gripper -- Grasp the chocolate with the left gripper -- Grasp the square wooden block with the left gripper -- Place the 
yogurt in the bowl with the right gripper -- Grasp the blue cup with the right gripper -- Grasp the soft facial cleanser with the right gripper -- Place the compasses in the bowl with the left gripper -- Grasp the square wooden block with the right gripper -- Place the brown towel in the bowl with the left gripper -- Grasp the round wooden block with the right gripper -- Grasp the banana with the right gripper -- 'null' +- subtask: Grasp the blue pot with the left gripper + subtask_index: 0 +- subtask: Place the coke in the bowl with the left gripper + subtask_index: 1 +- subtask: Grasp the plugboard with the left gripper + subtask_index: 2 +- subtask: Place the back scratcher in the bowl with the right gripper + subtask_index: 3 +- subtask: Place the plugboard in the bowl with the right gripper + subtask_index: 4 +- subtask: Place the compasses in the bowl with the right gripper + subtask_index: 5 +- subtask: Grasp the potato chips with the right gripper + subtask_index: 6 +- subtask: Grasp the banana with the left gripper + subtask_index: 7 +- subtask: Place the square chewing gum in the bowl with the right gripper + subtask_index: 8 +- subtask: Place the round wooden block in the bowl with the right gripper + subtask_index: 9 +- subtask: Place the tin in the bowl with the left gripper + subtask_index: 10 +- subtask: Grasp the compasses with the right gripper + subtask_index: 11 +- subtask: Grasp the duck toy with the left gripper + subtask_index: 12 +- subtask: Place the potato chips in the bowl with the left gripper + subtask_index: 13 +- subtask: Grasp the blue cup with the left gripper + subtask_index: 14 +- subtask: Place the chocolate cake in the bowl with the right gripper + subtask_index: 15 +- subtask: Grasp the back scratcher with the right gripper + subtask_index: 16 +- subtask: Place the square wooden block in the bowl with the right gripper + subtask_index: 17 +- subtask: Grasp the chocolate cake with the right gripper + subtask_index: 18 +- subtask: 
Grasp the shower sphere with the left gripper + subtask_index: 19 +- subtask: Grasp the plugboard with the right gripper + subtask_index: 20 +- subtask: Grasp the yogurt with the right gripper + subtask_index: 21 +- subtask: Grasp the tin with the left gripper + subtask_index: 22 +- subtask: Grasp the brown towel with the left gripper + subtask_index: 23 +- subtask: Place the square wooden block in the bowl with the left gripper + subtask_index: 24 +- subtask: Grasp the hard facial cleanser with the left gripper + subtask_index: 25 +- subtask: Grasp the brown towel with the right gripper + subtask_index: 26 +- subtask: Place the potato chips in the bowl with the right gripper + subtask_index: 27 +- subtask: Place the duck toy in the bowl with the left gripper + subtask_index: 28 +- subtask: Grasp the hard facial cleanser with the right gripper + subtask_index: 29 +- subtask: Place the green lemon in the bowl with the right gripper + subtask_index: 30 +- subtask: Grasp the peach with the left gripper + subtask_index: 31 +- subtask: Place the peach in the bowl with the left gripper + subtask_index: 32 +- subtask: Place the soft facial cleanser in the bowl with the right gripper + subtask_index: 33 +- subtask: Place the shower sphere in the bowl with the left gripper + subtask_index: 34 +- subtask: Place the banana in the bowl with the right gripper + subtask_index: 35 +- subtask: Place the shower sphere in the bowl with the right gripper + subtask_index: 36 +- subtask: Grasp the green lemon with the right gripper + subtask_index: 37 +- subtask: Place the brown towel in the bowl with the right gripper + subtask_index: 38 +- subtask: Place the blackboard erasure in the bowl with the left gripper + subtask_index: 39 +- subtask: Grasp the potato chips with the left gripper + subtask_index: 40 +- subtask: Grasp the duck toy with the right gripper + subtask_index: 41 +- subtask: End + subtask_index: 42 +- subtask: Place the blue cup in the bowl with the right gripper + 
subtask_index: 43 +- subtask: Grasp the blackboard erasure with the left gripper + subtask_index: 44 +- subtask: Grasp the coke with the left gripper + subtask_index: 45 +- subtask: Grasp the round wooden block with the left gripper + subtask_index: 46 +- subtask: Place the round wooden block in the bowl with the left gripper + subtask_index: 47 +- subtask: Place the banana in the bowl with the left gripper + subtask_index: 48 +- subtask: Place the chocolate cake in the bowl with the left gripper + subtask_index: 49 +- subtask: Place the tape in the bowl with the right gripper + subtask_index: 50 +- subtask: Grasp the compasses with the left gripper + subtask_index: 51 +- subtask: Place the chocolate in the bowl with the left gripper + subtask_index: 52 +- subtask: Grasp the blue pot with the right gripper + subtask_index: 53 +- subtask: Place the hard facial cleanser in the bowl with the left gripper + subtask_index: 54 +- subtask: Grasp the round bread with the right gripper + subtask_index: 55 +- subtask: Grasp the chocolate cake with the left gripper + subtask_index: 56 +- subtask: Place the hard facial cleanser in the bowl with the right gripper + subtask_index: 57 +- subtask: Place the round bread in the bowl with the right gripper + subtask_index: 58 +- subtask: Grasp the tape with the right gripper + subtask_index: 59 +- subtask: Place the tape in the bowl with the left gripper + subtask_index: 60 +- subtask: Grasp the coke with the right gripper + subtask_index: 61 +- subtask: Grasp the tape with the left gripper + subtask_index: 62 +- subtask: Place the duck toy in the bowl with the right gripper + subtask_index: 63 +- subtask: Grasp the square chewing gum with the right gripper + subtask_index: 64 +- subtask: Place the blue pot in the bowl with the left gripper + subtask_index: 65 +- subtask: Grasp the shower sphere with the right gripper + subtask_index: 66 +- subtask: Place the plugboard in the bowl with the left gripper + subtask_index: 67 +- subtask: 
Place the coke in the bowl with the right gripper + subtask_index: 68 +- subtask: Place the round bread in the bowl with the left gripper + subtask_index: 69 +- subtask: Place the blue cup in the bowl with the left gripper + subtask_index: 70 +- subtask: Place the blue pot in the bowl with the right gripper + subtask_index: 71 +- subtask: Grasp the round bread with the left gripper + subtask_index: 72 +- subtask: Grasp the chocolate with the left gripper + subtask_index: 73 +- subtask: Grasp the square wooden block with the left gripper + subtask_index: 74 +- subtask: Place the yogurt in the bowl with the right gripper + subtask_index: 75 +- subtask: Grasp the blue cup with the right gripper + subtask_index: 76 +- subtask: Grasp the soft facial cleanser with the right gripper + subtask_index: 77 +- subtask: Place the compasses in the bowl with the left gripper + subtask_index: 78 +- subtask: Grasp the square wooden block with the right gripper + subtask_index: 79 +- subtask: Place the brown towel in the bowl with the left gripper + subtask_index: 80 +- subtask: Grasp the round wooden block with the right gripper + subtask_index: 81 +- subtask: Grasp the banana with the right gripper + subtask_index: 82 +- subtask: 'null' + subtask_index: 83 atomic_actions: - grasp - pick - place -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -306,13 +391,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -320,8 +402,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 101 total_frames: 23706 fps: 30 @@ -408,11 +489,9 @@ data_structure: 'Galaxea_R1_Lite_storage_object_brown_bowl_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:100 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -680,7 +759,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -688,7 +767,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback 
regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -715,347 +793,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_storage_object_brown_bowl - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use a gripper to pick the target object and place on the brown bowl. 
- sub_tasks: - - subtask: Grasp the blue pot with the left gripper - subtask_index: 0 - - subtask: Place the coke in the bowl with the left gripper - subtask_index: 1 - - subtask: Grasp the plugboard with the left gripper - subtask_index: 2 - - subtask: Place the back scratcher in the bowl with the right gripper - subtask_index: 3 - - subtask: Place the plugboard in the bowl with the right gripper - subtask_index: 4 - - subtask: Place the compasses in the bowl with the right gripper - subtask_index: 5 - - subtask: Grasp the potato chips with the right gripper - subtask_index: 6 - - subtask: Grasp the banana with the left gripper - subtask_index: 7 - - subtask: Place the square chewing gum in the bowl with the right gripper - subtask_index: 8 - - subtask: Place the round wooden block in the bowl with the right gripper - subtask_index: 9 - - subtask: Place the tin in the bowl with the left gripper - subtask_index: 10 - - subtask: Grasp the compasses with the right gripper - subtask_index: 11 - - subtask: Grasp the duck toy with the left gripper - subtask_index: 12 - - subtask: Place the potato chips in the bowl with the left gripper - subtask_index: 13 - - subtask: Grasp the blue cup with the left gripper - subtask_index: 14 - - subtask: Place the chocolate cake in the bowl with the right gripper - subtask_index: 15 - - subtask: Grasp the back scratcher with the right gripper - subtask_index: 16 - - subtask: Place the square wooden block in the bowl with the right gripper - subtask_index: 17 - - subtask: Grasp the chocolate cake with the right gripper - subtask_index: 18 - - subtask: Grasp the shower sphere with the left gripper - subtask_index: 19 - - subtask: Grasp the plugboard with the right gripper - subtask_index: 20 - - subtask: Grasp the yogurt with the right gripper - subtask_index: 21 - - subtask: Grasp the tin with the left gripper - subtask_index: 22 - - subtask: Grasp the brown towel with the left gripper - subtask_index: 23 - - subtask: Place the square 
wooden block in the bowl with the left gripper - subtask_index: 24 - - subtask: Grasp the hard facial cleanser with the left gripper - subtask_index: 25 - - subtask: Grasp the brown towel with the right gripper - subtask_index: 26 - - subtask: Place the potato chips in the bowl with the right gripper - subtask_index: 27 - - subtask: Place the duck toy in the bowl with the left gripper - subtask_index: 28 - - subtask: Grasp the hard facial cleanser with the right gripper - subtask_index: 29 - - subtask: Place the green lemon in the bowl with the right gripper - subtask_index: 30 - - subtask: Grasp the peach with the left gripper - subtask_index: 31 - - subtask: Place the peach in the bowl with the left gripper - subtask_index: 32 - - subtask: Place the soft facial cleanser in the bowl with the right gripper - subtask_index: 33 - - subtask: Place the shower sphere in the bowl with the left gripper - subtask_index: 34 - - subtask: Place the banana in the bowl with the right gripper - subtask_index: 35 - - subtask: Place the shower sphere in the bowl with the right gripper - subtask_index: 36 - - subtask: Grasp the green lemon with the right gripper - subtask_index: 37 - - subtask: Place the brown towel in the bowl with the right gripper - subtask_index: 38 - - subtask: Place the blackboard erasure in the bowl with the left gripper - subtask_index: 39 - - subtask: Grasp the potato chips with the left gripper - subtask_index: 40 - - subtask: Grasp the duck toy with the right gripper - subtask_index: 41 - - subtask: End - subtask_index: 42 - - subtask: Place the blue cup in the bowl with the right gripper - subtask_index: 43 - - subtask: Grasp the blackboard erasure with the left gripper - subtask_index: 44 - - subtask: Grasp the coke with the left gripper - subtask_index: 45 - - subtask: Grasp the round wooden block with the left gripper - subtask_index: 46 - - subtask: Place the round wooden block in the bowl with the left gripper - subtask_index: 47 - - subtask: Place 
the banana in the bowl with the left gripper - subtask_index: 48 - - subtask: Place the chocolate cake in the bowl with the left gripper - subtask_index: 49 - - subtask: Place the tape in the bowl with the right gripper - subtask_index: 50 - - subtask: Grasp the compasses with the left gripper - subtask_index: 51 - - subtask: Place the chocolate in the bowl with the left gripper - subtask_index: 52 - - subtask: Grasp the blue pot with the right gripper - subtask_index: 53 - - subtask: Place the hard facial cleanser in the bowl with the left gripper - subtask_index: 54 - - subtask: Grasp the round bread with the right gripper - subtask_index: 55 - - subtask: Grasp the chocolate cake with the left gripper - subtask_index: 56 - - subtask: Place the hard facial cleanser in the bowl with the right gripper - subtask_index: 57 - - subtask: Place the round bread in the bowl with the right gripper - subtask_index: 58 - - subtask: Grasp the tape with the right gripper - subtask_index: 59 - - subtask: Place the tape in the bowl with the left gripper - subtask_index: 60 - - subtask: Grasp the coke with the right gripper - subtask_index: 61 - - subtask: Grasp the tape with the left gripper - subtask_index: 62 - - subtask: Place the duck toy in the bowl with the right gripper - subtask_index: 63 - - subtask: Grasp the square chewing gum with the right gripper - subtask_index: 64 - - subtask: Place the blue pot in the bowl with the left gripper - subtask_index: 65 - - subtask: Grasp the shower sphere with the right gripper - subtask_index: 66 - - subtask: Place the plugboard in the bowl with the left gripper - subtask_index: 67 - - subtask: Place the coke in the bowl with the right gripper - subtask_index: 68 - - subtask: Place the round bread in the bowl with the left gripper - subtask_index: 69 - - subtask: Place the blue cup in the bowl with the left gripper - subtask_index: 70 - - subtask: Place the blue pot in the bowl with the right gripper - subtask_index: 71 - - subtask: 
Grasp the round bread with the left gripper - subtask_index: 72 - - subtask: Grasp the chocolate with the left gripper - subtask_index: 73 - - subtask: Grasp the square wooden block with the left gripper - subtask_index: 74 - - subtask: Place the yogurt in the bowl with the right gripper - subtask_index: 75 - - subtask: Grasp the blue cup with the right gripper - subtask_index: 76 - - subtask: Grasp the soft facial cleanser with the right gripper - subtask_index: 77 - - subtask: Place the compasses in the bowl with the left gripper - subtask_index: 78 - - subtask: Grasp the square wooden block with the right gripper - subtask_index: 79 - - subtask: Place the brown towel in the bowl with the left gripper - subtask_index: 80 - - subtask: Grasp the round wooden block with the right gripper - subtask_index: 81 - - subtask: Grasp the banana with the right gripper - subtask_index: 82 - - subtask: 'null' - subtask_index: 83 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 23706 - dataset_size: 909.34 MB - data_structure: 'Galaxea_R1_Lite_storage_object_brown_bowl_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (89 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. 
- support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null 
-base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Galaxea_R1_Lite_storage_object_brown_plate.yaml b/dataset_info/Galaxea_R1_Lite_storage_object_brown_plate.yaml index 27709d70e90ddd87893f3c0a3678b8f22e75003b..b10fcc523af00e832830561386971604d201f272 100644 --- a/dataset_info/Galaxea_R1_Lite_storage_object_brown_plate.yaml +++ b/dataset_info/Galaxea_R1_Lite_storage_object_brown_plate.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: brown_plate level1: plates level2: brown_plate @@ -195,113 +195,204 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use a gripper to pick the target object and place on the brown plate. +task_instruction: +- use a gripper to pick the target object and place on the brown plate. 
sub_tasks: -- Place the blue pot on the brown plate with the right gripper -- Grasp the blue pot with the left gripper -- Grasp the plugboard with the left gripper -- Place the soft facial cleanser on the brown plate with the right gripper -- Place the back scratcher on the brown plate with the right gripper -- Place the blackboard erasure on the brown plate with the left gripper -- Grasp the potato chips with the right gripper -- Grasp the banana with the left gripper -- Place the coke on the brown plate with the right gripper -- Place the chocolate on the brown plate with the left gripper -- Place the duck toy on the brown plate with the right gripper -- Grasp the compasses with the right gripper -- Place the peach on the brown plate with the left gripper -- Grasp the duck toy with the left gripper -- Place the round wooden block on the brown plate with the right gripper -- Grasp the blue cup with the left gripper -- Place the green lemon on the brown plate with the left gripper -- Place the shower sphere on the brown plate with the left gripper -- Place the shower sphere on the brown plate with the right gripper -- Grasp the back scratcher with the right gripper -- Grasp the square chewing gum with the left gripper -- Grasp the chocolate cake with the right gripper -- Place the yogurt on the brown plate with the right gripper -- Place the banana on the brown plate with the left gripper -- Grasp the shower sphere with the left gripper -- Place the brown towel on the brown plate with the left gripper -- Grasp the plugboard with the right gripper -- Grasp the yogurt with the right gripper -- Place the blue cup on the brown plate with the right gripper -- Place the plugboard on the brown plate with the right gripper -- Grasp the brown towel with the left gripper -- Grasp the hard facial cleanser with the left gripper -- Place the duck toy on the brown plate with the left gripper -- Place the round bread on the brown plate with the right gripper -- Grasp the brown 
towel with the right gripper -- Place the bread slice on the brown plate with the left gripper -- Grasp the hard facial cleanser with the right gripper -- Place the chocolate cake on the brown plate with the right gripper -- Grasp the peach with the left gripper -- Place the tin on the brown plate with the right gripper -- Place the tape on the brown plate with the right gripper -- Place the blackboard erasure on the brown plate with the right gripper -- Grasp the bread slice with the right gripper -- Place the potato chips on the brown plate with the right gripper -- Grasp the potato chips with the left gripper -- Place the tape on the brown plate with the left gripper -- Grasp the duck toy with the right gripper -- End -- Grasp the blackboard erasure with the left gripper -- Grasp the round wooden block with the left gripper -- Place the brown towel on the brown plate with the right gripper -- Place the blue cup on the brown plate with the left gripper -- Place the compasses on the brown plate with the right gripper -- Grasp the compasses with the left gripper -- Place the compasses on the brown plate with the left gripper -- Grasp the blue pot with the right gripper -- Grasp the round bread with the right gripper -- Grasp the chocolate cake with the left gripper -- Place the potato chips on the brown plate with the left gripper -- Place the plugboard on the brown plate with the left gripper -- Place the square chewing gum on the brown plate with the right gripper -- Place the banana on the brown plate with the right gripper -- Grasp the tin with the right gripper -- Place the hard facial cleanser on the brown plate with the left gripper -- Place the square wooden block on the brown plate with the left gripper -- Place the square chewing gum on the brown plate with the left gripper -- Grasp the tape with the right gripper -- Grasp the coke with the right gripper -- Grasp the tape with the left gripper -- Grasp the square chewing gum with the right gripper -- 
Grasp the shower sphere with the right gripper -- Place the square wooden block on the brown plate with the right gripper -- Place the hard facial cleanser on the brown plate with the right gripper -- Grasp the bread slice with the left gripper -- Grasp the blackboard erasure with the right gripper -- Place the bread slice on the brown plate with the right gripper -- Grasp the round bread with the left gripper -- Grasp the chocolate with the left gripper -- Grasp the square wooden block with the left gripper -- Grasp the blue cup with the right gripper -- Grasp the soft facial cleanser with the right gripper -- Place the chocolate cake on the brown plate with the left gripper -- Grasp the square wooden block with the right gripper -- Grasp the green lemon with the left gripper -- Place the round wooden block on the brown plate with the left gripper -- Place the round bread on the brown plate with the left gripper -- Place the blue pot on the brown plate with the left gripper -- Grasp the round wooden block with the right gripper -- Grasp the banana with the right gripper -- 'null' +- subtask: Place the blue pot on the brown plate with the right gripper + subtask_index: 0 +- subtask: Grasp the blue pot with the left gripper + subtask_index: 1 +- subtask: Grasp the plugboard with the left gripper + subtask_index: 2 +- subtask: Place the soft facial cleanser on the brown plate with the right gripper + subtask_index: 3 +- subtask: Place the back scratcher on the brown plate with the right gripper + subtask_index: 4 +- subtask: Place the blackboard erasure on the brown plate with the left gripper + subtask_index: 5 +- subtask: Grasp the potato chips with the right gripper + subtask_index: 6 +- subtask: Grasp the banana with the left gripper + subtask_index: 7 +- subtask: Place the coke on the brown plate with the right gripper + subtask_index: 8 +- subtask: Place the chocolate on the brown plate with the left gripper + subtask_index: 9 +- subtask: Place the duck toy on 
the brown plate with the right gripper + subtask_index: 10 +- subtask: Grasp the compasses with the right gripper + subtask_index: 11 +- subtask: Place the peach on the brown plate with the left gripper + subtask_index: 12 +- subtask: Grasp the duck toy with the left gripper + subtask_index: 13 +- subtask: Place the round wooden block on the brown plate with the right gripper + subtask_index: 14 +- subtask: Grasp the blue cup with the left gripper + subtask_index: 15 +- subtask: Place the green lemon on the brown plate with the left gripper + subtask_index: 16 +- subtask: Place the shower sphere on the brown plate with the left gripper + subtask_index: 17 +- subtask: Place the shower sphere on the brown plate with the right gripper + subtask_index: 18 +- subtask: Grasp the back scratcher with the right gripper + subtask_index: 19 +- subtask: Grasp the square chewing gum with the left gripper + subtask_index: 20 +- subtask: Grasp the chocolate cake with the right gripper + subtask_index: 21 +- subtask: Place the yogurt on the brown plate with the right gripper + subtask_index: 22 +- subtask: Place the banana on the brown plate with the left gripper + subtask_index: 23 +- subtask: Grasp the shower sphere with the left gripper + subtask_index: 24 +- subtask: Place the brown towel on the brown plate with the left gripper + subtask_index: 25 +- subtask: Grasp the plugboard with the right gripper + subtask_index: 26 +- subtask: Grasp the yogurt with the right gripper + subtask_index: 27 +- subtask: Place the blue cup on the brown plate with the right gripper + subtask_index: 28 +- subtask: Place the plugboard on the brown plate with the right gripper + subtask_index: 29 +- subtask: Grasp the brown towel with the left gripper + subtask_index: 30 +- subtask: Grasp the hard facial cleanser with the left gripper + subtask_index: 31 +- subtask: Place the duck toy on the brown plate with the left gripper + subtask_index: 32 +- subtask: Place the round bread on the brown plate 
with the right gripper + subtask_index: 33 +- subtask: Grasp the brown towel with the right gripper + subtask_index: 34 +- subtask: Place the bread slice on the brown plate with the left gripper + subtask_index: 35 +- subtask: Grasp the hard facial cleanser with the right gripper + subtask_index: 36 +- subtask: Place the chocolate cake on the brown plate with the right gripper + subtask_index: 37 +- subtask: Grasp the peach with the left gripper + subtask_index: 38 +- subtask: Place the tin on the brown plate with the right gripper + subtask_index: 39 +- subtask: Place the tape on the brown plate with the right gripper + subtask_index: 40 +- subtask: Place the blackboard erasure on the brown plate with the right gripper + subtask_index: 41 +- subtask: Grasp the bread slice with the right gripper + subtask_index: 42 +- subtask: Place the potato chips on the brown plate with the right gripper + subtask_index: 43 +- subtask: Grasp the potato chips with the left gripper + subtask_index: 44 +- subtask: Place the tape on the brown plate with the left gripper + subtask_index: 45 +- subtask: Grasp the duck toy with the right gripper + subtask_index: 46 +- subtask: End + subtask_index: 47 +- subtask: Grasp the blackboard erasure with the left gripper + subtask_index: 48 +- subtask: Grasp the round wooden block with the left gripper + subtask_index: 49 +- subtask: Place the brown towel on the brown plate with the right gripper + subtask_index: 50 +- subtask: Place the blue cup on the brown plate with the left gripper + subtask_index: 51 +- subtask: Place the compasses on the brown plate with the right gripper + subtask_index: 52 +- subtask: Grasp the compasses with the left gripper + subtask_index: 53 +- subtask: Place the compasses on the brown plate with the left gripper + subtask_index: 54 +- subtask: Grasp the blue pot with the right gripper + subtask_index: 55 +- subtask: Grasp the round bread with the right gripper + subtask_index: 56 +- subtask: Grasp the chocolate 
cake with the left gripper + subtask_index: 57 +- subtask: Place the potato chips on the brown plate with the left gripper + subtask_index: 58 +- subtask: Place the plugboard on the brown plate with the left gripper + subtask_index: 59 +- subtask: Place the square chewing gum on the brown plate with the right gripper + subtask_index: 60 +- subtask: Place the banana on the brown plate with the right gripper + subtask_index: 61 +- subtask: Grasp the tin with the right gripper + subtask_index: 62 +- subtask: Place the hard facial cleanser on the brown plate with the left gripper + subtask_index: 63 +- subtask: Place the square wooden block on the brown plate with the left gripper + subtask_index: 64 +- subtask: Place the square chewing gum on the brown plate with the left gripper + subtask_index: 65 +- subtask: Grasp the tape with the right gripper + subtask_index: 66 +- subtask: Grasp the coke with the right gripper + subtask_index: 67 +- subtask: Grasp the tape with the left gripper + subtask_index: 68 +- subtask: Grasp the square chewing gum with the right gripper + subtask_index: 69 +- subtask: Grasp the shower sphere with the right gripper + subtask_index: 70 +- subtask: Place the square wooden block on the brown plate with the right gripper + subtask_index: 71 +- subtask: Place the hard facial cleanser on the brown plate with the right gripper + subtask_index: 72 +- subtask: Grasp the bread slice with the left gripper + subtask_index: 73 +- subtask: Grasp the blackboard erasure with the right gripper + subtask_index: 74 +- subtask: Place the bread slice on the brown plate with the right gripper + subtask_index: 75 +- subtask: Grasp the round bread with the left gripper + subtask_index: 76 +- subtask: Grasp the chocolate with the left gripper + subtask_index: 77 +- subtask: Grasp the square wooden block with the left gripper + subtask_index: 78 +- subtask: Grasp the blue cup with the right gripper + subtask_index: 79 +- subtask: Grasp the soft facial cleanser 
with the right gripper + subtask_index: 80 +- subtask: Place the chocolate cake on the brown plate with the left gripper + subtask_index: 81 +- subtask: Grasp the square wooden block with the right gripper + subtask_index: 82 +- subtask: Grasp the green lemon with the left gripper + subtask_index: 83 +- subtask: Place the round wooden block on the brown plate with the left gripper + subtask_index: 84 +- subtask: Place the round bread on the brown plate with the left gripper + subtask_index: 85 +- subtask: Place the blue pot on the brown plate with the left gripper + subtask_index: 86 +- subtask: Grasp the round wooden block with the right gripper + subtask_index: 87 +- subtask: Grasp the banana with the right gripper + subtask_index: 88 +- subtask: 'null' + subtask_index: 89 atomic_actions: - grasp - pick - place -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -312,13 +403,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -326,8 +414,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 102 total_frames: 16390 fps: 30 @@ -414,11 +501,9 @@ data_structure: 'Galaxea_R1_Lite_storage_object_brown_plate_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:101 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -686,7 +771,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -694,7 +779,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or 
feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -721,359 +805,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_storage_object_brown_plate - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use a gripper to pick the target object and place on the brown plate. 
- sub_tasks: - - subtask: Place the blue pot on the brown plate with the right gripper - subtask_index: 0 - - subtask: Grasp the blue pot with the left gripper - subtask_index: 1 - - subtask: Grasp the plugboard with the left gripper - subtask_index: 2 - - subtask: Place the soft facial cleanser on the brown plate with the right gripper - subtask_index: 3 - - subtask: Place the back scratcher on the brown plate with the right gripper - subtask_index: 4 - - subtask: Place the blackboard erasure on the brown plate with the left gripper - subtask_index: 5 - - subtask: Grasp the potato chips with the right gripper - subtask_index: 6 - - subtask: Grasp the banana with the left gripper - subtask_index: 7 - - subtask: Place the coke on the brown plate with the right gripper - subtask_index: 8 - - subtask: Place the chocolate on the brown plate with the left gripper - subtask_index: 9 - - subtask: Place the duck toy on the brown plate with the right gripper - subtask_index: 10 - - subtask: Grasp the compasses with the right gripper - subtask_index: 11 - - subtask: Place the peach on the brown plate with the left gripper - subtask_index: 12 - - subtask: Grasp the duck toy with the left gripper - subtask_index: 13 - - subtask: Place the round wooden block on the brown plate with the right gripper - subtask_index: 14 - - subtask: Grasp the blue cup with the left gripper - subtask_index: 15 - - subtask: Place the green lemon on the brown plate with the left gripper - subtask_index: 16 - - subtask: Place the shower sphere on the brown plate with the left gripper - subtask_index: 17 - - subtask: Place the shower sphere on the brown plate with the right gripper - subtask_index: 18 - - subtask: Grasp the back scratcher with the right gripper - subtask_index: 19 - - subtask: Grasp the square chewing gum with the left gripper - subtask_index: 20 - - subtask: Grasp the chocolate cake with the right gripper - subtask_index: 21 - - subtask: Place the yogurt on the brown plate with the 
right gripper - subtask_index: 22 - - subtask: Place the banana on the brown plate with the left gripper - subtask_index: 23 - - subtask: Grasp the shower sphere with the left gripper - subtask_index: 24 - - subtask: Place the brown towel on the brown plate with the left gripper - subtask_index: 25 - - subtask: Grasp the plugboard with the right gripper - subtask_index: 26 - - subtask: Grasp the yogurt with the right gripper - subtask_index: 27 - - subtask: Place the blue cup on the brown plate with the right gripper - subtask_index: 28 - - subtask: Place the plugboard on the brown plate with the right gripper - subtask_index: 29 - - subtask: Grasp the brown towel with the left gripper - subtask_index: 30 - - subtask: Grasp the hard facial cleanser with the left gripper - subtask_index: 31 - - subtask: Place the duck toy on the brown plate with the left gripper - subtask_index: 32 - - subtask: Place the round bread on the brown plate with the right gripper - subtask_index: 33 - - subtask: Grasp the brown towel with the right gripper - subtask_index: 34 - - subtask: Place the bread slice on the brown plate with the left gripper - subtask_index: 35 - - subtask: Grasp the hard facial cleanser with the right gripper - subtask_index: 36 - - subtask: Place the chocolate cake on the brown plate with the right gripper - subtask_index: 37 - - subtask: Grasp the peach with the left gripper - subtask_index: 38 - - subtask: Place the tin on the brown plate with the right gripper - subtask_index: 39 - - subtask: Place the tape on the brown plate with the right gripper - subtask_index: 40 - - subtask: Place the blackboard erasure on the brown plate with the right gripper - subtask_index: 41 - - subtask: Grasp the bread slice with the right gripper - subtask_index: 42 - - subtask: Place the potato chips on the brown plate with the right gripper - subtask_index: 43 - - subtask: Grasp the potato chips with the left gripper - subtask_index: 44 - - subtask: Place the tape on the 
brown plate with the left gripper - subtask_index: 45 - - subtask: Grasp the duck toy with the right gripper - subtask_index: 46 - - subtask: End - subtask_index: 47 - - subtask: Grasp the blackboard erasure with the left gripper - subtask_index: 48 - - subtask: Grasp the round wooden block with the left gripper - subtask_index: 49 - - subtask: Place the brown towel on the brown plate with the right gripper - subtask_index: 50 - - subtask: Place the blue cup on the brown plate with the left gripper - subtask_index: 51 - - subtask: Place the compasses on the brown plate with the right gripper - subtask_index: 52 - - subtask: Grasp the compasses with the left gripper - subtask_index: 53 - - subtask: Place the compasses on the brown plate with the left gripper - subtask_index: 54 - - subtask: Grasp the blue pot with the right gripper - subtask_index: 55 - - subtask: Grasp the round bread with the right gripper - subtask_index: 56 - - subtask: Grasp the chocolate cake with the left gripper - subtask_index: 57 - - subtask: Place the potato chips on the brown plate with the left gripper - subtask_index: 58 - - subtask: Place the plugboard on the brown plate with the left gripper - subtask_index: 59 - - subtask: Place the square chewing gum on the brown plate with the right gripper - subtask_index: 60 - - subtask: Place the banana on the brown plate with the right gripper - subtask_index: 61 - - subtask: Grasp the tin with the right gripper - subtask_index: 62 - - subtask: Place the hard facial cleanser on the brown plate with the left gripper - subtask_index: 63 - - subtask: Place the square wooden block on the brown plate with the left gripper - subtask_index: 64 - - subtask: Place the square chewing gum on the brown plate with the left gripper - subtask_index: 65 - - subtask: Grasp the tape with the right gripper - subtask_index: 66 - - subtask: Grasp the coke with the right gripper - subtask_index: 67 - - subtask: Grasp the tape with the left gripper - subtask_index: 
68 - - subtask: Grasp the square chewing gum with the right gripper - subtask_index: 69 - - subtask: Grasp the shower sphere with the right gripper - subtask_index: 70 - - subtask: Place the square wooden block on the brown plate with the right gripper - subtask_index: 71 - - subtask: Place the hard facial cleanser on the brown plate with the right gripper - subtask_index: 72 - - subtask: Grasp the bread slice with the left gripper - subtask_index: 73 - - subtask: Grasp the blackboard erasure with the right gripper - subtask_index: 74 - - subtask: Place the bread slice on the brown plate with the right gripper - subtask_index: 75 - - subtask: Grasp the round bread with the left gripper - subtask_index: 76 - - subtask: Grasp the chocolate with the left gripper - subtask_index: 77 - - subtask: Grasp the square wooden block with the left gripper - subtask_index: 78 - - subtask: Grasp the blue cup with the right gripper - subtask_index: 79 - - subtask: Grasp the soft facial cleanser with the right gripper - subtask_index: 80 - - subtask: Place the chocolate cake on the brown plate with the left gripper - subtask_index: 81 - - subtask: Grasp the square wooden block with the right gripper - subtask_index: 82 - - subtask: Grasp the green lemon with the left gripper - subtask_index: 83 - - subtask: Place the round wooden block on the brown plate with the left gripper - subtask_index: 84 - - subtask: Place the round bread on the brown plate with the left gripper - subtask_index: 85 - - subtask: Place the blue pot on the brown plate with the left gripper - subtask_index: 86 - - subtask: Grasp the round wooden block with the right gripper - subtask_index: 87 - - subtask: Grasp the banana with the right gripper - subtask_index: 88 - - subtask: 'null' - subtask_index: 89 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the 
teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 16390 - dataset_size: 615.40 MB - data_structure: 'Galaxea_R1_Lite_storage_object_brown_plate_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (90 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. 
- support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null 
-base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Galaxea_R1_Lite_storage_object_dish.yaml b/dataset_info/Galaxea_R1_Lite_storage_object_dish.yaml index 105131c8147477632ab604d4b0491aaf914b744d..0cce8b75d4ba3d37c850277d01dc6ddc22e8df33 100644 --- a/dataset_info/Galaxea_R1_Lite_storage_object_dish.yaml +++ b/dataset_info/Galaxea_R1_Lite_storage_object_dish.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: dish level1: plates level2: dish @@ -195,113 +195,204 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use a gripper to pick the target object and place on the dish. +task_instruction: +- use a gripper to pick the target object and place on the dish. 
sub_tasks: -- Grasp the blue pot with the left gripper -- Place the back scratcher on the dish with the right gripper -- Grasp the plugboard with the left gripper -- Place the soft facial cleanser on the dish with the right gripper -- Grasp the potato chips with the right gripper -- Grasp the banana with the left gripper -- Grasp the compasses with the right gripper -- Grasp the duck toy with the left gripper -- Place the round bread on the dish with the left gripper -- Grasp the blue cup with the left gripper -- Place the compasses on the dish with the right gripper -- Place the duck toy on the dish with the right gripper -- Place the round wooden block on the dish with the right gripper -- Place the green lemon on the dish with the right gripper -- Grasp the back scratcher with the right gripper -- Grasp the square chewing gum with the left gripper -- Grasp the chocolate cake with the right gripper -- Grasp the shower sphere with the left gripper -- Place the peach on the dish with the left gripper -- Grasp the plugboard with the right gripper -- Grasp the tin with the left gripper -- Grasp the brown towel with the left gripper -- Place the brown towel on the dish with the right gripper -- Grasp the hard facial cleanser with the left gripper -- Place the plugboard on the dish with the right gripper -- Grasp the peach with the right gripper -- Grasp the brown towel with the right gripper -- Place the coke on the dish with the right gripper -- Place the banana on the dish with the left gripper -- Place the peach on the dish with the right gripper -- Grasp the hard facial cleanser with the right gripper -- Place the potato chips on the dish with the right gripper -- Grasp the peach with the left gripper -- Grasp the green lemon with the right gripper -- Place the chocolate cake on the dish with the right gripper -- Place the plugboard on the dish with the left gripper -- Grasp the bread slice with the right gripper -- Place the square chewing gum on the dish with 
the right gripper -- Grasp the duck toy with the right gripper -- End -- Place the blackboard erasure on the dish with the left gripper -- Grasp the blackboard erasure with the left gripper -- Grasp the coke with the left gripper -- Place the tape on the dish with the right gripper -- Place the bread slice on the dish with the right gripper -- Place the shower sphere on the dish with the right gripper -- Grasp the round wooden block with the left gripper -- Grasp the compasses with the left gripper -- Place the banana on the dish with the right gripper -- Grasp the blue pot with the right gripper -- Grasp the round bread with the right gripper -- Grasp the chocolate cake with the left gripper -- Place the square chewing gum on the dish with the left gripper -- Place the blue cup on the dish with the right gripper -- Place the hard facial cleanser on the dish with the right gripper -- Place the blue cup on the dish with the left gripper -- Grasp the tape with the right gripper -- Place the duck toy on the dish with the left gripper -- Grasp the coke with the right gripper -- Place the square wooden block on the dish with the right gripper -- Grasp the square chewing gum with the right gripper -- Place the round bread on the dish with the right gripper -- Grasp the shower sphere with the right gripper -- Place the brown towel on the dish with the left gripper -- Place the bread slice on the dish with the left gripper -- Grasp the bread slice with the left gripper -- Grasp the yogurt with the left gripper -- Place the blue pot on the dish with the left gripper -- Grasp the blackboard erasure with the right gripper -- Place the coke on the dish with the left gripper -- Place the square wooden block on the dish with the left gripper -- Place the yogurt on the dish with the left gripper -- Place the chocolate cake on the dish with the left gripper -- Place the compasses on the dish with the left gripper -- Place the round wooden block on the dish with the left gripper -- 
Place the chocolate on the dish with the left gripper -- Grasp the round bread with the left gripper -- Grasp the chocolate with the left gripper -- Grasp the square wooden block with the left gripper -- Grasp the blue cup with the right gripper -- Grasp the soft facial cleanser with the right gripper -- Place the shower sphere on the dish with the left gripper -- Place the tin on the dish with the left gripper -- Grasp the square wooden block with the right gripper -- Place the hard facial cleanser on the dish with the left gripper -- Place the blackboard erasure on the dish with the right gripper -- Place the blue pot on the dish with the right gripper -- Grasp the round wooden block with the right gripper -- Grasp the banana with the right gripper -- 'null' +- subtask: Grasp the blue pot with the left gripper + subtask_index: 0 +- subtask: Place the back scratcher on the dish with the right gripper + subtask_index: 1 +- subtask: Grasp the plugboard with the left gripper + subtask_index: 2 +- subtask: Place the soft facial cleanser on the dish with the right gripper + subtask_index: 3 +- subtask: Grasp the potato chips with the right gripper + subtask_index: 4 +- subtask: Grasp the banana with the left gripper + subtask_index: 5 +- subtask: Grasp the compasses with the right gripper + subtask_index: 6 +- subtask: Grasp the duck toy with the left gripper + subtask_index: 7 +- subtask: Place the round bread on the dish with the left gripper + subtask_index: 8 +- subtask: Grasp the blue cup with the left gripper + subtask_index: 9 +- subtask: Place the compasses on the dish with the right gripper + subtask_index: 10 +- subtask: Place the duck toy on the dish with the right gripper + subtask_index: 11 +- subtask: Place the round wooden block on the dish with the right gripper + subtask_index: 12 +- subtask: Place the green lemon on the dish with the right gripper + subtask_index: 13 +- subtask: Grasp the back scratcher with the right gripper + subtask_index: 14 +- 
subtask: Grasp the square chewing gum with the left gripper + subtask_index: 15 +- subtask: Grasp the chocolate cake with the right gripper + subtask_index: 16 +- subtask: Grasp the shower sphere with the left gripper + subtask_index: 17 +- subtask: Place the peach on the dish with the left gripper + subtask_index: 18 +- subtask: Grasp the plugboard with the right gripper + subtask_index: 19 +- subtask: Grasp the tin with the left gripper + subtask_index: 20 +- subtask: Grasp the brown towel with the left gripper + subtask_index: 21 +- subtask: Place the brown towel on the dish with the right gripper + subtask_index: 22 +- subtask: Grasp the hard facial cleanser with the left gripper + subtask_index: 23 +- subtask: Place the plugboard on the dish with the right gripper + subtask_index: 24 +- subtask: Grasp the peach with the right gripper + subtask_index: 25 +- subtask: Grasp the brown towel with the right gripper + subtask_index: 26 +- subtask: Place the coke on the dish with the right gripper + subtask_index: 27 +- subtask: Place the banana on the dish with the left gripper + subtask_index: 28 +- subtask: Place the peach on the dish with the right gripper + subtask_index: 29 +- subtask: Grasp the hard facial cleanser with the right gripper + subtask_index: 30 +- subtask: Place the potato chips on the dish with the right gripper + subtask_index: 31 +- subtask: Grasp the peach with the left gripper + subtask_index: 32 +- subtask: Grasp the green lemon with the right gripper + subtask_index: 33 +- subtask: Place the chocolate cake on the dish with the right gripper + subtask_index: 34 +- subtask: Place the plugboard on the dish with the left gripper + subtask_index: 35 +- subtask: Grasp the bread slice with the right gripper + subtask_index: 36 +- subtask: Place the square chewing gum on the dish with the right gripper + subtask_index: 37 +- subtask: Grasp the duck toy with the right gripper + subtask_index: 38 +- subtask: End + subtask_index: 39 +- subtask: Place 
the blackboard erasure on the dish with the left gripper + subtask_index: 40 +- subtask: Grasp the blackboard erasure with the left gripper + subtask_index: 41 +- subtask: Grasp the coke with the left gripper + subtask_index: 42 +- subtask: Place the tape on the dish with the right gripper + subtask_index: 43 +- subtask: Place the bread slice on the dish with the right gripper + subtask_index: 44 +- subtask: Place the shower sphere on the dish with the right gripper + subtask_index: 45 +- subtask: Grasp the round wooden block with the left gripper + subtask_index: 46 +- subtask: Grasp the compasses with the left gripper + subtask_index: 47 +- subtask: Place the banana on the dish with the right gripper + subtask_index: 48 +- subtask: Grasp the blue pot with the right gripper + subtask_index: 49 +- subtask: Grasp the round bread with the right gripper + subtask_index: 50 +- subtask: Grasp the chocolate cake with the left gripper + subtask_index: 51 +- subtask: Place the square chewing gum on the dish with the left gripper + subtask_index: 52 +- subtask: Place the blue cup on the dish with the right gripper + subtask_index: 53 +- subtask: Place the hard facial cleanser on the dish with the right gripper + subtask_index: 54 +- subtask: Place the blue cup on the dish with the left gripper + subtask_index: 55 +- subtask: Grasp the tape with the right gripper + subtask_index: 56 +- subtask: Place the duck toy on the dish with the left gripper + subtask_index: 57 +- subtask: Grasp the coke with the right gripper + subtask_index: 58 +- subtask: Place the square wooden block on the dish with the right gripper + subtask_index: 59 +- subtask: Grasp the square chewing gum with the right gripper + subtask_index: 60 +- subtask: Place the round bread on the dish with the right gripper + subtask_index: 61 +- subtask: Grasp the shower sphere with the right gripper + subtask_index: 62 +- subtask: Place the brown towel on the dish with the left gripper + subtask_index: 63 +- subtask: 
Place the bread slice on the dish with the left gripper + subtask_index: 64 +- subtask: Grasp the bread slice with the left gripper + subtask_index: 65 +- subtask: Grasp the yogurt with the left gripper + subtask_index: 66 +- subtask: Place the blue pot on the dish with the left gripper + subtask_index: 67 +- subtask: Grasp the blackboard erasure with the right gripper + subtask_index: 68 +- subtask: Place the coke on the dish with the left gripper + subtask_index: 69 +- subtask: Place the square wooden block on the dish with the left gripper + subtask_index: 70 +- subtask: Place the yogurt on the dish with the left gripper + subtask_index: 71 +- subtask: Place the chocolate cake on the dish with the left gripper + subtask_index: 72 +- subtask: Place the compasses on the dish with the left gripper + subtask_index: 73 +- subtask: Place the round wooden block on the dish with the left gripper + subtask_index: 74 +- subtask: Place the chocolate on the dish with the left gripper + subtask_index: 75 +- subtask: Grasp the round bread with the left gripper + subtask_index: 76 +- subtask: Grasp the chocolate with the left gripper + subtask_index: 77 +- subtask: Grasp the square wooden block with the left gripper + subtask_index: 78 +- subtask: Grasp the blue cup with the right gripper + subtask_index: 79 +- subtask: Grasp the soft facial cleanser with the right gripper + subtask_index: 80 +- subtask: Place the shower sphere on the dish with the left gripper + subtask_index: 81 +- subtask: Place the tin on the dish with the left gripper + subtask_index: 82 +- subtask: Grasp the square wooden block with the right gripper + subtask_index: 83 +- subtask: Place the hard facial cleanser on the dish with the left gripper + subtask_index: 84 +- subtask: Place the blackboard erasure on the dish with the right gripper + subtask_index: 85 +- subtask: Place the blue pot on the dish with the right gripper + subtask_index: 86 +- subtask: Grasp the round wooden block with the right 
gripper + subtask_index: 87 +- subtask: Grasp the banana with the right gripper + subtask_index: 88 +- subtask: 'null' + subtask_index: 89 atomic_actions: - grasp - pick - place -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -312,13 +403,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -326,8 +414,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 101 total_frames: 26346 fps: 30 @@ -414,11 +501,9 @@ data_structure: 'Galaxea_R1_Lite_storage_object_dish_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:100 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -686,7 +771,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This 
dataset uses an extended format based on LeRobot and is @@ -694,7 +779,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -721,359 +805,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_storage_object_dish - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use a gripper to pick the target object and place on the dish. 
- sub_tasks: - - subtask: Grasp the blue pot with the left gripper - subtask_index: 0 - - subtask: Place the back scratcher on the dish with the right gripper - subtask_index: 1 - - subtask: Grasp the plugboard with the left gripper - subtask_index: 2 - - subtask: Place the soft facial cleanser on the dish with the right gripper - subtask_index: 3 - - subtask: Grasp the potato chips with the right gripper - subtask_index: 4 - - subtask: Grasp the banana with the left gripper - subtask_index: 5 - - subtask: Grasp the compasses with the right gripper - subtask_index: 6 - - subtask: Grasp the duck toy with the left gripper - subtask_index: 7 - - subtask: Place the round bread on the dish with the left gripper - subtask_index: 8 - - subtask: Grasp the blue cup with the left gripper - subtask_index: 9 - - subtask: Place the compasses on the dish with the right gripper - subtask_index: 10 - - subtask: Place the duck toy on the dish with the right gripper - subtask_index: 11 - - subtask: Place the round wooden block on the dish with the right gripper - subtask_index: 12 - - subtask: Place the green lemon on the dish with the right gripper - subtask_index: 13 - - subtask: Grasp the back scratcher with the right gripper - subtask_index: 14 - - subtask: Grasp the square chewing gum with the left gripper - subtask_index: 15 - - subtask: Grasp the chocolate cake with the right gripper - subtask_index: 16 - - subtask: Grasp the shower sphere with the left gripper - subtask_index: 17 - - subtask: Place the peach on the dish with the left gripper - subtask_index: 18 - - subtask: Grasp the plugboard with the right gripper - subtask_index: 19 - - subtask: Grasp the tin with the left gripper - subtask_index: 20 - - subtask: Grasp the brown towel with the left gripper - subtask_index: 21 - - subtask: Place the brown towel on the dish with the right gripper - subtask_index: 22 - - subtask: Grasp the hard facial cleanser with the left gripper - subtask_index: 23 - - subtask: Place the 
plugboard on the dish with the right gripper - subtask_index: 24 - - subtask: Grasp the peach with the right gripper - subtask_index: 25 - - subtask: Grasp the brown towel with the right gripper - subtask_index: 26 - - subtask: Place the coke on the dish with the right gripper - subtask_index: 27 - - subtask: Place the banana on the dish with the left gripper - subtask_index: 28 - - subtask: Place the peach on the dish with the right gripper - subtask_index: 29 - - subtask: Grasp the hard facial cleanser with the right gripper - subtask_index: 30 - - subtask: Place the potato chips on the dish with the right gripper - subtask_index: 31 - - subtask: Grasp the peach with the left gripper - subtask_index: 32 - - subtask: Grasp the green lemon with the right gripper - subtask_index: 33 - - subtask: Place the chocolate cake on the dish with the right gripper - subtask_index: 34 - - subtask: Place the plugboard on the dish with the left gripper - subtask_index: 35 - - subtask: Grasp the bread slice with the right gripper - subtask_index: 36 - - subtask: Place the square chewing gum on the dish with the right gripper - subtask_index: 37 - - subtask: Grasp the duck toy with the right gripper - subtask_index: 38 - - subtask: End - subtask_index: 39 - - subtask: Place the blackboard erasure on the dish with the left gripper - subtask_index: 40 - - subtask: Grasp the blackboard erasure with the left gripper - subtask_index: 41 - - subtask: Grasp the coke with the left gripper - subtask_index: 42 - - subtask: Place the tape on the dish with the right gripper - subtask_index: 43 - - subtask: Place the bread slice on the dish with the right gripper - subtask_index: 44 - - subtask: Place the shower sphere on the dish with the right gripper - subtask_index: 45 - - subtask: Grasp the round wooden block with the left gripper - subtask_index: 46 - - subtask: Grasp the compasses with the left gripper - subtask_index: 47 - - subtask: Place the banana on the dish with the right gripper 
- subtask_index: 48 - - subtask: Grasp the blue pot with the right gripper - subtask_index: 49 - - subtask: Grasp the round bread with the right gripper - subtask_index: 50 - - subtask: Grasp the chocolate cake with the left gripper - subtask_index: 51 - - subtask: Place the square chewing gum on the dish with the left gripper - subtask_index: 52 - - subtask: Place the blue cup on the dish with the right gripper - subtask_index: 53 - - subtask: Place the hard facial cleanser on the dish with the right gripper - subtask_index: 54 - - subtask: Place the blue cup on the dish with the left gripper - subtask_index: 55 - - subtask: Grasp the tape with the right gripper - subtask_index: 56 - - subtask: Place the duck toy on the dish with the left gripper - subtask_index: 57 - - subtask: Grasp the coke with the right gripper - subtask_index: 58 - - subtask: Place the square wooden block on the dish with the right gripper - subtask_index: 59 - - subtask: Grasp the square chewing gum with the right gripper - subtask_index: 60 - - subtask: Place the round bread on the dish with the right gripper - subtask_index: 61 - - subtask: Grasp the shower sphere with the right gripper - subtask_index: 62 - - subtask: Place the brown towel on the dish with the left gripper - subtask_index: 63 - - subtask: Place the bread slice on the dish with the left gripper - subtask_index: 64 - - subtask: Grasp the bread slice with the left gripper - subtask_index: 65 - - subtask: Grasp the yogurt with the left gripper - subtask_index: 66 - - subtask: Place the blue pot on the dish with the left gripper - subtask_index: 67 - - subtask: Grasp the blackboard erasure with the right gripper - subtask_index: 68 - - subtask: Place the coke on the dish with the left gripper - subtask_index: 69 - - subtask: Place the square wooden block on the dish with the left gripper - subtask_index: 70 - - subtask: Place the yogurt on the dish with the left gripper - subtask_index: 71 - - subtask: Place the chocolate 
cake on the dish with the left gripper - subtask_index: 72 - - subtask: Place the compasses on the dish with the left gripper - subtask_index: 73 - - subtask: Place the round wooden block on the dish with the left gripper - subtask_index: 74 - - subtask: Place the chocolate on the dish with the left gripper - subtask_index: 75 - - subtask: Grasp the round bread with the left gripper - subtask_index: 76 - - subtask: Grasp the chocolate with the left gripper - subtask_index: 77 - - subtask: Grasp the square wooden block with the left gripper - subtask_index: 78 - - subtask: Grasp the blue cup with the right gripper - subtask_index: 79 - - subtask: Grasp the soft facial cleanser with the right gripper - subtask_index: 80 - - subtask: Place the shower sphere on the dish with the left gripper - subtask_index: 81 - - subtask: Place the tin on the dish with the left gripper - subtask_index: 82 - - subtask: Grasp the square wooden block with the right gripper - subtask_index: 83 - - subtask: Place the hard facial cleanser on the dish with the left gripper - subtask_index: 84 - - subtask: Place the blackboard erasure on the dish with the right gripper - subtask_index: 85 - - subtask: Place the blue pot on the dish with the right gripper - subtask_index: 86 - - subtask: Grasp the round wooden block with the right gripper - subtask_index: 87 - - subtask: Grasp the banana with the right gripper - subtask_index: 88 - - subtask: 'null' - subtask_index: 89 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 26346 - dataset_size: 993.15 MB - data_structure: 'Galaxea_R1_Lite_storage_object_dish_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (89 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null 
-data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Galaxea_R1_Lite_storage_object_gray_plate.yaml b/dataset_info/Galaxea_R1_Lite_storage_object_gray_plate.yaml index ef15c94cfaa473960e1639bc8083701c859dc2aa..1555e16a31adc1369e09c47386f64050d974fa62 100644 --- a/dataset_info/Galaxea_R1_Lite_storage_object_gray_plate.yaml +++ b/dataset_info/Galaxea_R1_Lite_storage_object_gray_plate.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: gray_plate level1: kitchen_supplies level2: gray_plate @@ -195,115 +195,208 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use a gripper to pick the target object and place on the gray plate. +task_instruction: +- use a gripper to pick the target object and place on the gray plate. 
sub_tasks: -- Place the tape on the gray plate with the left gripper -- Grasp the blue pot with the left gripper -- Grasp the plugboard with the left gripper -- Grasp the potato chips with the right gripper -- Grasp the banana with the left gripper -- Place the round wooden block on the gray plate with the left gripper -- Place the peach on the gray plate with the left gripper -- Grasp the compasses with the right gripper -- Place the square chewing gum on the gray plate with the right gripper -- Place the compasses on the gray plate with the right gripper -- Place the potato chips on the gray plate with the right gripper -- Grasp the duck toy with the left gripper -- Place the green lemon on the gray plate with the right gripper -- Grasp the blue cup with the left gripper -- Place the square wooden block on the gray plate with the left gripper -- Grasp the soft facial cleanser with the left gripper -- Place the blue cup on the gray plate with the left gripper -- Grasp the square chewing gum with the left gripper -- Grasp the chocolate cake with the right gripper -- Grasp the shower sphere with the left gripper -- Place the blackboard erasure on the gray plate with the right gripper -- Grasp the plugboard with the right gripper -- Grasp the yogurt with the right gripper -- Grasp the brown towel with the left gripper -- Place the round bread on the gray plate with the right gripper -- Grasp the hard facial cleanser with the left gripper -- Place the shower sphere on the gray plate with the left gripper -- Grasp the chocolate with the right gripper -- Place the soft facial cleanser on the gray plate with the left gripper -- Grasp the peach with the right gripper -- Grasp the brown towel with the right gripper -- Place the shower sphere on the gray plate with the right gripper -- Grasp the back scratcher with the left gripper -- Place the square wooden block on the gray plate with the right gripper -- Place the blackboard erasure on the gray plate with the left 
gripper -- Place the yogurt on the gray plate with the right gripper -- Grasp the peach with the left gripper -- Place the coke on the gray plate with the left gripper -- Place the chocolate cake on the gray plate with the right gripper -- Place the banana on the gray plate with the left gripper -- Grasp the green lemon with the right gripper -- Grasp the bread slice with the right gripper -- Place the bread slice on the gray plate with the right gripper -- Place the brown towel on the gray plate with the right gripper -- Grasp the potato chips with the left gripper -- Grasp the duck toy with the right gripper -- End -- Grasp the blackboard erasure with the left gripper -- Grasp the coke with the left gripper -- Place the plugboard on the gray plate with the right gripper -- Place the round bread on the gray plate with the left gripper -- Grasp the round wooden block with the left gripper -- Grasp the compasses with the left gripper -- Place the round wooden block on the gray plate with the right gripper -- Place the blue pot on the gray plate with the left gripper -- Grasp the blue pot with the right gripper -- Grasp the round bread with the right gripper -- Grasp the chocolate cake with the left gripper -- Place the brown towel on the gray plate with the left gripper -- Place the blue pot on the gray plate with the right gripper -- Place the compasses on the gray plate with the left gripper -- Place the coke on the gray plate with the right gripper -- Grasp the tin with the right gripper -- Place the square chewing gum on the gray plate with the left gripper -- Place the back scratcher on the gray plate with the left gripper -- Grasp the tape with the right gripper -- Place the tin on the gray plate with the right gripper -- Grasp the coke with the right gripper -- Grasp the tape with the left gripper -- Grasp the square chewing gum with the right gripper -- Place the tape on the gray plate with the right gripper -- Place the hard facial cleanser on the gray 
plate with the left gripper -- Grasp the shower sphere with the right gripper -- Place the duck toy on the gray plate with the right gripper -- Grasp the bread slice with the left gripper -- Place the chocolate on the gray plate with the right gripper -- Grasp the blackboard erasure with the right gripper -- Place the banana on the gray plate with the right gripper -- Place the peach on the gray plate with the right gripper -- Grasp the round bread with the left gripper -- Grasp the square wooden block with the left gripper -- Grasp the blue cup with the right gripper -- Place the chocolate cake on the gray plate with the left gripper -- Place the duck toy on the gray plate with the left gripper -- Grasp the square wooden block with the right gripper -- Place the plugboard on the gray plate with the left gripper -- Place the bread slice on the gray plate with the left gripper -- Grasp the round wooden block with the right gripper -- Place the potato chips on the gray plate with the left gripper -- Grasp the banana with the right gripper -- Place the blue cup on the gray plate with the right gripper -- 'null' +- subtask: Place the tape on the gray plate with the left gripper + subtask_index: 0 +- subtask: Grasp the blue pot with the left gripper + subtask_index: 1 +- subtask: Grasp the plugboard with the left gripper + subtask_index: 2 +- subtask: Grasp the potato chips with the right gripper + subtask_index: 3 +- subtask: Grasp the banana with the left gripper + subtask_index: 4 +- subtask: Place the round wooden block on the gray plate with the left gripper + subtask_index: 5 +- subtask: Place the peach on the gray plate with the left gripper + subtask_index: 6 +- subtask: Grasp the compasses with the right gripper + subtask_index: 7 +- subtask: Place the square chewing gum on the gray plate with the right gripper + subtask_index: 8 +- subtask: Place the compasses on the gray plate with the right gripper + subtask_index: 9 +- subtask: Place the potato chips on the 
gray plate with the right gripper + subtask_index: 10 +- subtask: Grasp the duck toy with the left gripper + subtask_index: 11 +- subtask: Place the green lemon on the gray plate with the right gripper + subtask_index: 12 +- subtask: Grasp the blue cup with the left gripper + subtask_index: 13 +- subtask: Place the square wooden block on the gray plate with the left gripper + subtask_index: 14 +- subtask: Grasp the soft facial cleanser with the left gripper + subtask_index: 15 +- subtask: Place the blue cup on the gray plate with the left gripper + subtask_index: 16 +- subtask: Grasp the square chewing gum with the left gripper + subtask_index: 17 +- subtask: Grasp the chocolate cake with the right gripper + subtask_index: 18 +- subtask: Grasp the shower sphere with the left gripper + subtask_index: 19 +- subtask: Place the blackboard erasure on the gray plate with the right gripper + subtask_index: 20 +- subtask: Grasp the plugboard with the right gripper + subtask_index: 21 +- subtask: Grasp the yogurt with the right gripper + subtask_index: 22 +- subtask: Grasp the brown towel with the left gripper + subtask_index: 23 +- subtask: Place the round bread on the gray plate with the right gripper + subtask_index: 24 +- subtask: Grasp the hard facial cleanser with the left gripper + subtask_index: 25 +- subtask: Place the shower sphere on the gray plate with the left gripper + subtask_index: 26 +- subtask: Grasp the chocolate with the right gripper + subtask_index: 27 +- subtask: Place the soft facial cleanser on the gray plate with the left gripper + subtask_index: 28 +- subtask: Grasp the peach with the right gripper + subtask_index: 29 +- subtask: Grasp the brown towel with the right gripper + subtask_index: 30 +- subtask: Place the shower sphere on the gray plate with the right gripper + subtask_index: 31 +- subtask: Grasp the back scratcher with the left gripper + subtask_index: 32 +- subtask: Place the square wooden block on the gray plate with the right gripper 
+ subtask_index: 33 +- subtask: Place the blackboard erasure on the gray plate with the left gripper + subtask_index: 34 +- subtask: Place the yogurt on the gray plate with the right gripper + subtask_index: 35 +- subtask: Grasp the peach with the left gripper + subtask_index: 36 +- subtask: Place the coke on the gray plate with the left gripper + subtask_index: 37 +- subtask: Place the chocolate cake on the gray plate with the right gripper + subtask_index: 38 +- subtask: Place the banana on the gray plate with the left gripper + subtask_index: 39 +- subtask: Grasp the green lemon with the right gripper + subtask_index: 40 +- subtask: Grasp the bread slice with the right gripper + subtask_index: 41 +- subtask: Place the bread slice on the gray plate with the right gripper + subtask_index: 42 +- subtask: Place the brown towel on the gray plate with the right gripper + subtask_index: 43 +- subtask: Grasp the potato chips with the left gripper + subtask_index: 44 +- subtask: Grasp the duck toy with the right gripper + subtask_index: 45 +- subtask: End + subtask_index: 46 +- subtask: Grasp the blackboard erasure with the left gripper + subtask_index: 47 +- subtask: Grasp the coke with the left gripper + subtask_index: 48 +- subtask: Place the plugboard on the gray plate with the right gripper + subtask_index: 49 +- subtask: Place the round bread on the gray plate with the left gripper + subtask_index: 50 +- subtask: Grasp the round wooden block with the left gripper + subtask_index: 51 +- subtask: Grasp the compasses with the left gripper + subtask_index: 52 +- subtask: Place the round wooden block on the gray plate with the right gripper + subtask_index: 53 +- subtask: Place the blue pot on the gray plate with the left gripper + subtask_index: 54 +- subtask: Grasp the blue pot with the right gripper + subtask_index: 55 +- subtask: Grasp the round bread with the right gripper + subtask_index: 56 +- subtask: Grasp the chocolate cake with the left gripper + 
subtask_index: 57 +- subtask: Place the brown towel on the gray plate with the left gripper + subtask_index: 58 +- subtask: Place the blue pot on the gray plate with the right gripper + subtask_index: 59 +- subtask: Place the compasses on the gray plate with the left gripper + subtask_index: 60 +- subtask: Place the coke on the gray plate with the right gripper + subtask_index: 61 +- subtask: Grasp the tin with the right gripper + subtask_index: 62 +- subtask: Place the square chewing gum on the gray plate with the left gripper + subtask_index: 63 +- subtask: Place the back scratcher on the gray plate with the left gripper + subtask_index: 64 +- subtask: Grasp the tape with the right gripper + subtask_index: 65 +- subtask: Place the tin on the gray plate with the right gripper + subtask_index: 66 +- subtask: Grasp the coke with the right gripper + subtask_index: 67 +- subtask: Grasp the tape with the left gripper + subtask_index: 68 +- subtask: Grasp the square chewing gum with the right gripper + subtask_index: 69 +- subtask: Place the tape on the gray plate with the right gripper + subtask_index: 70 +- subtask: Place the hard facial cleanser on the gray plate with the left gripper + subtask_index: 71 +- subtask: Grasp the shower sphere with the right gripper + subtask_index: 72 +- subtask: Place the duck toy on the gray plate with the right gripper + subtask_index: 73 +- subtask: Grasp the bread slice with the left gripper + subtask_index: 74 +- subtask: Place the chocolate on the gray plate with the right gripper + subtask_index: 75 +- subtask: Grasp the blackboard erasure with the right gripper + subtask_index: 76 +- subtask: Place the banana on the gray plate with the right gripper + subtask_index: 77 +- subtask: Place the peach on the gray plate with the right gripper + subtask_index: 78 +- subtask: Grasp the round bread with the left gripper + subtask_index: 79 +- subtask: Grasp the square wooden block with the left gripper + subtask_index: 80 +- subtask: 
Grasp the blue cup with the right gripper + subtask_index: 81 +- subtask: Place the chocolate cake on the gray plate with the left gripper + subtask_index: 82 +- subtask: Place the duck toy on the gray plate with the left gripper + subtask_index: 83 +- subtask: Grasp the square wooden block with the right gripper + subtask_index: 84 +- subtask: Place the plugboard on the gray plate with the left gripper + subtask_index: 85 +- subtask: Place the bread slice on the gray plate with the left gripper + subtask_index: 86 +- subtask: Grasp the round wooden block with the right gripper + subtask_index: 87 +- subtask: Place the potato chips on the gray plate with the left gripper + subtask_index: 88 +- subtask: Grasp the banana with the right gripper + subtask_index: 89 +- subtask: Place the blue cup on the gray plate with the right gripper + subtask_index: 90 +- subtask: 'null' + subtask_index: 91 atomic_actions: - grasp - pick - place -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -314,13 +407,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -328,8 +418,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 102 total_frames: 20357 fps: 30 @@ -416,11 +505,9 @@ data_structure: 'Galaxea_R1_Lite_storage_object_gray_plate_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:101 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -688,7 +775,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -696,7 +783,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback 
regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -723,363 +809,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_storage_object_gray_plate - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use a gripper to pick the target object and place on the gray plate. 
- sub_tasks: - - subtask: Place the tape on the gray plate with the left gripper - subtask_index: 0 - - subtask: Grasp the blue pot with the left gripper - subtask_index: 1 - - subtask: Grasp the plugboard with the left gripper - subtask_index: 2 - - subtask: Grasp the potato chips with the right gripper - subtask_index: 3 - - subtask: Grasp the banana with the left gripper - subtask_index: 4 - - subtask: Place the round wooden block on the gray plate with the left gripper - subtask_index: 5 - - subtask: Place the peach on the gray plate with the left gripper - subtask_index: 6 - - subtask: Grasp the compasses with the right gripper - subtask_index: 7 - - subtask: Place the square chewing gum on the gray plate with the right gripper - subtask_index: 8 - - subtask: Place the compasses on the gray plate with the right gripper - subtask_index: 9 - - subtask: Place the potato chips on the gray plate with the right gripper - subtask_index: 10 - - subtask: Grasp the duck toy with the left gripper - subtask_index: 11 - - subtask: Place the green lemon on the gray plate with the right gripper - subtask_index: 12 - - subtask: Grasp the blue cup with the left gripper - subtask_index: 13 - - subtask: Place the square wooden block on the gray plate with the left gripper - subtask_index: 14 - - subtask: Grasp the soft facial cleanser with the left gripper - subtask_index: 15 - - subtask: Place the blue cup on the gray plate with the left gripper - subtask_index: 16 - - subtask: Grasp the square chewing gum with the left gripper - subtask_index: 17 - - subtask: Grasp the chocolate cake with the right gripper - subtask_index: 18 - - subtask: Grasp the shower sphere with the left gripper - subtask_index: 19 - - subtask: Place the blackboard erasure on the gray plate with the right gripper - subtask_index: 20 - - subtask: Grasp the plugboard with the right gripper - subtask_index: 21 - - subtask: Grasp the yogurt with the right gripper - subtask_index: 22 - - subtask: Grasp the 
brown towel with the left gripper - subtask_index: 23 - - subtask: Place the round bread on the gray plate with the right gripper - subtask_index: 24 - - subtask: Grasp the hard facial cleanser with the left gripper - subtask_index: 25 - - subtask: Place the shower sphere on the gray plate with the left gripper - subtask_index: 26 - - subtask: Grasp the chocolate with the right gripper - subtask_index: 27 - - subtask: Place the soft facial cleanser on the gray plate with the left gripper - subtask_index: 28 - - subtask: Grasp the peach with the right gripper - subtask_index: 29 - - subtask: Grasp the brown towel with the right gripper - subtask_index: 30 - - subtask: Place the shower sphere on the gray plate with the right gripper - subtask_index: 31 - - subtask: Grasp the back scratcher with the left gripper - subtask_index: 32 - - subtask: Place the square wooden block on the gray plate with the right gripper - subtask_index: 33 - - subtask: Place the blackboard erasure on the gray plate with the left gripper - subtask_index: 34 - - subtask: Place the yogurt on the gray plate with the right gripper - subtask_index: 35 - - subtask: Grasp the peach with the left gripper - subtask_index: 36 - - subtask: Place the coke on the gray plate with the left gripper - subtask_index: 37 - - subtask: Place the chocolate cake on the gray plate with the right gripper - subtask_index: 38 - - subtask: Place the banana on the gray plate with the left gripper - subtask_index: 39 - - subtask: Grasp the green lemon with the right gripper - subtask_index: 40 - - subtask: Grasp the bread slice with the right gripper - subtask_index: 41 - - subtask: Place the bread slice on the gray plate with the right gripper - subtask_index: 42 - - subtask: Place the brown towel on the gray plate with the right gripper - subtask_index: 43 - - subtask: Grasp the potato chips with the left gripper - subtask_index: 44 - - subtask: Grasp the duck toy with the right gripper - subtask_index: 45 - - subtask: 
End - subtask_index: 46 - - subtask: Grasp the blackboard erasure with the left gripper - subtask_index: 47 - - subtask: Grasp the coke with the left gripper - subtask_index: 48 - - subtask: Place the plugboard on the gray plate with the right gripper - subtask_index: 49 - - subtask: Place the round bread on the gray plate with the left gripper - subtask_index: 50 - - subtask: Grasp the round wooden block with the left gripper - subtask_index: 51 - - subtask: Grasp the compasses with the left gripper - subtask_index: 52 - - subtask: Place the round wooden block on the gray plate with the right gripper - subtask_index: 53 - - subtask: Place the blue pot on the gray plate with the left gripper - subtask_index: 54 - - subtask: Grasp the blue pot with the right gripper - subtask_index: 55 - - subtask: Grasp the round bread with the right gripper - subtask_index: 56 - - subtask: Grasp the chocolate cake with the left gripper - subtask_index: 57 - - subtask: Place the brown towel on the gray plate with the left gripper - subtask_index: 58 - - subtask: Place the blue pot on the gray plate with the right gripper - subtask_index: 59 - - subtask: Place the compasses on the gray plate with the left gripper - subtask_index: 60 - - subtask: Place the coke on the gray plate with the right gripper - subtask_index: 61 - - subtask: Grasp the tin with the right gripper - subtask_index: 62 - - subtask: Place the square chewing gum on the gray plate with the left gripper - subtask_index: 63 - - subtask: Place the back scratcher on the gray plate with the left gripper - subtask_index: 64 - - subtask: Grasp the tape with the right gripper - subtask_index: 65 - - subtask: Place the tin on the gray plate with the right gripper - subtask_index: 66 - - subtask: Grasp the coke with the right gripper - subtask_index: 67 - - subtask: Grasp the tape with the left gripper - subtask_index: 68 - - subtask: Grasp the square chewing gum with the right gripper - subtask_index: 69 - - subtask: Place 
the tape on the gray plate with the right gripper - subtask_index: 70 - - subtask: Place the hard facial cleanser on the gray plate with the left gripper - subtask_index: 71 - - subtask: Grasp the shower sphere with the right gripper - subtask_index: 72 - - subtask: Place the duck toy on the gray plate with the right gripper - subtask_index: 73 - - subtask: Grasp the bread slice with the left gripper - subtask_index: 74 - - subtask: Place the chocolate on the gray plate with the right gripper - subtask_index: 75 - - subtask: Grasp the blackboard erasure with the right gripper - subtask_index: 76 - - subtask: Place the banana on the gray plate with the right gripper - subtask_index: 77 - - subtask: Place the peach on the gray plate with the right gripper - subtask_index: 78 - - subtask: Grasp the round bread with the left gripper - subtask_index: 79 - - subtask: Grasp the square wooden block with the left gripper - subtask_index: 80 - - subtask: Grasp the blue cup with the right gripper - subtask_index: 81 - - subtask: Place the chocolate cake on the gray plate with the left gripper - subtask_index: 82 - - subtask: Place the duck toy on the gray plate with the left gripper - subtask_index: 83 - - subtask: Grasp the square wooden block with the right gripper - subtask_index: 84 - - subtask: Place the plugboard on the gray plate with the left gripper - subtask_index: 85 - - subtask: Place the bread slice on the gray plate with the left gripper - subtask_index: 86 - - subtask: Grasp the round wooden block with the right gripper - subtask_index: 87 - - subtask: Place the potato chips on the gray plate with the left gripper - subtask_index: 88 - - subtask: Grasp the banana with the right gripper - subtask_index: 89 - - subtask: Place the blue cup on the gray plate with the right gripper - subtask_index: 90 - - subtask: 'null' - subtask_index: 91 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - 
tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 20357 - dataset_size: 786.26 MB - data_structure: 'Galaxea_R1_Lite_storage_object_gray_plate_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (90 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. 
- homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Galaxea_R1_Lite_storage_object_pink_bowl.yaml b/dataset_info/Galaxea_R1_Lite_storage_object_pink_bowl.yaml index 2ab9c05ffc4f02f1ece1d535f9b92370872552dc..d6fe6b7e3b2ebdc491f6ff69f805d65d277ff9a6 100644 --- a/dataset_info/Galaxea_R1_Lite_storage_object_pink_bowl.yaml +++ b/dataset_info/Galaxea_R1_Lite_storage_object_pink_bowl.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: pink_bowl level1: plastic_bowl level2: pink_bowl @@ -195,113 +195,204 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. 
-task_result: 'null' -task_instruction: use a gripper to pick the target object and place on the pink bowl. +task_instruction: +- use a gripper to pick the target object and place on the pink bowl. sub_tasks: -- Place the bread slice on the pink bowl with the left gripper -- Place the round wooden block on the pink bowl with the left gripper -- Grasp the blue pot with the left gripper -- Grasp the plugboard with the left gripper -- Place the chocolate on the pink bowl with the right gripper -- Grasp the potato chips with the right gripper -- Place the bread slice on the pink bowl with the right gripper -- Grasp the banana with the left gripper -- Place the round bread on the pink bowl with the right gripper -- Grasp the compasses with the right gripper -- Grasp the duck toy with the left gripper -- Place the banana on the pink bowl with the left gripper -- Place the hard facial cleanser on the pink bowl with the right gripper -- Grasp the blue cup with the left gripper -- Place the brown towel on the pink bowl with the right gripper -- Place the duck toy on the pink bowl with the right gripper -- Place the coke on the pink bowl with the right gripper -- Grasp the square chewing gum with the left gripper -- Grasp the chocolate cake with the right gripper -- Place the banana on the pink bowl with the right gripper -- Grasp the shower sphere with the left gripper -- Grasp the plugboard with the right gripper -- Grasp the yogurt with the right gripper -- Grasp the tin with the left gripper -- Grasp the brown towel with the left gripper -- Place the peach on the pink bowl with the left gripper -- Grasp the chocolate with the right gripper -- Place the tape on the pink bowl with the right gripper -- Grasp the peach with the right gripper -- Grasp the brown towel with the right gripper -- Grasp the back scratcher with the left gripper -- Place the round wooden block on the pink bowl with the right gripper -- Place the tape on the pink bowl with the left gripper -- Place 
the duck toy on the pink bowl with the left gripper -- Place the shower sphere on the pink bowl with the left gripper -- Place the blue cup on the pink bowl with the left gripper -- Grasp the hard facial cleanser with the right gripper -- Place the blue pot on the pink bowl with the left gripper -- Place the coke on the pink bowl with the left gripper -- Place the blue cup on the pink bowl with the right gripper -- Grasp the peach with the left gripper -- Place the potato chips on the pink bowl with the right gripper -- Place the square chewing gum on the pink bowl with the left gripper -- Place the square wooden block on the pink bowl with the right gripper -- Place the blackboard erasure on the pink bowl with the left gripper -- Grasp the bread slice with the right gripper -- Grasp the potato chips with the left gripper -- Grasp the duck toy with the right gripper -- End -- Place the compasses on the pink bowl with the left gripper -- Place the blue pot on the pink bowl with the right gripper -- Grasp the blackboard erasure with the left gripper -- Grasp the coke with the left gripper -- Place the chocolate cake on the pink bowl with the right gripper -- Grasp the round wooden block with the left gripper -- Place the blackboard erasure on the pink bowl with the right gripper -- Grasp the compasses with the left gripper -- Place the peach on the pink bowl with the right gripper -- Place the soft facial cleanser on the pink bowl with the right gripper -- Grasp the blue pot with the right gripper -- Grasp the round bread with the right gripper -- Place the plugboard on the pink bowl with the left gripper -- Grasp the tin with the right gripper -- Place the square chewing gum on the pink bowl with the right gripper -- Place the compasses on the pink bowl with the right gripper -- Grasp the tape with the right gripper -- Place the shower sphere on the pink bowl with the right gripper -- Grasp the coke with the right gripper -- Grasp the tape with the left gripper -- 
Place the tin on the pink bowl with the right gripper -- Grasp the square chewing gum with the right gripper -- Grasp the shower sphere with the right gripper -- Grasp the bread slice with the left gripper -- Grasp the blackboard erasure with the right gripper -- Place the brown towel on the pink bowl with the left gripper -- Place the square wooden block on the pink bowl with the left gripper -- Grasp the square wooden block with the left gripper -- Grasp the blue cup with the right gripper -- Place the back scratcher on the pink bowl with the left gripper -- Grasp the soft facial cleanser with the right gripper -- Place the green lemon on the pink bowl with the left gripper -- Grasp the square wooden block with the right gripper -- Grasp the green lemon with the left gripper -- Place the tin on the pink bowl with the left gripper -- Place the yogurt on the pink bowl with the right gripper -- Place the potato chips on the pink bowl with the left gripper -- Grasp the round wooden block with the right gripper -- Grasp the banana with the right gripper -- Place the plugboard on the pink bowl with the right gripper -- 'null' +- subtask: Place the bread slice on the pink bowl with the left gripper + subtask_index: 0 +- subtask: Place the round wooden block on the pink bowl with the left gripper + subtask_index: 1 +- subtask: Grasp the blue pot with the left gripper + subtask_index: 2 +- subtask: Grasp the plugboard with the left gripper + subtask_index: 3 +- subtask: Place the chocolate on the pink bowl with the right gripper + subtask_index: 4 +- subtask: Grasp the potato chips with the right gripper + subtask_index: 5 +- subtask: Place the bread slice on the pink bowl with the right gripper + subtask_index: 6 +- subtask: Grasp the banana with the left gripper + subtask_index: 7 +- subtask: Place the round bread on the pink bowl with the right gripper + subtask_index: 8 +- subtask: Grasp the compasses with the right gripper + subtask_index: 9 +- subtask: Grasp the 
duck toy with the left gripper + subtask_index: 10 +- subtask: Place the banana on the pink bowl with the left gripper + subtask_index: 11 +- subtask: Place the hard facial cleanser on the pink bowl with the right gripper + subtask_index: 12 +- subtask: Grasp the blue cup with the left gripper + subtask_index: 13 +- subtask: Place the brown towel on the pink bowl with the right gripper + subtask_index: 14 +- subtask: Place the duck toy on the pink bowl with the right gripper + subtask_index: 15 +- subtask: Place the coke on the pink bowl with the right gripper + subtask_index: 16 +- subtask: Grasp the square chewing gum with the left gripper + subtask_index: 17 +- subtask: Grasp the chocolate cake with the right gripper + subtask_index: 18 +- subtask: Place the banana on the pink bowl with the right gripper + subtask_index: 19 +- subtask: Grasp the shower sphere with the left gripper + subtask_index: 20 +- subtask: Grasp the plugboard with the right gripper + subtask_index: 21 +- subtask: Grasp the yogurt with the right gripper + subtask_index: 22 +- subtask: Grasp the tin with the left gripper + subtask_index: 23 +- subtask: Grasp the brown towel with the left gripper + subtask_index: 24 +- subtask: Place the peach on the pink bowl with the left gripper + subtask_index: 25 +- subtask: Grasp the chocolate with the right gripper + subtask_index: 26 +- subtask: Place the tape on the pink bowl with the right gripper + subtask_index: 27 +- subtask: Grasp the peach with the right gripper + subtask_index: 28 +- subtask: Grasp the brown towel with the right gripper + subtask_index: 29 +- subtask: Grasp the back scratcher with the left gripper + subtask_index: 30 +- subtask: Place the round wooden block on the pink bowl with the right gripper + subtask_index: 31 +- subtask: Place the tape on the pink bowl with the left gripper + subtask_index: 32 +- subtask: Place the duck toy on the pink bowl with the left gripper + subtask_index: 33 +- subtask: Place the shower sphere on 
the pink bowl with the left gripper + subtask_index: 34 +- subtask: Place the blue cup on the pink bowl with the left gripper + subtask_index: 35 +- subtask: Grasp the hard facial cleanser with the right gripper + subtask_index: 36 +- subtask: Place the blue pot on the pink bowl with the left gripper + subtask_index: 37 +- subtask: Place the coke on the pink bowl with the left gripper + subtask_index: 38 +- subtask: Place the blue cup on the pink bowl with the right gripper + subtask_index: 39 +- subtask: Grasp the peach with the left gripper + subtask_index: 40 +- subtask: Place the potato chips on the pink bowl with the right gripper + subtask_index: 41 +- subtask: Place the square chewing gum on the pink bowl with the left gripper + subtask_index: 42 +- subtask: Place the square wooden block on the pink bowl with the right gripper + subtask_index: 43 +- subtask: Place the blackboard erasure on the pink bowl with the left gripper + subtask_index: 44 +- subtask: Grasp the bread slice with the right gripper + subtask_index: 45 +- subtask: Grasp the potato chips with the left gripper + subtask_index: 46 +- subtask: Grasp the duck toy with the right gripper + subtask_index: 47 +- subtask: End + subtask_index: 48 +- subtask: Place the compasses on the pink bowl with the left gripper + subtask_index: 49 +- subtask: Place the blue pot on the pink bowl with the right gripper + subtask_index: 50 +- subtask: Grasp the blackboard erasure with the left gripper + subtask_index: 51 +- subtask: Grasp the coke with the left gripper + subtask_index: 52 +- subtask: Place the chocolate cake on the pink bowl with the right gripper + subtask_index: 53 +- subtask: Grasp the round wooden block with the left gripper + subtask_index: 54 +- subtask: Place the blackboard erasure on the pink bowl with the right gripper + subtask_index: 55 +- subtask: Grasp the compasses with the left gripper + subtask_index: 56 +- subtask: Place the peach on the pink bowl with the right gripper + 
subtask_index: 57 +- subtask: Place the soft facial cleanser on the pink bowl with the right gripper + subtask_index: 58 +- subtask: Grasp the blue pot with the right gripper + subtask_index: 59 +- subtask: Grasp the round bread with the right gripper + subtask_index: 60 +- subtask: Place the plugboard on the pink bowl with the left gripper + subtask_index: 61 +- subtask: Grasp the tin with the right gripper + subtask_index: 62 +- subtask: Place the square chewing gum on the pink bowl with the right gripper + subtask_index: 63 +- subtask: Place the compasses on the pink bowl with the right gripper + subtask_index: 64 +- subtask: Grasp the tape with the right gripper + subtask_index: 65 +- subtask: Place the shower sphere on the pink bowl with the right gripper + subtask_index: 66 +- subtask: Grasp the coke with the right gripper + subtask_index: 67 +- subtask: Grasp the tape with the left gripper + subtask_index: 68 +- subtask: Place the tin on the pink bowl with the right gripper + subtask_index: 69 +- subtask: Grasp the square chewing gum with the right gripper + subtask_index: 70 +- subtask: Grasp the shower sphere with the right gripper + subtask_index: 71 +- subtask: Grasp the bread slice with the left gripper + subtask_index: 72 +- subtask: Grasp the blackboard erasure with the right gripper + subtask_index: 73 +- subtask: Place the brown towel on the pink bowl with the left gripper + subtask_index: 74 +- subtask: Place the square wooden block on the pink bowl with the left gripper + subtask_index: 75 +- subtask: Grasp the square wooden block with the left gripper + subtask_index: 76 +- subtask: Grasp the blue cup with the right gripper + subtask_index: 77 +- subtask: Place the back scratcher on the pink bowl with the left gripper + subtask_index: 78 +- subtask: Grasp the soft facial cleanser with the right gripper + subtask_index: 79 +- subtask: Place the green lemon on the pink bowl with the left gripper + subtask_index: 80 +- subtask: Grasp the square 
wooden block with the right gripper + subtask_index: 81 +- subtask: Grasp the green lemon with the left gripper + subtask_index: 82 +- subtask: Place the tin on the pink bowl with the left gripper + subtask_index: 83 +- subtask: Place the yogurt on the pink bowl with the right gripper + subtask_index: 84 +- subtask: Place the potato chips on the pink bowl with the left gripper + subtask_index: 85 +- subtask: Grasp the round wooden block with the right gripper + subtask_index: 86 +- subtask: Grasp the banana with the right gripper + subtask_index: 87 +- subtask: Place the plugboard on the pink bowl with the right gripper + subtask_index: 88 +- subtask: 'null' + subtask_index: 89 atomic_actions: - grasp - pick - place -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -312,13 +403,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -326,8 +414,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 102 total_frames: 20095 fps: 30 @@ -414,11 +501,9 @@ data_structure: 
'Galaxea_R1_Lite_storage_object_pink_bowl_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:101 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -686,7 +771,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -694,7 +779,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -721,359 +805,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. 
- extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_storage_object_pink_bowl - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use a gripper to pick the target object and place on the pink bowl. - sub_tasks: - - subtask: Place the bread slice on the pink bowl with the left gripper - subtask_index: 0 - - subtask: Place the round wooden block on the pink bowl with the left gripper - subtask_index: 1 - - subtask: Grasp the blue pot with the left gripper - subtask_index: 2 - - subtask: Grasp the plugboard with the left gripper - subtask_index: 3 - - subtask: Place the chocolate on the pink bowl with the right gripper - subtask_index: 4 - - subtask: Grasp the potato chips with the right gripper - subtask_index: 5 - - subtask: Place the bread slice on the pink bowl with the right gripper - subtask_index: 6 - - subtask: Grasp the banana with the left gripper - subtask_index: 7 - - subtask: Place the round bread on the pink bowl with the right gripper - subtask_index: 8 - - subtask: Grasp the compasses with the right gripper - subtask_index: 9 - - subtask: Grasp the duck toy with the left gripper - subtask_index: 10 - - subtask: Place the banana on the pink bowl with the left gripper - subtask_index: 11 - - subtask: Place the hard facial cleanser on the pink bowl with the right gripper - subtask_index: 12 - - subtask: Grasp the blue cup with the left gripper - subtask_index: 13 - - subtask: Place the brown towel on the pink bowl with the right gripper - subtask_index: 14 - - subtask: Place the duck toy on the pink bowl with the right gripper - subtask_index: 15 - - subtask: 
Place the coke on the pink bowl with the right gripper - subtask_index: 16 - - subtask: Grasp the square chewing gum with the left gripper - subtask_index: 17 - - subtask: Grasp the chocolate cake with the right gripper - subtask_index: 18 - - subtask: Place the banana on the pink bowl with the right gripper - subtask_index: 19 - - subtask: Grasp the shower sphere with the left gripper - subtask_index: 20 - - subtask: Grasp the plugboard with the right gripper - subtask_index: 21 - - subtask: Grasp the yogurt with the right gripper - subtask_index: 22 - - subtask: Grasp the tin with the left gripper - subtask_index: 23 - - subtask: Grasp the brown towel with the left gripper - subtask_index: 24 - - subtask: Place the peach on the pink bowl with the left gripper - subtask_index: 25 - - subtask: Grasp the chocolate with the right gripper - subtask_index: 26 - - subtask: Place the tape on the pink bowl with the right gripper - subtask_index: 27 - - subtask: Grasp the peach with the right gripper - subtask_index: 28 - - subtask: Grasp the brown towel with the right gripper - subtask_index: 29 - - subtask: Grasp the back scratcher with the left gripper - subtask_index: 30 - - subtask: Place the round wooden block on the pink bowl with the right gripper - subtask_index: 31 - - subtask: Place the tape on the pink bowl with the left gripper - subtask_index: 32 - - subtask: Place the duck toy on the pink bowl with the left gripper - subtask_index: 33 - - subtask: Place the shower sphere on the pink bowl with the left gripper - subtask_index: 34 - - subtask: Place the blue cup on the pink bowl with the left gripper - subtask_index: 35 - - subtask: Grasp the hard facial cleanser with the right gripper - subtask_index: 36 - - subtask: Place the blue pot on the pink bowl with the left gripper - subtask_index: 37 - - subtask: Place the coke on the pink bowl with the left gripper - subtask_index: 38 - - subtask: Place the blue cup on the pink bowl with the right gripper - 
subtask_index: 39 - - subtask: Grasp the peach with the left gripper - subtask_index: 40 - - subtask: Place the potato chips on the pink bowl with the right gripper - subtask_index: 41 - - subtask: Place the square chewing gum on the pink bowl with the left gripper - subtask_index: 42 - - subtask: Place the square wooden block on the pink bowl with the right gripper - subtask_index: 43 - - subtask: Place the blackboard erasure on the pink bowl with the left gripper - subtask_index: 44 - - subtask: Grasp the bread slice with the right gripper - subtask_index: 45 - - subtask: Grasp the potato chips with the left gripper - subtask_index: 46 - - subtask: Grasp the duck toy with the right gripper - subtask_index: 47 - - subtask: End - subtask_index: 48 - - subtask: Place the compasses on the pink bowl with the left gripper - subtask_index: 49 - - subtask: Place the blue pot on the pink bowl with the right gripper - subtask_index: 50 - - subtask: Grasp the blackboard erasure with the left gripper - subtask_index: 51 - - subtask: Grasp the coke with the left gripper - subtask_index: 52 - - subtask: Place the chocolate cake on the pink bowl with the right gripper - subtask_index: 53 - - subtask: Grasp the round wooden block with the left gripper - subtask_index: 54 - - subtask: Place the blackboard erasure on the pink bowl with the right gripper - subtask_index: 55 - - subtask: Grasp the compasses with the left gripper - subtask_index: 56 - - subtask: Place the peach on the pink bowl with the right gripper - subtask_index: 57 - - subtask: Place the soft facial cleanser on the pink bowl with the right gripper - subtask_index: 58 - - subtask: Grasp the blue pot with the right gripper - subtask_index: 59 - - subtask: Grasp the round bread with the right gripper - subtask_index: 60 - - subtask: Place the plugboard on the pink bowl with the left gripper - subtask_index: 61 - - subtask: Grasp the tin with the right gripper - subtask_index: 62 - - subtask: Place the square 
chewing gum on the pink bowl with the right gripper - subtask_index: 63 - - subtask: Place the compasses on the pink bowl with the right gripper - subtask_index: 64 - - subtask: Grasp the tape with the right gripper - subtask_index: 65 - - subtask: Place the shower sphere on the pink bowl with the right gripper - subtask_index: 66 - - subtask: Grasp the coke with the right gripper - subtask_index: 67 - - subtask: Grasp the tape with the left gripper - subtask_index: 68 - - subtask: Place the tin on the pink bowl with the right gripper - subtask_index: 69 - - subtask: Grasp the square chewing gum with the right gripper - subtask_index: 70 - - subtask: Grasp the shower sphere with the right gripper - subtask_index: 71 - - subtask: Grasp the bread slice with the left gripper - subtask_index: 72 - - subtask: Grasp the blackboard erasure with the right gripper - subtask_index: 73 - - subtask: Place the brown towel on the pink bowl with the left gripper - subtask_index: 74 - - subtask: Place the square wooden block on the pink bowl with the left gripper - subtask_index: 75 - - subtask: Grasp the square wooden block with the left gripper - subtask_index: 76 - - subtask: Grasp the blue cup with the right gripper - subtask_index: 77 - - subtask: Place the back scratcher on the pink bowl with the left gripper - subtask_index: 78 - - subtask: Grasp the soft facial cleanser with the right gripper - subtask_index: 79 - - subtask: Place the green lemon on the pink bowl with the left gripper - subtask_index: 80 - - subtask: Grasp the square wooden block with the right gripper - subtask_index: 81 - - subtask: Grasp the green lemon with the left gripper - subtask_index: 82 - - subtask: Place the tin on the pink bowl with the left gripper - subtask_index: 83 - - subtask: Place the yogurt on the pink bowl with the right gripper - subtask_index: 84 - - subtask: Place the potato chips on the pink bowl with the left gripper - subtask_index: 85 - - subtask: Grasp the round wooden block 
with the right gripper - subtask_index: 86 - - subtask: Grasp the banana with the right gripper - subtask_index: 87 - - subtask: Place the plugboard on the pink bowl with the right gripper - subtask_index: 88 - - subtask: 'null' - subtask_index: 89 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 20095 - dataset_size: 752.15 MB - data_structure: 'Galaxea_R1_Lite_storage_object_pink_bowl_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(90 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, 
Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Galaxea_R1_Lite_storage_object_white_box.yaml b/dataset_info/Galaxea_R1_Lite_storage_object_white_box.yaml index 5d559b3942883d7aeb33cfd6027a60156589c9e0..06dd96ec8fefb139d7bd4fec3efd890328d8752c 100644 --- a/dataset_info/Galaxea_R1_Lite_storage_object_white_box.yaml +++ b/dataset_info/Galaxea_R1_Lite_storage_object_white_box.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: white_box level1: storage_box level2: white_box @@ -195,115 +195,208 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use a gripper to pick the target object and place on the white box. +task_instruction: +- use a gripper to pick the target object and place on the white box. sub_tasks: -- Place the tin on the white box with the left gripper -- Grasp the blue pot with the left gripper -- Place the square wooden block on the white box with the right gripper -- Grasp the plugboard with the left gripper -- Place the brown towel on the white box with the left gripper -- Place the peach on the white box with the left gripper -- Grasp the potato chips with the right gripper -- Place the blackboard erasure on the white box with the right gripper -- Grasp the banana with the left gripper -- Place the round bread on the white box with the right gripper -- Grasp the compasses with the right gripper -- Grasp the duck toy with the left gripper -- Place the bread slice on the white box with the right gripper -- Place the blackboard erasure on the white box with the left gripper -- Place the bread slice on the white box with the left gripper -- Place the coke on the white box with the right gripper -- Place the blue cup on the white box with the left gripper -- Grasp the blue cup with the left gripper -- Place the coke on the white box with the left gripper -- Place the square wooden block on the white box with the left gripper -- Place the blue cup on the white box with the right gripper -- Place the 
duck toy on the white box with the left gripper -- Grasp the back scratcher with the right gripper -- Place the blue pot on the white box with the right gripper -- Grasp the square chewing gum with the left gripper -- Grasp the chocolate cake with the right gripper -- Place the round wooden block on the white box with the right gripper -- Place the square chewing gum on the white box with the right gripper -- Grasp the shower sphere with the left gripper -- Grasp the plugboard with the right gripper -- Grasp the yogurt with the right gripper -- Grasp the tin with the left gripper -- Grasp the brown towel with the left gripper -- Place the back scratcher into the basket with the right gripper -- Grasp the hard facial cleanser with the left gripper -- Grasp the chocolate with the right gripper -- Place the banana on the white box with the right gripper -- Grasp the brown towel with the right gripper -- Place the shower sphere on the white box with the right gripper -- Place the green lemon into the basket with the right gripper -- Place the compasses on the white box with the right gripper -- Place the banana on the white box with the left gripper -- Grasp the peach with the left gripper -- Place the round wooden block on the white box with the left gripper -- Place the tin on the white box with the right gripper -- Place the potato chips on the white box with the right gripper -- Place the tape on the white box with the left gripper -- Grasp the green lemon with the right gripper -- Place the blue pot on the white box with the left gripper -- Grasp the bread slice with the right gripper -- Place the chocolate cake on the white box with the right gripper -- Place the compasses on the white box with the left gripper -- Grasp the potato chips with the left gripper -- Place the soft facial cleanser on the white box with the right gripper -- Grasp the duck toy with the right gripper -- End -- Place the hard facial cleanser on the white box with the left gripper -- Grasp 
the blackboard erasure with the left gripper -- Grasp the coke with the left gripper -- Grasp the round wooden block with the left gripper -- Place the square chewing gum on the white box with the left gripper -- Place the tape on the white box with the right gripper -- Place the shower sphere on the white box with the left gripper -- Grasp the compasses with the left gripper -- Place the brown towel on the white box with the right gripper -- Grasp the blue pot with the right gripper -- Grasp the round bread with the right gripper -- Grasp the chocolate cake with the left gripper -- Place the duck toy on the white box with the right gripper -- Place the chocolate on the white box with the right gripper -- Grasp the tin with the right gripper -- Place the chocolate cake on the white box with the left gripper -- Place the plugboard on the white box with the right gripper -- Grasp the tape with the right gripper -- Grasp the coke with the right gripper -- Grasp the tape with the left gripper -- Grasp the square chewing gum with the right gripper -- Grasp the shower sphere with the right gripper -- Grasp the bread slice with the left gripper -- Place the potato chips on the white box with the left gripper -- Grasp the blackboard erasure with the right gripper -- Place the round bread on the white box with the left gripper -- Grasp the round bread with the left gripper -- Grasp the square wooden block with the left gripper -- Grasp the blue cup with the right gripper -- Grasp the soft facial cleanser with the right gripper -- Place the yogurt on the white box with the right gripper -- Grasp the square wooden block with the right gripper -- Place the plugboard on the white box with the left gripper -- Grasp the round wooden block with the right gripper -- Grasp the banana with the right gripper -- 'null' +- subtask: Place the tin on the white box with the left gripper + subtask_index: 0 +- subtask: Grasp the blue pot with the left gripper + subtask_index: 1 +- subtask: 
Place the square wooden block on the white box with the right gripper + subtask_index: 2 +- subtask: Grasp the plugboard with the left gripper + subtask_index: 3 +- subtask: Place the brown towel on the white box with the left gripper + subtask_index: 4 +- subtask: Place the peach on the white box with the left gripper + subtask_index: 5 +- subtask: Grasp the potato chips with the right gripper + subtask_index: 6 +- subtask: Place the blackboard erasure on the white box with the right gripper + subtask_index: 7 +- subtask: Grasp the banana with the left gripper + subtask_index: 8 +- subtask: Place the round bread on the white box with the right gripper + subtask_index: 9 +- subtask: Grasp the compasses with the right gripper + subtask_index: 10 +- subtask: Grasp the duck toy with the left gripper + subtask_index: 11 +- subtask: Place the bread slice on the white box with the right gripper + subtask_index: 12 +- subtask: Place the blackboard erasure on the white box with the left gripper + subtask_index: 13 +- subtask: Place the bread slice on the white box with the left gripper + subtask_index: 14 +- subtask: Place the coke on the white box with the right gripper + subtask_index: 15 +- subtask: Place the blue cup on the white box with the left gripper + subtask_index: 16 +- subtask: Grasp the blue cup with the left gripper + subtask_index: 17 +- subtask: Place the coke on the white box with the left gripper + subtask_index: 18 +- subtask: Place the square wooden block on the white box with the left gripper + subtask_index: 19 +- subtask: Place the blue cup on the white box with the right gripper + subtask_index: 20 +- subtask: Place the duck toy on the white box with the left gripper + subtask_index: 21 +- subtask: Grasp the back scratcher with the right gripper + subtask_index: 22 +- subtask: Place the blue pot on the white box with the right gripper + subtask_index: 23 +- subtask: Grasp the square chewing gum with the left gripper + subtask_index: 24 +- subtask: 
Grasp the chocolate cake with the right gripper + subtask_index: 25 +- subtask: Place the round wooden block on the white box with the right gripper + subtask_index: 26 +- subtask: Place the square chewing gum on the white box with the right gripper + subtask_index: 27 +- subtask: Grasp the shower sphere with the left gripper + subtask_index: 28 +- subtask: Grasp the plugboard with the right gripper + subtask_index: 29 +- subtask: Grasp the yogurt with the right gripper + subtask_index: 30 +- subtask: Grasp the tin with the left gripper + subtask_index: 31 +- subtask: Grasp the brown towel with the left gripper + subtask_index: 32 +- subtask: Place the back scratcher into the basket with the right gripper + subtask_index: 33 +- subtask: Grasp the hard facial cleanser with the left gripper + subtask_index: 34 +- subtask: Grasp the chocolate with the right gripper + subtask_index: 35 +- subtask: Place the banana on the white box with the right gripper + subtask_index: 36 +- subtask: Grasp the brown towel with the right gripper + subtask_index: 37 +- subtask: Place the shower sphere on the white box with the right gripper + subtask_index: 38 +- subtask: Place the green lemon into the basket with the right gripper + subtask_index: 39 +- subtask: Place the compasses on the white box with the right gripper + subtask_index: 40 +- subtask: Place the banana on the white box with the left gripper + subtask_index: 41 +- subtask: Grasp the peach with the left gripper + subtask_index: 42 +- subtask: Place the round wooden block on the white box with the left gripper + subtask_index: 43 +- subtask: Place the tin on the white box with the right gripper + subtask_index: 44 +- subtask: Place the potato chips on the white box with the right gripper + subtask_index: 45 +- subtask: Place the tape on the white box with the left gripper + subtask_index: 46 +- subtask: Grasp the green lemon with the right gripper + subtask_index: 47 +- subtask: Place the blue pot on the white box with 
the left gripper + subtask_index: 48 +- subtask: Grasp the bread slice with the right gripper + subtask_index: 49 +- subtask: Place the chocolate cake on the white box with the right gripper + subtask_index: 50 +- subtask: Place the compasses on the white box with the left gripper + subtask_index: 51 +- subtask: Grasp the potato chips with the left gripper + subtask_index: 52 +- subtask: Place the soft facial cleanser on the white box with the right gripper + subtask_index: 53 +- subtask: Grasp the duck toy with the right gripper + subtask_index: 54 +- subtask: End + subtask_index: 55 +- subtask: Place the hard facial cleanser on the white box with the left gripper + subtask_index: 56 +- subtask: Grasp the blackboard erasure with the left gripper + subtask_index: 57 +- subtask: Grasp the coke with the left gripper + subtask_index: 58 +- subtask: Grasp the round wooden block with the left gripper + subtask_index: 59 +- subtask: Place the square chewing gum on the white box with the left gripper + subtask_index: 60 +- subtask: Place the tape on the white box with the right gripper + subtask_index: 61 +- subtask: Place the shower sphere on the white box with the left gripper + subtask_index: 62 +- subtask: Grasp the compasses with the left gripper + subtask_index: 63 +- subtask: Place the brown towel on the white box with the right gripper + subtask_index: 64 +- subtask: Grasp the blue pot with the right gripper + subtask_index: 65 +- subtask: Grasp the round bread with the right gripper + subtask_index: 66 +- subtask: Grasp the chocolate cake with the left gripper + subtask_index: 67 +- subtask: Place the duck toy on the white box with the right gripper + subtask_index: 68 +- subtask: Place the chocolate on the white box with the right gripper + subtask_index: 69 +- subtask: Grasp the tin with the right gripper + subtask_index: 70 +- subtask: Place the chocolate cake on the white box with the left gripper + subtask_index: 71 +- subtask: Place the plugboard on the 
white box with the right gripper + subtask_index: 72 +- subtask: Grasp the tape with the right gripper + subtask_index: 73 +- subtask: Grasp the coke with the right gripper + subtask_index: 74 +- subtask: Grasp the tape with the left gripper + subtask_index: 75 +- subtask: Grasp the square chewing gum with the right gripper + subtask_index: 76 +- subtask: Grasp the shower sphere with the right gripper + subtask_index: 77 +- subtask: Grasp the bread slice with the left gripper + subtask_index: 78 +- subtask: Place the potato chips on the white box with the left gripper + subtask_index: 79 +- subtask: Grasp the blackboard erasure with the right gripper + subtask_index: 80 +- subtask: Place the round bread on the white box with the left gripper + subtask_index: 81 +- subtask: Grasp the round bread with the left gripper + subtask_index: 82 +- subtask: Grasp the square wooden block with the left gripper + subtask_index: 83 +- subtask: Grasp the blue cup with the right gripper + subtask_index: 84 +- subtask: Grasp the soft facial cleanser with the right gripper + subtask_index: 85 +- subtask: Place the yogurt on the white box with the right gripper + subtask_index: 86 +- subtask: Grasp the square wooden block with the right gripper + subtask_index: 87 +- subtask: Place the plugboard on the white box with the left gripper + subtask_index: 88 +- subtask: Grasp the round wooden block with the right gripper + subtask_index: 89 +- subtask: Grasp the banana with the right gripper + subtask_index: 90 +- subtask: 'null' + subtask_index: 91 atomic_actions: - grasp - pick - place -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -314,13 +407,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -328,8 +418,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 102 total_frames: 27154 fps: 30 @@ -416,11 +505,9 @@ data_structure: 'Galaxea_R1_Lite_storage_object_white_box_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:101 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -688,7 +775,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -696,7 +783,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback 
regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -723,363 +809,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_storage_object_white_box - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use a gripper to pick the target object and place on the white box. 
- sub_tasks: - - subtask: Place the tin on the white box with the left gripper - subtask_index: 0 - - subtask: Grasp the blue pot with the left gripper - subtask_index: 1 - - subtask: Place the square wooden block on the white box with the right gripper - subtask_index: 2 - - subtask: Grasp the plugboard with the left gripper - subtask_index: 3 - - subtask: Place the brown towel on the white box with the left gripper - subtask_index: 4 - - subtask: Place the peach on the white box with the left gripper - subtask_index: 5 - - subtask: Grasp the potato chips with the right gripper - subtask_index: 6 - - subtask: Place the blackboard erasure on the white box with the right gripper - subtask_index: 7 - - subtask: Grasp the banana with the left gripper - subtask_index: 8 - - subtask: Place the round bread on the white box with the right gripper - subtask_index: 9 - - subtask: Grasp the compasses with the right gripper - subtask_index: 10 - - subtask: Grasp the duck toy with the left gripper - subtask_index: 11 - - subtask: Place the bread slice on the white box with the right gripper - subtask_index: 12 - - subtask: Place the blackboard erasure on the white box with the left gripper - subtask_index: 13 - - subtask: Place the bread slice on the white box with the left gripper - subtask_index: 14 - - subtask: Place the coke on the white box with the right gripper - subtask_index: 15 - - subtask: Place the blue cup on the white box with the left gripper - subtask_index: 16 - - subtask: Grasp the blue cup with the left gripper - subtask_index: 17 - - subtask: Place the coke on the white box with the left gripper - subtask_index: 18 - - subtask: Place the square wooden block on the white box with the left gripper - subtask_index: 19 - - subtask: Place the blue cup on the white box with the right gripper - subtask_index: 20 - - subtask: Place the duck toy on the white box with the left gripper - subtask_index: 21 - - subtask: Grasp the back scratcher with the right gripper - 
subtask_index: 22 - - subtask: Place the blue pot on the white box with the right gripper - subtask_index: 23 - - subtask: Grasp the square chewing gum with the left gripper - subtask_index: 24 - - subtask: Grasp the chocolate cake with the right gripper - subtask_index: 25 - - subtask: Place the round wooden block on the white box with the right gripper - subtask_index: 26 - - subtask: Place the square chewing gum on the white box with the right gripper - subtask_index: 27 - - subtask: Grasp the shower sphere with the left gripper - subtask_index: 28 - - subtask: Grasp the plugboard with the right gripper - subtask_index: 29 - - subtask: Grasp the yogurt with the right gripper - subtask_index: 30 - - subtask: Grasp the tin with the left gripper - subtask_index: 31 - - subtask: Grasp the brown towel with the left gripper - subtask_index: 32 - - subtask: Place the back scratcher into the basket with the right gripper - subtask_index: 33 - - subtask: Grasp the hard facial cleanser with the left gripper - subtask_index: 34 - - subtask: Grasp the chocolate with the right gripper - subtask_index: 35 - - subtask: Place the banana on the white box with the right gripper - subtask_index: 36 - - subtask: Grasp the brown towel with the right gripper - subtask_index: 37 - - subtask: Place the shower sphere on the white box with the right gripper - subtask_index: 38 - - subtask: Place the green lemon into the basket with the right gripper - subtask_index: 39 - - subtask: Place the compasses on the white box with the right gripper - subtask_index: 40 - - subtask: Place the banana on the white box with the left gripper - subtask_index: 41 - - subtask: Grasp the peach with the left gripper - subtask_index: 42 - - subtask: Place the round wooden block on the white box with the left gripper - subtask_index: 43 - - subtask: Place the tin on the white box with the right gripper - subtask_index: 44 - - subtask: Place the potato chips on the white box with the right gripper - 
subtask_index: 45 - - subtask: Place the tape on the white box with the left gripper - subtask_index: 46 - - subtask: Grasp the green lemon with the right gripper - subtask_index: 47 - - subtask: Place the blue pot on the white box with the left gripper - subtask_index: 48 - - subtask: Grasp the bread slice with the right gripper - subtask_index: 49 - - subtask: Place the chocolate cake on the white box with the right gripper - subtask_index: 50 - - subtask: Place the compasses on the white box with the left gripper - subtask_index: 51 - - subtask: Grasp the potato chips with the left gripper - subtask_index: 52 - - subtask: Place the soft facial cleanser on the white box with the right gripper - subtask_index: 53 - - subtask: Grasp the duck toy with the right gripper - subtask_index: 54 - - subtask: End - subtask_index: 55 - - subtask: Place the hard facial cleanser on the white box with the left gripper - subtask_index: 56 - - subtask: Grasp the blackboard erasure with the left gripper - subtask_index: 57 - - subtask: Grasp the coke with the left gripper - subtask_index: 58 - - subtask: Grasp the round wooden block with the left gripper - subtask_index: 59 - - subtask: Place the square chewing gum on the white box with the left gripper - subtask_index: 60 - - subtask: Place the tape on the white box with the right gripper - subtask_index: 61 - - subtask: Place the shower sphere on the white box with the left gripper - subtask_index: 62 - - subtask: Grasp the compasses with the left gripper - subtask_index: 63 - - subtask: Place the brown towel on the white box with the right gripper - subtask_index: 64 - - subtask: Grasp the blue pot with the right gripper - subtask_index: 65 - - subtask: Grasp the round bread with the right gripper - subtask_index: 66 - - subtask: Grasp the chocolate cake with the left gripper - subtask_index: 67 - - subtask: Place the duck toy on the white box with the right gripper - subtask_index: 68 - - subtask: Place the chocolate on the 
white box with the right gripper - subtask_index: 69 - - subtask: Grasp the tin with the right gripper - subtask_index: 70 - - subtask: Place the chocolate cake on the white box with the left gripper - subtask_index: 71 - - subtask: Place the plugboard on the white box with the right gripper - subtask_index: 72 - - subtask: Grasp the tape with the right gripper - subtask_index: 73 - - subtask: Grasp the coke with the right gripper - subtask_index: 74 - - subtask: Grasp the tape with the left gripper - subtask_index: 75 - - subtask: Grasp the square chewing gum with the right gripper - subtask_index: 76 - - subtask: Grasp the shower sphere with the right gripper - subtask_index: 77 - - subtask: Grasp the bread slice with the left gripper - subtask_index: 78 - - subtask: Place the potato chips on the white box with the left gripper - subtask_index: 79 - - subtask: Grasp the blackboard erasure with the right gripper - subtask_index: 80 - - subtask: Place the round bread on the white box with the left gripper - subtask_index: 81 - - subtask: Grasp the round bread with the left gripper - subtask_index: 82 - - subtask: Grasp the square wooden block with the left gripper - subtask_index: 83 - - subtask: Grasp the blue cup with the right gripper - subtask_index: 84 - - subtask: Grasp the soft facial cleanser with the right gripper - subtask_index: 85 - - subtask: Place the yogurt on the white box with the right gripper - subtask_index: 86 - - subtask: Grasp the square wooden block with the right gripper - subtask_index: 87 - - subtask: Place the plugboard on the white box with the left gripper - subtask_index: 88 - - subtask: Grasp the round wooden block with the right gripper - subtask_index: 89 - - subtask: Grasp the banana with the right gripper - subtask_index: 90 - - subtask: 'null' - subtask_index: 91 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset 
temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 27154 - dataset_size: 1.03 GB - data_structure: 'Galaxea_R1_Lite_storage_object_white_box_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (90 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. 
- support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null 
-base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Galaxea_R1_Lite_storage_object_yellow_basket.yaml b/dataset_info/Galaxea_R1_Lite_storage_object_yellow_basket.yaml index 69466d0f451d7a22e22b416d2568239c0d0e542f..cad1d4334c2eb32c6d5ebfe8889a6c1427abf676 100644 --- a/dataset_info/Galaxea_R1_Lite_storage_object_yellow_basket.yaml +++ b/dataset_info/Galaxea_R1_Lite_storage_object_yellow_basket.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: yellow_basket level1: basket level2: yellow_basket @@ -195,118 +195,212 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use a gripper to pick the target object and place on the yellow - basket. +task_instruction: +- use a gripper to pick the target object and place on the yellow basket. 
sub_tasks: -- Grasp the blue pot with the left gripper -- Grasp the plugboard with the left gripper -- Place the round wooden block on the yellow basket with the left gripper -- Grasp the potato chips with the right gripper -- Place the tin on the yellow basket with the right gripper -- Place the plugboard on the yellow basket with the left gripper -- Place the peach on the yellow basket with the left gripper -- Place the blue cup on the yellow basket with the right gripper -- Place the brown towel on the yellow basket with the right gripper -- Grasp the compasses with the right gripper -- Place the green lemon on the yellow basket with the right gripper -- Place the duck toy on the yellow basket with the right gripper -- Grasp the duck toy with the left gripper -- Place the blackboard erasure on the yellow basket with the left gripper -- Grasp the blue cup with the left gripper -- Place the bread slice on the yellow basket with the left gripper -- Place the hard facial cleanser on the yellow basket with the right gripper -- Place the peach on the yellow basket with the right gripper -- Grasp the back scratcher with the right gripper -- Place the tape on the yellow basket with the right gripper -- Place the blue pot on the yellow basket with the right gripper -- Grasp the chocolate cake with the right gripper -- Place the tape on the yellow basket with the left gripper -- Place the shower sphere on the yellow basket with the left gripper -- Place the yogurt on the yellow basket with the left gripper -- Grasp the shower sphere with the left gripper -- Grasp the plugboard with the right gripper -- Grasp the tin with the left gripper -- Grasp the brown towel with the left gripper -- Place the compasses on the yellow basket with the right gripper -- Place the hard facial cleanser on the yellow basket with the left gripper -- Grasp the hard facial cleanser with the left gripper -- Place the potato chips on the yellow basket with the left gripper -- Grasp the chocolate 
with the right gripper -- Place the banana on the yellow basket with the right gripper -- Grasp the peach with the right gripper -- Grasp the brown towel with the right gripper -- Place the chocolate cake on the yellow basket with the left gripper -- Place the shower sphere on the yellow basket with the right gripper -- Grasp the hard facial cleanser with the right gripper -- Place the back scratcher on the yellow basket with the right gripper -- Grasp the peach with the left gripper -- Place the blue cup on the yellow basket with the left gripper -- Grasp the green lemon with the right gripper -- Place the soft facial cleanser on the yellow basket with the right gripper -- Grasp the bread slice with the right gripper -- Grasp the potato chips with the left gripper -- Grasp the duck toy with the right gripper -- End -- Grasp the blackboard erasure with the left gripper -- Grasp the coke with the left gripper -- Place the blackboard erasure on the yellow basket with the right gripper -- Grasp the round wooden block with the left gripper -- Place the tin on the yellow basket with the left gripper -- Place the bread slice on the yellow basket with the right gripper -- Place the compasses on the yellow basket with the left gripper -- Place the square wooden block on the yellow basket with the left gripper -- Place the chocolate cake on the yellow basket with the right gripper -- Grasp the compasses with the left gripper -- Place the peach doll on the yellow basket with the left gripper -- Grasp the blue pot with the right gripper -- Grasp the round bread with the right gripper -- Grasp the chocolate cake with the left gripper -- Place the coke on the yellow basket with the left gripper -- Place the duck toy on the yellow basket with the left gripper -- Grasp the tin with the right gripper -- Place the round wooden block on the yellow basket with the right gripper -- Place the square wooden block on the yellow basket with the right gripper -- Grasp the tape with the 
right gripper -- Grasp the coke with the right gripper -- Place the round bread on the yellow basket with the right gripper -- Grasp the tape with the left gripper -- Grasp the square chewing gum with the right gripper -- Grasp the peach doll with the left gripper -- Grasp the shower sphere with the right gripper -- Place the brown towel on the yellow basket with the left gripper -- Grasp the bread slice with the left gripper -- Grasp the yogurt with the left gripper -- Grasp the blackboard erasure with the right gripper -- Place the coke on the yellow basket with the right gripper -- Place the chocolate on the yellow basket with the right gripper -- Place the potato chips on the yellow basket with the right gripper -- Place the plugboard on the yellow basket with the right gripper -- Place the blue pot on the yellow basket with the left gripper -- Place the round bread on the yellow basket with the left gripper -- Grasp the round bread with the left gripper -- Grasp the square wooden block with the left gripper -- Grasp the blue cup with the right gripper -- Grasp the soft facial cleanser with the right gripper -- Grasp the square wooden block with the right gripper -- Grasp the round wooden block with the right gripper -- Place the square chewing gum on the yellow basket with the right gripper -- Grasp the banana with the right gripper -- 'null' +- subtask: Grasp the blue pot with the left gripper + subtask_index: 0 +- subtask: Grasp the plugboard with the left gripper + subtask_index: 1 +- subtask: Place the round wooden block on the yellow basket with the left gripper + subtask_index: 2 +- subtask: Grasp the potato chips with the right gripper + subtask_index: 3 +- subtask: Place the tin on the yellow basket with the right gripper + subtask_index: 4 +- subtask: Place the plugboard on the yellow basket with the left gripper + subtask_index: 5 +- subtask: Place the peach on the yellow basket with the left gripper + subtask_index: 6 +- subtask: Place the blue cup 
on the yellow basket with the right gripper + subtask_index: 7 +- subtask: Place the brown towel on the yellow basket with the right gripper + subtask_index: 8 +- subtask: Grasp the compasses with the right gripper + subtask_index: 9 +- subtask: Place the green lemon on the yellow basket with the right gripper + subtask_index: 10 +- subtask: Place the duck toy on the yellow basket with the right gripper + subtask_index: 11 +- subtask: Grasp the duck toy with the left gripper + subtask_index: 12 +- subtask: Place the blackboard erasure on the yellow basket with the left gripper + subtask_index: 13 +- subtask: Grasp the blue cup with the left gripper + subtask_index: 14 +- subtask: Place the bread slice on the yellow basket with the left gripper + subtask_index: 15 +- subtask: Place the hard facial cleanser on the yellow basket with the right gripper + subtask_index: 16 +- subtask: Place the peach on the yellow basket with the right gripper + subtask_index: 17 +- subtask: Grasp the back scratcher with the right gripper + subtask_index: 18 +- subtask: Place the tape on the yellow basket with the right gripper + subtask_index: 19 +- subtask: Place the blue pot on the yellow basket with the right gripper + subtask_index: 20 +- subtask: Grasp the chocolate cake with the right gripper + subtask_index: 21 +- subtask: Place the tape on the yellow basket with the left gripper + subtask_index: 22 +- subtask: Place the shower sphere on the yellow basket with the left gripper + subtask_index: 23 +- subtask: Place the yogurt on the yellow basket with the left gripper + subtask_index: 24 +- subtask: Grasp the shower sphere with the left gripper + subtask_index: 25 +- subtask: Grasp the plugboard with the right gripper + subtask_index: 26 +- subtask: Grasp the tin with the left gripper + subtask_index: 27 +- subtask: Grasp the brown towel with the left gripper + subtask_index: 28 +- subtask: Place the compasses on the yellow basket with the right gripper + subtask_index: 29 +- 
subtask: Place the hard facial cleanser on the yellow basket with the left gripper + subtask_index: 30 +- subtask: Grasp the hard facial cleanser with the left gripper + subtask_index: 31 +- subtask: Place the potato chips on the yellow basket with the left gripper + subtask_index: 32 +- subtask: Grasp the chocolate with the right gripper + subtask_index: 33 +- subtask: Place the banana on the yellow basket with the right gripper + subtask_index: 34 +- subtask: Grasp the peach with the right gripper + subtask_index: 35 +- subtask: Grasp the brown towel with the right gripper + subtask_index: 36 +- subtask: Place the chocolate cake on the yellow basket with the left gripper + subtask_index: 37 +- subtask: Place the shower sphere on the yellow basket with the right gripper + subtask_index: 38 +- subtask: Grasp the hard facial cleanser with the right gripper + subtask_index: 39 +- subtask: Place the back scratcher on the yellow basket with the right gripper + subtask_index: 40 +- subtask: Grasp the peach with the left gripper + subtask_index: 41 +- subtask: Place the blue cup on the yellow basket with the left gripper + subtask_index: 42 +- subtask: Grasp the green lemon with the right gripper + subtask_index: 43 +- subtask: Place the soft facial cleanser on the yellow basket with the right gripper + subtask_index: 44 +- subtask: Grasp the bread slice with the right gripper + subtask_index: 45 +- subtask: Grasp the potato chips with the left gripper + subtask_index: 46 +- subtask: Grasp the duck toy with the right gripper + subtask_index: 47 +- subtask: End + subtask_index: 48 +- subtask: Grasp the blackboard erasure with the left gripper + subtask_index: 49 +- subtask: Grasp the coke with the left gripper + subtask_index: 50 +- subtask: Place the blackboard erasure on the yellow basket with the right gripper + subtask_index: 51 +- subtask: Grasp the round wooden block with the left gripper + subtask_index: 52 +- subtask: Place the tin on the yellow basket with the 
left gripper + subtask_index: 53 +- subtask: Place the bread slice on the yellow basket with the right gripper + subtask_index: 54 +- subtask: Place the compasses on the yellow basket with the left gripper + subtask_index: 55 +- subtask: Place the square wooden block on the yellow basket with the left gripper + subtask_index: 56 +- subtask: Place the chocolate cake on the yellow basket with the right gripper + subtask_index: 57 +- subtask: Grasp the compasses with the left gripper + subtask_index: 58 +- subtask: Place the peach doll on the yellow basket with the left gripper + subtask_index: 59 +- subtask: Grasp the blue pot with the right gripper + subtask_index: 60 +- subtask: Grasp the round bread with the right gripper + subtask_index: 61 +- subtask: Grasp the chocolate cake with the left gripper + subtask_index: 62 +- subtask: Place the coke on the yellow basket with the left gripper + subtask_index: 63 +- subtask: Place the duck toy on the yellow basket with the left gripper + subtask_index: 64 +- subtask: Grasp the tin with the right gripper + subtask_index: 65 +- subtask: Place the round wooden block on the yellow basket with the right gripper + subtask_index: 66 +- subtask: Place the square wooden block on the yellow basket with the right gripper + subtask_index: 67 +- subtask: Grasp the tape with the right gripper + subtask_index: 68 +- subtask: Grasp the coke with the right gripper + subtask_index: 69 +- subtask: Place the round bread on the yellow basket with the right gripper + subtask_index: 70 +- subtask: Grasp the tape with the left gripper + subtask_index: 71 +- subtask: Grasp the square chewing gum with the right gripper + subtask_index: 72 +- subtask: Grasp the peach doll with the left gripper + subtask_index: 73 +- subtask: Grasp the shower sphere with the right gripper + subtask_index: 74 +- subtask: Place the brown towel on the yellow basket with the left gripper + subtask_index: 75 +- subtask: Grasp the bread slice with the left gripper + 
subtask_index: 76 +- subtask: Grasp the yogurt with the left gripper + subtask_index: 77 +- subtask: Grasp the blackboard erasure with the right gripper + subtask_index: 78 +- subtask: Place the coke on the yellow basket with the right gripper + subtask_index: 79 +- subtask: Place the chocolate on the yellow basket with the right gripper + subtask_index: 80 +- subtask: Place the potato chips on the yellow basket with the right gripper + subtask_index: 81 +- subtask: Place the plugboard on the yellow basket with the right gripper + subtask_index: 82 +- subtask: Place the blue pot on the yellow basket with the left gripper + subtask_index: 83 +- subtask: Place the round bread on the yellow basket with the left gripper + subtask_index: 84 +- subtask: Grasp the round bread with the left gripper + subtask_index: 85 +- subtask: Grasp the square wooden block with the left gripper + subtask_index: 86 +- subtask: Grasp the blue cup with the right gripper + subtask_index: 87 +- subtask: Grasp the soft facial cleanser with the right gripper + subtask_index: 88 +- subtask: Grasp the square wooden block with the right gripper + subtask_index: 89 +- subtask: Grasp the round wooden block with the right gripper + subtask_index: 90 +- subtask: Place the square chewing gum on the yellow basket with the right gripper + subtask_index: 91 +- subtask: Grasp the banana with the right gripper + subtask_index: 92 +- subtask: 'null' + subtask_index: 93 atomic_actions: - grasp - pick - place -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -317,13 +411,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -331,8 +422,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 102 total_frames: 18153 fps: 30 @@ -419,11 +509,9 @@ data_structure: 'Galaxea_R1_Lite_storage_object_yellow_basket_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:101 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -691,7 +779,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -699,7 +787,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or 
feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -726,367 +813,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_storage_object_yellow_basket - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use a gripper to pick the target object and place on the yellow basket. 
- sub_tasks: - - subtask: Grasp the blue pot with the left gripper - subtask_index: 0 - - subtask: Grasp the plugboard with the left gripper - subtask_index: 1 - - subtask: Place the round wooden block on the yellow basket with the left gripper - subtask_index: 2 - - subtask: Grasp the potato chips with the right gripper - subtask_index: 3 - - subtask: Place the tin on the yellow basket with the right gripper - subtask_index: 4 - - subtask: Place the plugboard on the yellow basket with the left gripper - subtask_index: 5 - - subtask: Place the peach on the yellow basket with the left gripper - subtask_index: 6 - - subtask: Place the blue cup on the yellow basket with the right gripper - subtask_index: 7 - - subtask: Place the brown towel on the yellow basket with the right gripper - subtask_index: 8 - - subtask: Grasp the compasses with the right gripper - subtask_index: 9 - - subtask: Place the green lemon on the yellow basket with the right gripper - subtask_index: 10 - - subtask: Place the duck toy on the yellow basket with the right gripper - subtask_index: 11 - - subtask: Grasp the duck toy with the left gripper - subtask_index: 12 - - subtask: Place the blackboard erasure on the yellow basket with the left gripper - subtask_index: 13 - - subtask: Grasp the blue cup with the left gripper - subtask_index: 14 - - subtask: Place the bread slice on the yellow basket with the left gripper - subtask_index: 15 - - subtask: Place the hard facial cleanser on the yellow basket with the right gripper - subtask_index: 16 - - subtask: Place the peach on the yellow basket with the right gripper - subtask_index: 17 - - subtask: Grasp the back scratcher with the right gripper - subtask_index: 18 - - subtask: Place the tape on the yellow basket with the right gripper - subtask_index: 19 - - subtask: Place the blue pot on the yellow basket with the right gripper - subtask_index: 20 - - subtask: Grasp the chocolate cake with the right gripper - subtask_index: 21 - - subtask: 
Place the tape on the yellow basket with the left gripper - subtask_index: 22 - - subtask: Place the shower sphere on the yellow basket with the left gripper - subtask_index: 23 - - subtask: Place the yogurt on the yellow basket with the left gripper - subtask_index: 24 - - subtask: Grasp the shower sphere with the left gripper - subtask_index: 25 - - subtask: Grasp the plugboard with the right gripper - subtask_index: 26 - - subtask: Grasp the tin with the left gripper - subtask_index: 27 - - subtask: Grasp the brown towel with the left gripper - subtask_index: 28 - - subtask: Place the compasses on the yellow basket with the right gripper - subtask_index: 29 - - subtask: Place the hard facial cleanser on the yellow basket with the left gripper - subtask_index: 30 - - subtask: Grasp the hard facial cleanser with the left gripper - subtask_index: 31 - - subtask: Place the potato chips on the yellow basket with the left gripper - subtask_index: 32 - - subtask: Grasp the chocolate with the right gripper - subtask_index: 33 - - subtask: Place the banana on the yellow basket with the right gripper - subtask_index: 34 - - subtask: Grasp the peach with the right gripper - subtask_index: 35 - - subtask: Grasp the brown towel with the right gripper - subtask_index: 36 - - subtask: Place the chocolate cake on the yellow basket with the left gripper - subtask_index: 37 - - subtask: Place the shower sphere on the yellow basket with the right gripper - subtask_index: 38 - - subtask: Grasp the hard facial cleanser with the right gripper - subtask_index: 39 - - subtask: Place the back scratcher on the yellow basket with the right gripper - subtask_index: 40 - - subtask: Grasp the peach with the left gripper - subtask_index: 41 - - subtask: Place the blue cup on the yellow basket with the left gripper - subtask_index: 42 - - subtask: Grasp the green lemon with the right gripper - subtask_index: 43 - - subtask: Place the soft facial cleanser on the yellow basket with the right 
gripper - subtask_index: 44 - - subtask: Grasp the bread slice with the right gripper - subtask_index: 45 - - subtask: Grasp the potato chips with the left gripper - subtask_index: 46 - - subtask: Grasp the duck toy with the right gripper - subtask_index: 47 - - subtask: End - subtask_index: 48 - - subtask: Grasp the blackboard erasure with the left gripper - subtask_index: 49 - - subtask: Grasp the coke with the left gripper - subtask_index: 50 - - subtask: Place the blackboard erasure on the yellow basket with the right gripper - subtask_index: 51 - - subtask: Grasp the round wooden block with the left gripper - subtask_index: 52 - - subtask: Place the tin on the yellow basket with the left gripper - subtask_index: 53 - - subtask: Place the bread slice on the yellow basket with the right gripper - subtask_index: 54 - - subtask: Place the compasses on the yellow basket with the left gripper - subtask_index: 55 - - subtask: Place the square wooden block on the yellow basket with the left gripper - subtask_index: 56 - - subtask: Place the chocolate cake on the yellow basket with the right gripper - subtask_index: 57 - - subtask: Grasp the compasses with the left gripper - subtask_index: 58 - - subtask: Place the peach doll on the yellow basket with the left gripper - subtask_index: 59 - - subtask: Grasp the blue pot with the right gripper - subtask_index: 60 - - subtask: Grasp the round bread with the right gripper - subtask_index: 61 - - subtask: Grasp the chocolate cake with the left gripper - subtask_index: 62 - - subtask: Place the coke on the yellow basket with the left gripper - subtask_index: 63 - - subtask: Place the duck toy on the yellow basket with the left gripper - subtask_index: 64 - - subtask: Grasp the tin with the right gripper - subtask_index: 65 - - subtask: Place the round wooden block on the yellow basket with the right gripper - subtask_index: 66 - - subtask: Place the square wooden block on the yellow basket with the right gripper - 
subtask_index: 67 - - subtask: Grasp the tape with the right gripper - subtask_index: 68 - - subtask: Grasp the coke with the right gripper - subtask_index: 69 - - subtask: Place the round bread on the yellow basket with the right gripper - subtask_index: 70 - - subtask: Grasp the tape with the left gripper - subtask_index: 71 - - subtask: Grasp the square chewing gum with the right gripper - subtask_index: 72 - - subtask: Grasp the peach doll with the left gripper - subtask_index: 73 - - subtask: Grasp the shower sphere with the right gripper - subtask_index: 74 - - subtask: Place the brown towel on the yellow basket with the left gripper - subtask_index: 75 - - subtask: Grasp the bread slice with the left gripper - subtask_index: 76 - - subtask: Grasp the yogurt with the left gripper - subtask_index: 77 - - subtask: Grasp the blackboard erasure with the right gripper - subtask_index: 78 - - subtask: Place the coke on the yellow basket with the right gripper - subtask_index: 79 - - subtask: Place the chocolate on the yellow basket with the right gripper - subtask_index: 80 - - subtask: Place the potato chips on the yellow basket with the right gripper - subtask_index: 81 - - subtask: Place the plugboard on the yellow basket with the right gripper - subtask_index: 82 - - subtask: Place the blue pot on the yellow basket with the left gripper - subtask_index: 83 - - subtask: Place the round bread on the yellow basket with the left gripper - subtask_index: 84 - - subtask: Grasp the round bread with the left gripper - subtask_index: 85 - - subtask: Grasp the square wooden block with the left gripper - subtask_index: 86 - - subtask: Grasp the blue cup with the right gripper - subtask_index: 87 - - subtask: Grasp the soft facial cleanser with the right gripper - subtask_index: 88 - - subtask: Grasp the square wooden block with the right gripper - subtask_index: 89 - - subtask: Grasp the round wooden block with the right gripper - subtask_index: 90 - - subtask: Place the 
square chewing gum on the yellow basket with the right gripper - subtask_index: 91 - - subtask: Grasp the banana with the right gripper - subtask_index: 92 - - subtask: 'null' - subtask_index: 93 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 18153 - dataset_size: 742.42 MB - data_structure: 'Galaxea_R1_Lite_storage_object_yellow_basket_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(90 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, 
Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Galaxea_R1_Lite_toggle_drawer_red.yaml b/dataset_info/Galaxea_R1_Lite_toggle_drawer_red.yaml index 57fdb1fa9548b3de9a44159d65cc20840c1ee475..5e86f2174da853a9f16919a071141cc97bed9dfa 100644 --- a/dataset_info/Galaxea_R1_Lite_toggle_drawer_red.yaml +++ b/dataset_info/Galaxea_R1_Lite_toggle_drawer_red.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: red_drawer level1: storage_utensils level2: red_drawer @@ -39,37 +39,52 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use the gripper to open the drawer and close the red drawer. +task_instruction: +- use the gripper to open the drawer and close the red drawer. sub_tasks: -- Open the red drawer with right gripper -- Open the yellow drawer with left gripper -- Close the red drawer with right gripper -- Close the yellow drawer with left gripper -- Open the red drawer with the right gripper -- End -- Open the red drawer with the left gripper -- Open the red drawer with left gripper -- Right gripper -- Close the red drawer with the left gripper -- Close the red drawer with the right gripper -- Close the red drawer with left gripper -- Left gripper -- 'null' +- subtask: Open the red drawer with right gripper + subtask_index: 0 +- subtask: Open the yellow drawer with left gripper + subtask_index: 1 +- subtask: Close the red drawer with right gripper + subtask_index: 2 +- subtask: Close the yellow drawer with left gripper + subtask_index: 3 +- subtask: Open the red drawer with the right gripper + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: Open the red drawer with the left gripper + subtask_index: 6 +- subtask: Open the red drawer with left gripper + subtask_index: 7 +- subtask: Right gripper + subtask_index: 8 +- subtask: Close the red drawer with the left gripper + subtask_index: 9 +- subtask: Close the red drawer with the right gripper + subtask_index: 10 +- subtask: 
Close the red drawer with left gripper + subtask_index: 11 +- subtask: Left gripper + subtask_index: 12 +- subtask: 'null' + subtask_index: 13 atomic_actions: - grasp - push - pull -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -80,13 +95,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -94,8 +106,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 100 total_frames: 41856 fps: 30 @@ -182,11 +193,9 @@ data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_toggle_drawer_red_qced_hardlink |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:99 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -454,7 +463,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) 
dataset_description: This dataset uses an extended format based on LeRobot and is @@ -462,7 +471,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -489,207 +497,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_toggle_drawer_red - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use the gripper to open the drawer and close the red drawer. 
- sub_tasks: - - subtask: Open the red drawer with right gripper - subtask_index: 0 - - subtask: Open the yellow drawer with left gripper - subtask_index: 1 - - subtask: Close the red drawer with right gripper - subtask_index: 2 - - subtask: Close the yellow drawer with left gripper - subtask_index: 3 - - subtask: Open the red drawer with the right gripper - subtask_index: 4 - - subtask: End - subtask_index: 5 - - subtask: Open the red drawer with the left gripper - subtask_index: 6 - - subtask: Open the red drawer with left gripper - subtask_index: 7 - - subtask: Right gripper - subtask_index: 8 - - subtask: Close the red drawer with the left gripper - subtask_index: 9 - - subtask: Close the red drawer with the right gripper - subtask_index: 10 - - subtask: Close the red drawer with left gripper - subtask_index: 11 - - subtask: Left gripper - subtask_index: 12 - - subtask: 'null' - subtask_index: 13 - atomic_actions: - - grasp - - push - - pull - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 41856 - dataset_size: 1.80 GB - data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_toggle_drawer_red_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (88 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. 
- support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null 
-base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Galaxea_R1_Lite_toggle_drawer_yellow.yaml b/dataset_info/Galaxea_R1_Lite_toggle_drawer_yellow.yaml index f60f8b1baf7d61ba56358c2daee35de985031612..17ff79e47c826f5371618e8b3da8e0bf2aee111f 100644 --- a/dataset_info/Galaxea_R1_Lite_toggle_drawer_yellow.yaml +++ b/dataset_info/Galaxea_R1_Lite_toggle_drawer_yellow.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: yellow_drawer level1: storage_utensils level2: yellow_drawer @@ -39,35 +39,48 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use the gripper to open the drawer and close the yellow drawer. +task_instruction: +- use the gripper to open the drawer and close the yellow drawer. 
sub_tasks: -- Open the yellow drawer with left gripper -- Close the yellow drawer with left gripper -- Close the yellow drawer with the right gripper -- Open the yellow drawer with the right gripper -- Close the yellow drawer with the left gripper -- Open the yellow drawer with right gripper -- End -- Open the yellow drawer with the left gripper -- Close the yellow drawer with right gripper -- Right gripper -- Left gripper -- 'null' +- subtask: Open the yellow drawer with left gripper + subtask_index: 0 +- subtask: Close the yellow drawer with left gripper + subtask_index: 1 +- subtask: Close the yellow drawer with the right gripper + subtask_index: 2 +- subtask: Open the yellow drawer with the right gripper + subtask_index: 3 +- subtask: Close the yellow drawer with the left gripper + subtask_index: 4 +- subtask: Open the yellow drawer with right gripper + subtask_index: 5 +- subtask: End + subtask_index: 6 +- subtask: Open the yellow drawer with the left gripper + subtask_index: 7 +- subtask: Close the yellow drawer with right gripper + subtask_index: 8 +- subtask: Right gripper + subtask_index: 9 +- subtask: Left gripper + subtask_index: 10 +- subtask: 'null' + subtask_index: 11 atomic_actions: - grasp - push - pull -robot_name: Galaxea_R1_Lite +robot_name: +- Galaxea_R1_Lite end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_left_rgb - cam_head_right_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, @@ -78,13 +91,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: end_rotation_dim end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -92,8 +102,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 97 total_frames: 37662 fps: 30 @@ -180,11 +189,9 @@ data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_toggle_drawer_yellow_qced_hardl |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:96 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_left_rgb: dtype: video shape: @@ -452,7 +459,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -460,7 +467,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or 
feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -487,203 +493,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Galaxea_R1_Lite_toggle_drawer_yellow - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use the gripper to open the drawer and close the yellow drawer. 
- sub_tasks: - - subtask: Open the yellow drawer with left gripper - subtask_index: 0 - - subtask: Close the yellow drawer with left gripper - subtask_index: 1 - - subtask: Close the yellow drawer with the right gripper - subtask_index: 2 - - subtask: Open the yellow drawer with the right gripper - subtask_index: 3 - - subtask: Close the yellow drawer with the left gripper - subtask_index: 4 - - subtask: Open the yellow drawer with right gripper - subtask_index: 5 - - subtask: End - subtask_index: 6 - - subtask: Open the yellow drawer with the left gripper - subtask_index: 7 - - subtask: Close the yellow drawer with right gripper - subtask_index: 8 - - subtask: Right gripper - subtask_index: 9 - - subtask: Left gripper - subtask_index: 10 - - subtask: 'null' - subtask_index: 11 - atomic_actions: - - grasp - - push - - pull - robot_name: - - Galaxea_R1_Lite - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: end_rotation_dim - end_translation_dim: end_translation_dim - annotations: *id009 - statistics: *id010 - frame_num: 37662 - dataset_size: 1.57 GB - data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_toggle_drawer_yellow_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (85 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_left_rgb - - | |-- observation.images.cam_head_right_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. 
- support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null 
-base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Realman_RMC-AIDA-L_arrange_flowers.yaml b/dataset_info/Realman_RMC-AIDA-L_arrange_flowers.yaml index 2ea630b1be1e9037f5aa8f442afacc67b63a3ca2..0f82bc179eb41422f651d8803a5cbe8fe0e34356 100644 --- a/dataset_info/Realman_RMC-AIDA-L_arrange_flowers.yaml +++ b/dataset_info/Realman_RMC-AIDA-L_arrange_flowers.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: furniture level2: table @@ -51,27 +51,33 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: the right gripper grasp the flowers and insert them into the vase. +task_instruction: +- the right gripper grasp the flowers and insert them into the vase. 
sub_tasks: -- Grasp the pink flower with the right gripper -- Abnormal -- Place the pink flower into the vase with with the right gripper -- End -- 'null' +- subtask: Grasp the pink flower with the right gripper + subtask_index: 0 +- subtask: Abnormal + subtask_index: 1 +- subtask: Place the pink flower into the vase with with the right gripper + subtask_index: 2 +- subtask: End + subtask_index: 3 +- subtask: 'null' + subtask_index: 4 atomic_actions: - grasp - pick - place -robot_name: Realman_RMC-AIDA-L +robot_name: +- Realman_RMC-AIDA-L end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -79,13 +85,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -93,8 +96,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 60 total_frames: 22483 fps: 30 @@ -179,11 +181,9 @@ data_structure: 'Realman_RMC-AIDA-L_arrange_flowers_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:59 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: 
observation.images.cam_head_rgb: dtype: video shape: @@ -460,7 +460,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -468,7 +468,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -495,187 +494,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Realman_RMC-AIDA-L_arrange_flowers - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. 
- task_instruction: - - the right gripper grasp the flowers and insert them into the vase. - sub_tasks: - - subtask: Grasp the pink flower with the right gripper - subtask_index: 0 - - subtask: Abnormal - subtask_index: 1 - - subtask: Place the pink flower into the vase with with the right gripper - subtask_index: 2 - - subtask: End - subtask_index: 3 - - subtask: 'null' - subtask_index: 4 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Realman_RMC-AIDA-L - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 22483 - dataset_size: 465.54 MB - data_structure: 'Realman_RMC-AIDA-L_arrange_flowers_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(48 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang 
Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Realman_RMC-AIDA-L_fold_towel.yaml b/dataset_info/Realman_RMC-AIDA-L_fold_towel.yaml index 5ebc5d5dd1d7366c0ba619294c096c5e1aab1cd5..9f5721a81a01b9c3ae32a9405efa1e786b22bba4 100644 --- a/dataset_info/Realman_RMC-AIDA-L_fold_towel.yaml +++ b/dataset_info/Realman_RMC-AIDA-L_fold_towel.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: furniture level2: table @@ -45,30 +45,39 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: with both grippers position the towel correctly, the right gripper - grasp the towel and fold it to the left. +task_instruction: +- with both grippers position the towel correctly, the right gripper grasp the towel + and fold it to the left. sub_tasks: -- Grasp the towel with the left gripper -- Move the towel to the center of the table with the right gripper -- End -- Grasp the towel with the right gripper -- Fold the towel with the left gripper -- Move the towel to the center of the table with both grippers -- Fold the towel with the right gripper -- 'null' +- subtask: Grasp the towel with the left gripper + subtask_index: 0 +- subtask: Move the towel to the center of the table with the right gripper + subtask_index: 1 +- subtask: End + subtask_index: 2 +- subtask: Grasp the towel with the right gripper + subtask_index: 3 +- subtask: Fold the towel with the left gripper + subtask_index: 4 +- subtask: Move the towel to the center of the table with both grippers + subtask_index: 5 +- subtask: Fold the towel with the right gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 atomic_actions: - grasp - fold -robot_name: Realman_RMC-AIDA-L +robot_name: +- Realman_RMC-AIDA-L end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -76,13 +85,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -90,8 +96,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 120 total_frames: 69438 fps: 30 @@ -176,11 +181,9 @@ data_structure: 'Realman_RMC-AIDA-L_fold_towel_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:119 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -457,7 +460,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -465,7 +468,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. 
support_info: For technical support, please open an issue on our GitHub repository. @@ -492,193 +494,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Realman_RMC-AIDA-L_fold_towel - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: bathroom - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - with both grippers position the towel correctly, the right gripper grasp the towel - and fold it to the left. 
- sub_tasks: - - subtask: Grasp the towel with the left gripper - subtask_index: 0 - - subtask: Move the towel to the center of the table with the right gripper - subtask_index: 1 - - subtask: End - subtask_index: 2 - - subtask: Grasp the towel with the right gripper - subtask_index: 3 - - subtask: Fold the towel with the left gripper - subtask_index: 4 - - subtask: Move the towel to the center of the table with both grippers - subtask_index: 5 - - subtask: Fold the towel with the right gripper - subtask_index: 6 - - subtask: 'null' - subtask_index: 7 - atomic_actions: - - grasp - - fold - robot_name: - - Realman_RMC-AIDA-L - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 69438 - dataset_size: 1.08 GB - data_structure: 'Realman_RMC-AIDA-L_fold_towel_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(108 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, 
Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Realman_RMC-AIDA-L_hang_clothes.yaml b/dataset_info/Realman_RMC-AIDA-L_hang_clothes.yaml index 11b05f7aace365d7bf07311d6d4456a30b506b07..2c3733f86d0386ef8f5d10d025c790c0947d5fe9 100644 --- a/dataset_info/Realman_RMC-AIDA-L_hang_clothes.yaml +++ b/dataset_info/Realman_RMC-AIDA-L_hang_clothes.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: clothes_drying_rack level1: storage_racks level2: clothes_drying_rack @@ -45,26 +45,30 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: the left gripper grasp the clothes and hang them on the clothes - rack. +task_instruction: +- the left gripper grasp the clothes and hang them on the clothes rack. sub_tasks: -- End -- Grasp the hanger with the left gripper -- Place the hanger on the rack with the left gripper -- 'null' +- subtask: End + subtask_index: 0 +- subtask: Grasp the hanger with the left gripper + subtask_index: 1 +- subtask: Place the hanger on the rack with the left gripper + subtask_index: 2 +- subtask: 'null' + subtask_index: 3 atomic_actions: - pick - place -robot_name: Realman_RMC-AIDA-L +robot_name: +- Realman_RMC-AIDA-L end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -72,13 +76,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -86,8 +87,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 10021 fps: 30 @@ -172,11 +172,9 @@ data_structure: 'Realman_RMC-AIDA-L_hang_clothes_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -453,7 +451,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -461,7 +459,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. 
support_info: For technical support, please open an issue on our GitHub repository. @@ -488,184 +485,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Realman_RMC-AIDA-L_hang_clothes - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - the left gripper grasp the clothes and hang them on the clothes rack. - sub_tasks: - - subtask: End - subtask_index: 0 - - subtask: Grasp the hanger with the left gripper - subtask_index: 1 - - subtask: Place the hanger on the rack with the left gripper - subtask_index: 2 - - subtask: 'null' - subtask_index: 3 - atomic_actions: - - pick - - place - robot_name: - - Realman_RMC-AIDA-L - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 10021 - dataset_size: 234.72 MB - data_structure: 'Realman_RMC-AIDA-L_hang_clothes_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (38 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Realman_RMC-AIDA-L_pass_bowl.yaml b/dataset_info/Realman_RMC-AIDA-L_pass_bowl.yaml new file mode 100644 index 0000000000000000000000000000000000000000..1aa8511047b3a26a129d0d0c1e08afbe975e3d24 --- /dev/null +++ b/dataset_info/Realman_RMC-AIDA-L_pass_bowl.yaml @@ -0,0 +1,497 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Realman_RMC-AIDA-L_pass_bowl +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: household + level2: kitchen + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: table + level1: furniture + level2: table + level3: null + level4: null + level5: null +- object_name: bowl + level1: kitchen_supplies + level2: bowl + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- the left gripper grasp the bowl and pass it to the right gripper, the right gripper + hold the bowl and place it on the table. 
+sub_tasks: +- subtask: Abnormal + subtask_index: 0 +- subtask: Place the bowl on the table with the right gripper + subtask_index: 1 +- subtask: Pick up the bowl with the left gripper + subtask_index: 2 +- subtask: End + subtask_index: 3 +- subtask: Pass the bowl from the left gripper to the right gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 +atomic_actions: +- grasp +- pick +- place +- pass +robot_name: +- Realman_RMC-AIDA-L +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 57 + total_frames: 31462 + fps: 30 + total_tasks: 6 + total_videos: 171 + total_chunks: 1 + chunks_size: 1000 + state_dim: 28 + action_dim: 28 + camera_views: 3 + dataset_size: 200.31 MB +frame_num: 31462 +dataset_size: 200.31 MB +data_structure: 'Realman_RMC-AIDA-L_pass_bowl_qced_hardlink/ + + |-- annotations + + | |-- eef_acc_mag_annotation.jsonl + + | |-- eef_direction_annotation.jsonl + + | |-- eef_velocity_annotation.jsonl + + | |-- gripper_activity_annotation.jsonl + + | |-- gripper_mode_annotation.jsonl + + | |-- scene_annotations.jsonl + + | `-- subtask_annotations.jsonl + + |-- data + 
+ | `-- chunk-000 + + | |-- episode_000000.parquet + + | |-- episode_000001.parquet + + | |-- episode_000002.parquet + + | |-- episode_000003.parquet + + | |-- episode_000004.parquet + + | |-- episode_000005.parquet + + | |-- episode_000006.parquet + + | |-- episode_000007.parquet + + | |-- episode_000008.parquet + + | |-- episode_000009.parquet + + | |-- episode_000010.parquet + + | `-- episode_000011.parquet + + | `-- ... (45 more entries) + + |-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- videos + + | `-- chunk-000 + + | |-- observation.images.cam_head_rgb + + | |-- observation.images.cam_left_wrist_rgb + + | `-- observation.images.cam_right_wrist_rgb + + |-- info.yaml + + `-- README.md' +splits: + train: 0:56 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 28 + names: + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_arm_joint_7_rad + - right_gripper_open + - right_eef_pos_x_m 
+ - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_arm_joint_7_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 28 + names: + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_arm_joint_7_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_arm_joint_7_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - 
left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + shape: + - 2 + dtype: float32 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + shape: + - 2 + dtype: float32 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + shape: + - 2 + dtype: int32 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + shape: + - 2 + dtype: int32 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + shape: + - 2 + dtype: int32 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + shape: + - 2 + dtype: int32 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. 
+homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Realman_RMC-AIDA-L_storage_block_basket.yaml b/dataset_info/Realman_RMC-AIDA-L_storage_block_basket.yaml index 74ddf6b52c065f411acf4b4dd3248ebfccd110d3..9d75c6f3041d880a25f166c240424da8e477a984 100644 --- a/dataset_info/Realman_RMC-AIDA-L_storage_block_basket.yaml +++ b/dataset_info/Realman_RMC-AIDA-L_storage_block_basket.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: furniture level2: table @@ -51,28 +51,34 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: the left gripper grasp the basket on the table, the right grippe - pick up the blocks on the table and place it into the basket. +task_instruction: +- the left gripper grasp the basket on the table, the right grippe pick up the blocks + on the table and place it into the basket. 
sub_tasks: -- End -- Grasp the blue cube with the right gripper -- Place the blue cube into the basket with the right gripper -- Grasp the basket with the left gripper -- 'null' +- subtask: End + subtask_index: 0 +- subtask: Grasp the blue cube with the right gripper + subtask_index: 1 +- subtask: Place the blue cube into the basket with the right gripper + subtask_index: 2 +- subtask: Grasp the basket with the left gripper + subtask_index: 3 +- subtask: 'null' + subtask_index: 4 atomic_actions: - grasp - pick - place -robot_name: Realman_RMC-AIDA-L +robot_name: +- Realman_RMC-AIDA-L end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -80,13 +86,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -94,8 +97,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 50 total_frames: 19083 fps: 30 @@ -180,11 +182,9 @@ data_structure: 'Realman_RMC-AIDA-L_storage_block_basket_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:49 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -461,7 +461,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -469,7 +469,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -496,188 +495,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Realman_RMC-AIDA-L_storage_block_basket - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: living_room - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. 
- objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - the left gripper grasp the basket on the table, the right grippe pick up the blocks - on the table and place it into the basket. - sub_tasks: - - subtask: End - subtask_index: 0 - - subtask: Grasp the blue cube with the right gripper - subtask_index: 1 - - subtask: Place the blue cube into the basket with the right gripper - subtask_index: 2 - - subtask: Grasp the basket with the left gripper - subtask_index: 3 - - subtask: 'null' - subtask_index: 4 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Realman_RMC-AIDA-L - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 19083 - dataset_size: 201.40 MB - data_structure: 'Realman_RMC-AIDA-L_storage_block_basket_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(38 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang 
Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Realman_RMC-AIDA-L_storage_peach_box.yaml b/dataset_info/Realman_RMC-AIDA-L_storage_peach_box.yaml index 2a1076eb5947f32ebd8f9c4a753dea6e332551f0..99db4cf17fd85ce3444dddfeb661623f915c1a50 100644 --- a/dataset_info/Realman_RMC-AIDA-L_storage_peach_box.yaml +++ b/dataset_info/Realman_RMC-AIDA-L_storage_peach_box.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: furniture level2: table @@ -51,29 +51,36 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: the left gripper open the box, the right gripper grasp the peach - on the table and place it into the box. +task_instruction: +- the left gripper open the box, the right gripper grasp the peach on the table and + place it into the box. sub_tasks: -- Close the box with the left gripper -- Open the lid with the left gripper -- End -- Grasp the peach with the right gripper -- Place the peach into the box with the right gripper -- 'null' +- subtask: Close the box with the left gripper + subtask_index: 0 +- subtask: Open the lid with the left gripper + subtask_index: 1 +- subtask: End + subtask_index: 2 +- subtask: Grasp the peach with the right gripper + subtask_index: 3 +- subtask: Place the peach into the box with the right gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Realman_RMC-AIDA-L +robot_name: +- Realman_RMC-AIDA-L end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -81,13 +88,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -95,8 +99,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 118 total_frames: 78472 fps: 30 @@ -181,11 +184,9 @@ data_structure: 'Realman_RMC-AIDA-L_storage_peach_box_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:117 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -462,7 +463,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -470,7 +471,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. 
support_info: For technical support, please open an issue on our GitHub repository. @@ -497,190 +497,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Realman_RMC-AIDA-L_storage_peach_box - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - the left gripper open the box, the right gripper grasp the peach on the table - and place it into the box. 
- sub_tasks: - - subtask: Close the box with the left gripper - subtask_index: 0 - - subtask: Open the lid with the left gripper - subtask_index: 1 - - subtask: End - subtask_index: 2 - - subtask: Grasp the peach with the right gripper - subtask_index: 3 - - subtask: Place the peach into the box with the right gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Realman_RMC-AIDA-L - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 78472 - dataset_size: 751.68 MB - data_structure: 'Realman_RMC-AIDA-L_storage_peach_box_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(106 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, 
Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Realman_RMC-AIDA-L_storage_peach_drawer.yaml b/dataset_info/Realman_RMC-AIDA-L_storage_peach_drawer.yaml index b797f860ec68936a533a8f0c08927f9718b6c54c..4fbfb09c15aafb9d8e41e7c5785f1276ab14439e 100644 --- a/dataset_info/Realman_RMC-AIDA-L_storage_peach_drawer.yaml +++ b/dataset_info/Realman_RMC-AIDA-L_storage_peach_drawer.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: furniture level2: table @@ -51,28 +51,34 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: the right gripper grasp the peach on the table and place it into - the drawer, the left gripper close the drawer. +task_instruction: +- the right gripper grasp the peach on the table and place it into the drawer, the + left gripper close the drawer. sub_tasks: -- Close the drawer with the left gripper -- Place the peach into the top drawer with the right gripper -- end -- Grasp the peach with the right gripper -- 'null' +- subtask: Close the drawer with the left gripper + subtask_index: 0 +- subtask: Place the peach into the top drawer with the right gripper + subtask_index: 1 +- subtask: end + subtask_index: 2 +- subtask: Grasp the peach with the right gripper + subtask_index: 3 +- subtask: 'null' + subtask_index: 4 atomic_actions: - grasp - pick - place -robot_name: Realman_RMC-AIDA-L +robot_name: +- Realman_RMC-AIDA-L end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -80,13 +86,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -94,8 +97,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 120 total_frames: 46600 fps: 30 @@ -180,11 +182,9 @@ data_structure: 'Realman_RMC-AIDA-L_storage_peach_drawer_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:119 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -461,7 +461,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -469,7 +469,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. 
support_info: For technical support, please open an issue on our GitHub repository. @@ -496,188 +495,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Realman_RMC-AIDA-L_storage_peach_drawer - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - the right gripper grasp the peach on the table and place it into the drawer, the - left gripper close the drawer. - sub_tasks: - - subtask: Close the drawer with the left gripper - subtask_index: 0 - - subtask: Place the peach into the top drawer with the right gripper - subtask_index: 1 - - subtask: end - subtask_index: 2 - - subtask: Grasp the peach with the right gripper - subtask_index: 3 - - subtask: 'null' - subtask_index: 4 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Realman_RMC-AIDA-L - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 46600 - dataset_size: 596.03 MB - data_structure: 'Realman_RMC-AIDA-L_storage_peach_drawer_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (108 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/Realman_RMC-AIDA-L_storage_towel_basket.yaml b/dataset_info/Realman_RMC-AIDA-L_storage_towel_basket.yaml index e77e1c971453c908eda633d499ff77cd0a97dc74..05d7aaf5b83b72a4ee5680328540df25bc4f9e6b 100644 --- a/dataset_info/Realman_RMC-AIDA-L_storage_towel_basket.yaml +++ b/dataset_info/Realman_RMC-AIDA-L_storage_towel_basket.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: furniture level2: table @@ -51,29 +51,36 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: the left gripper grasp the basket on the table, the right grippe - pick up the towel on the table and place it into the basket. +task_instruction: +- the left gripper grasp the basket on the table, the right grippe pick up the towel + on the table and place it into the basket. 
sub_tasks: -- Place the towel into the basket with the right gripper -- Abnormal -- PIck up the basket with the left gripper -- PIck up the towel with the right gripper -- end -- 'null' +- subtask: Place the towel into the basket with the right gripper + subtask_index: 0 +- subtask: Abnormal + subtask_index: 1 +- subtask: PIck up the basket with the left gripper + subtask_index: 2 +- subtask: PIck up the towel with the right gripper + subtask_index: 3 +- subtask: end + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - pick - place -robot_name: Realman_RMC-AIDA-L +robot_name: +- Realman_RMC-AIDA-L end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -81,13 +88,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -95,8 +99,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 118 total_frames: 58520 fps: 30 @@ -181,11 +184,9 @@ data_structure: 'Realman_RMC-AIDA-L_storage_towel_basket_qced_hardlink/ |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:117 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -462,7 +463,7 @@ features: &id012 shape: - 2 dtype: int32 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -470,7 +471,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -497,190 +497,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: Realman_RMC-AIDA-L_storage_towel_basket - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: household - level2: kitchen - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. 
- objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - the left gripper grasp the basket on the table, the right grippe pick up the towel - on the table and place it into the basket. - sub_tasks: - - subtask: Place the towel into the basket with the right gripper - subtask_index: 0 - - subtask: Abnormal - subtask_index: 1 - - subtask: PIck up the basket with the left gripper - subtask_index: 2 - - subtask: PIck up the towel with the right gripper - subtask_index: 3 - - subtask: end - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - pick - - place - robot_name: - - Realman_RMC-AIDA-L - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 58520 - dataset_size: 684.59 MB - data_structure: 'Realman_RMC-AIDA-L_storage_towel_basket_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(106 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, 
Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/agilex_cobot_magic_pass_object_left_to_right_black_tablecloth.yaml b/dataset_info/agilex_cobot_magic_pass_object_left_to_right_black_tablecloth.yaml index 207379e35fb800c444e1ebe1efb1b2ae5f325189..3cc4e4bf0906092380f41991429cc33ec828a338 100644 --- a/dataset_info/agilex_cobot_magic_pass_object_left_to_right_black_tablecloth.yaml +++ b/dataset_info/agilex_cobot_magic_pass_object_left_to_right_black_tablecloth.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -141,32 +141,40 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use the left gripper to pick up the item and transfer it from the - left gripper to the right gripper. +task_instruction: +- use the left gripper to pick up the item and transfer it from the left gripper to + the right gripper. sub_tasks: -- Grasp the XX with the right gripper -- Place the XX on the table with the left gripper -- Place the XX on the table with the right gripper -- Grasp the XX with the left gripper -- End -- Pass the xx to the right gripper -- 'null' +- subtask: Grasp the XX with the right gripper + subtask_index: 0 +- subtask: Place the XX on the table with the left gripper + subtask_index: 1 +- subtask: Place the XX on the table with the right gripper + subtask_index: 2 +- subtask: Grasp the XX with the left gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: Pass the xx to the right gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 atomic_actions: - grasp - lift - lower - handover - takeover -robot_name: agilex_cobot_magic +robot_name: +- agilex_cobot_magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -174,13 +182,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -188,8 +193,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 98 total_frames: 60078 fps: 30 @@ -274,11 +278,9 @@ data_structure: 'Agilex_Cobot_Magic_pass_object_left_to_right_black_tablecloth_q |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:97 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -551,7 +553,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -559,7 +561,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact 
us. support_info: For technical support, please open an issue on our GitHub repository. @@ -586,194 +587,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: agilex_cobot_magic_pass_object_left_to_right_black_tablecloth - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: commercial_convenience - level2: supermarket - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use the left gripper to pick up the item and transfer it from the left gripper - to the right gripper. 
- sub_tasks: - - subtask: Grasp the XX with the right gripper - subtask_index: 0 - - subtask: Place the XX on the table with the left gripper - subtask_index: 1 - - subtask: Place the XX on the table with the right gripper - subtask_index: 2 - - subtask: Grasp the XX with the left gripper - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: Pass the xx to the right gripper - subtask_index: 5 - - subtask: 'null' - subtask_index: 6 - atomic_actions: - - grasp - - lift - - lower - - handover - - takeover - robot_name: - - agilex_cobot_magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 60078 - dataset_size: 1.18 GB - data_structure: 'Agilex_Cobot_Magic_pass_object_left_to_right_black_tablecloth_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... 
(86 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang 
Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/agilex_cobot_magic_pass_object_left_to_right_green_tablecloth.yaml b/dataset_info/agilex_cobot_magic_pass_object_left_to_right_green_tablecloth.yaml index e3c050d9ee3605f77d3f291b3e7c27c525ee564d..2a079b0f47791e98d4436db33c1cfd901f9c7a28 100644 --- a/dataset_info/agilex_cobot_magic_pass_object_left_to_right_green_tablecloth.yaml +++ b/dataset_info/agilex_cobot_magic_pass_object_left_to_right_green_tablecloth.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. 
-extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -141,60 +141,99 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use the left gripper to pick up the item and transfer it from the - left gripper to the right gripper. +task_instruction: +- use the left gripper to pick up the item and transfer it from the left gripper to + the right gripper. sub_tasks: -- Pass the purple garbage bag to the right gripper -- Pass the eggplant to the right gripper -- The right gripper places eggplant on the right side of the table -- Use the left gripper to grab the eggplant on the left side of the table -- End -- Pass the yogurt to the right gripper -- The right gripper places banana on the right side of the table -- Use the left gripper to grab the cleaning agent on the left side of the table -- Use the left gripper to grab the milk on the left side of the table -- Use the left gripper to grab the purple garbage bag on the left side of the table -- Use the left gripper to grab the yogurt on the left side of the table -- Use the left gripper to grab the eyeglass case on the left side of the table -- The right gripper places cleaning agent on the right side of the table -- Pass the cleaning agent to the right gripper -- Use the left gripper to grab the xx on the left side of the table -- The right gripper places xx on the right side of the table -- The right gripper places yogurt on the right side of the table -- The right gripper places Rubik's Cube on the right side of the table -- Pass the Rubik's Cube to the 
right gripper -- Pass the eyeglass case to the right gripper -- Pass the long bread to the right gripper -- The right gripper places grape on the right side of the table -- The right gripper places long bread on the right side of the table -- Use the left gripper to grab the grape on the left side of the table -- Use the left gripper to grab the long bread on the left side of the table -- The right gripper places purple garbage bag on the right side of the table -- Use the left gripper to grab the Rubik's Cube on the left side of the table -- Pass the banana to the right gripper -- The right gripper places milk on the right side of the table -- Pass the milk to the right gripper -- The right gripper places eyeglass case on the right side of the table -- Pass the grape to the right gripper -- Use the left gripper to grab the banana on the left side of the table -- Pass the xx to the right gripper -- 'null' +- subtask: Pass the purple garbage bag to the right gripper + subtask_index: 0 +- subtask: Pass the eggplant to the right gripper + subtask_index: 1 +- subtask: The right gripper places eggplant on the right side of the table + subtask_index: 2 +- subtask: Use the left gripper to grab the eggplant on the left side of the table + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: Pass the yogurt to the right gripper + subtask_index: 5 +- subtask: The right gripper places banana on the right side of the table + subtask_index: 6 +- subtask: Use the left gripper to grab the cleaning agent on the left side of the + table + subtask_index: 7 +- subtask: Use the left gripper to grab the milk on the left side of the table + subtask_index: 8 +- subtask: Use the left gripper to grab the purple garbage bag on the left side of + the table + subtask_index: 9 +- subtask: Use the left gripper to grab the yogurt on the left side of the table + subtask_index: 10 +- subtask: Use the left gripper to grab the eyeglass case on the left side of the + table + subtask_index: 
11 +- subtask: The right gripper places cleaning agent on the right side of the table + subtask_index: 12 +- subtask: Pass the cleaning agent to the right gripper + subtask_index: 13 +- subtask: Use the left gripper to grab the xx on the left side of the table + subtask_index: 14 +- subtask: The right gripper places xx on the right side of the table + subtask_index: 15 +- subtask: The right gripper places yogurt on the right side of the table + subtask_index: 16 +- subtask: The right gripper places Rubik's Cube on the right side of the table + subtask_index: 17 +- subtask: Pass the Rubik's Cube to the right gripper + subtask_index: 18 +- subtask: Pass the eyeglass case to the right gripper + subtask_index: 19 +- subtask: Pass the long bread to the right gripper + subtask_index: 20 +- subtask: The right gripper places grape on the right side of the table + subtask_index: 21 +- subtask: The right gripper places long bread on the right side of the table + subtask_index: 22 +- subtask: Use the left gripper to grab the grape on the left side of the table + subtask_index: 23 +- subtask: Use the left gripper to grab the long bread on the left side of the table + subtask_index: 24 +- subtask: The right gripper places purple garbage bag on the right side of the table + subtask_index: 25 +- subtask: Use the left gripper to grab the Rubik's Cube on the left side of the table + subtask_index: 26 +- subtask: Pass the banana to the right gripper + subtask_index: 27 +- subtask: The right gripper places milk on the right side of the table + subtask_index: 28 +- subtask: Pass the milk to the right gripper + subtask_index: 29 +- subtask: The right gripper places eyeglass case on the right side of the table + subtask_index: 30 +- subtask: Pass the grape to the right gripper + subtask_index: 31 +- subtask: Use the left gripper to grab the banana on the left side of the table + subtask_index: 32 +- subtask: Pass the xx to the right gripper + subtask_index: 33 +- subtask: 'null' + 
subtask_index: 34 atomic_actions: - grasp - lift - lower - handover - takeover -robot_name: agilex_cobot_magic +robot_name: +- agilex_cobot_magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -202,13 +241,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -216,8 +252,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 96 total_frames: 64995 fps: 30 @@ -302,11 +337,9 @@ data_structure: 'Agilex_Cobot_Magic_pass_object_left_to_right_green_tablecloth_q |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:95 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -579,7 +612,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -587,7 +620,6 @@ dataset_description: This dataset uses an extended 
format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -614,255 +646,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: agilex_cobot_magic_pass_object_left_to_right_green_tablecloth - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: commercial_convenience - level2: supermarket - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use the left gripper to pick up the item and transfer it from the left gripper - to the right gripper. 
- sub_tasks: - - subtask: Pass the purple garbage bag to the right gripper - subtask_index: 0 - - subtask: Pass the eggplant to the right gripper - subtask_index: 1 - - subtask: The right gripper places eggplant on the right side of the table - subtask_index: 2 - - subtask: Use the left gripper to grab the eggplant on the left side of the table - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: Pass the yogurt to the right gripper - subtask_index: 5 - - subtask: The right gripper places banana on the right side of the table - subtask_index: 6 - - subtask: Use the left gripper to grab the cleaning agent on the left side of the - table - subtask_index: 7 - - subtask: Use the left gripper to grab the milk on the left side of the table - subtask_index: 8 - - subtask: Use the left gripper to grab the purple garbage bag on the left side - of the table - subtask_index: 9 - - subtask: Use the left gripper to grab the yogurt on the left side of the table - subtask_index: 10 - - subtask: Use the left gripper to grab the eyeglass case on the left side of the - table - subtask_index: 11 - - subtask: The right gripper places cleaning agent on the right side of the table - subtask_index: 12 - - subtask: Pass the cleaning agent to the right gripper - subtask_index: 13 - - subtask: Use the left gripper to grab the xx on the left side of the table - subtask_index: 14 - - subtask: The right gripper places xx on the right side of the table - subtask_index: 15 - - subtask: The right gripper places yogurt on the right side of the table - subtask_index: 16 - - subtask: The right gripper places Rubik's Cube on the right side of the table - subtask_index: 17 - - subtask: Pass the Rubik's Cube to the right gripper - subtask_index: 18 - - subtask: Pass the eyeglass case to the right gripper - subtask_index: 19 - - subtask: Pass the long bread to the right gripper - subtask_index: 20 - - subtask: The right gripper places grape on the right side of the table - subtask_index: 
21 - - subtask: The right gripper places long bread on the right side of the table - subtask_index: 22 - - subtask: Use the left gripper to grab the grape on the left side of the table - subtask_index: 23 - - subtask: Use the left gripper to grab the long bread on the left side of the table - subtask_index: 24 - - subtask: The right gripper places purple garbage bag on the right side of the - table - subtask_index: 25 - - subtask: Use the left gripper to grab the Rubik's Cube on the left side of the - table - subtask_index: 26 - - subtask: Pass the banana to the right gripper - subtask_index: 27 - - subtask: The right gripper places milk on the right side of the table - subtask_index: 28 - - subtask: Pass the milk to the right gripper - subtask_index: 29 - - subtask: The right gripper places eyeglass case on the right side of the table - subtask_index: 30 - - subtask: Pass the grape to the right gripper - subtask_index: 31 - - subtask: Use the left gripper to grab the banana on the left side of the table - subtask_index: 32 - - subtask: Pass the xx to the right gripper - subtask_index: 33 - - subtask: 'null' - subtask_index: 34 - atomic_actions: - - grasp - - lift - - lower - - handover - - takeover - robot_name: - - agilex_cobot_magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 64995 - dataset_size: 795.75 MB - data_structure: 'Agilex_Cobot_Magic_pass_object_left_to_right_green_tablecloth_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (84 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/agilex_cobot_magic_pass_object_left_to_right_khaki_tablecloth.yaml b/dataset_info/agilex_cobot_magic_pass_object_left_to_right_khaki_tablecloth.yaml index 938f39b883d89aa38f09bbb26f15758f9ebf4139..71b9c5e858a6ee25c02f84451e3505a9ebd257fa 100644 --- a/dataset_info/agilex_cobot_magic_pass_object_left_to_right_khaki_tablecloth.yaml +++ b/dataset_info/agilex_cobot_magic_pass_object_left_to_right_khaki_tablecloth.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -141,67 +141,122 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use the left gripper to pick up the item and transfer it from the - left gripper to the right gripper. +task_instruction: +- use the left gripper to pick up the item and transfer it from the left gripper to + the right gripper. 
sub_tasks: -- Pass the purple garbage bag to the right gripper -- Pass the shower sphere to the right gripper -- Place the milk on the table with the right gripper -- Place the XX on the table with the right gripper -- Pass the yogurt to the right gripper -- End -- Pass the milk to the right gripper -- Place the purple garbage bag on the table with the right gripper -- Place the Rubik's Cube on the table with the right gripper -- Grasp the blue blackboard erasure with the left gripper -- Grasp the shower sphere with the left gripper -- Grasp the plush banana with the left gripper -- Grasp the yogurt with the left gripper -- Grasp the milk with the left gripper -- Pass the plush banana to the right gripper -- Place the blue blackboard erasure on the table with the right gripper -- Abnormal -- Grasp the XX with the left gripper -- Grasp the blue blackboard erasure with the left grippe -- Pass the Rubik's Cube to the right gripper -- Place the milk on the table with the right gripper -- Pass the long bread to the right gripper -- Pass the milk to the right gripper -- Pass the long bread to the right gripper -- Grasp the milk with the left gripper -- Pass the blue blackboard erasure to the right gripper -- Place the long bread on the table with the right gripper -- Grasp the long bread with the left gripper -- Grasp the Rubik's Cube with the left gripper -- Place the yogurt on the table with the right gripper -- Place the plush banana on the table with the right gripper -- Pass the milk to the right gripper -- Place the grape on the table with the right gripper -- Grasp the long bread with the left gripper -- Pass the milk to the right gripper -- Pass the grape to the right gripper -- Pass the blue garbage bag to the right gripper -- Pass the xx to the right gripper -- Grasp the grape with the left gripper -- Grasp the purple garbage bag with the left gripper -- Place the shower sphere on the table with the right gripper -- 'null' +- subtask: Pass the purple garbage 
bag to the right gripper + subtask_index: 0 +- subtask: Pass the shower sphere to the right gripper + subtask_index: 1 +- subtask: Place the milk on the table with the right gripper + subtask_index: 2 +- subtask: Place the XX on the table with the right gripper + subtask_index: 3 +- subtask: Pass the yogurt to the right gripper + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: Pass the milk to the right gripper + subtask_index: 6 +- subtask: Place the purple garbage bag on the table with the right gripper + subtask_index: 7 +- subtask: Place the Rubik's Cube on the table with the right gripper + subtask_index: 8 +- subtask: Grasp the blue blackboard erasure with the left gripper + subtask_index: 9 +- subtask: Grasp the shower sphere with the left gripper + subtask_index: 10 +- subtask: Grasp the plush banana with the left gripper + subtask_index: 11 +- subtask: Grasp the yogurt with the left gripper + subtask_index: 12 +- subtask: 'Grasp the milk with the left gripper + + ' + subtask_index: 13 +- subtask: Pass the plush banana to the right gripper + subtask_index: 14 +- subtask: Place the blue blackboard erasure on the table with the right gripper + subtask_index: 15 +- subtask: Abnormal + subtask_index: 16 +- subtask: Grasp the XX with the left gripper + subtask_index: 17 +- subtask: Grasp the blue blackboard erasure with the left grippe + subtask_index: 18 +- subtask: Pass the Rubik's Cube to the right gripper + subtask_index: 19 +- subtask: 'Place the milk on the table with the right gripper + + ' + subtask_index: 20 +- subtask: 'Pass the long bread to the right gripper + + ' + subtask_index: 21 +- subtask: ' + + Pass the milk to the right gripper' + subtask_index: 22 +- subtask: Pass the long bread to the right gripper + subtask_index: 23 +- subtask: Grasp the milk with the left gripper + subtask_index: 24 +- subtask: Pass the blue blackboard erasure to the right gripper + subtask_index: 25 +- subtask: Place the long bread on the table with the 
right gripper + subtask_index: 26 +- subtask: Grasp the long bread with the left gripper + subtask_index: 27 +- subtask: Grasp the Rubik's Cube with the left gripper + subtask_index: 28 +- subtask: Place the yogurt on the table with the right gripper + subtask_index: 29 +- subtask: Place the plush banana on the table with the right gripper + subtask_index: 30 +- subtask: 'Pass the milk to the right gripper + + ' + subtask_index: 31 +- subtask: Place the grape on the table with the right gripper + subtask_index: 32 +- subtask: 'Grasp the long bread with the left gripper + + ' + subtask_index: 33 +- subtask: Pass the milk to the right gripper + subtask_index: 34 +- subtask: Pass the grape to the right gripper + subtask_index: 35 +- subtask: Pass the blue garbage bag to the right gripper + subtask_index: 36 +- subtask: Pass the xx to the right gripper + subtask_index: 37 +- subtask: Grasp the grape with the left gripper + subtask_index: 38 +- subtask: Grasp the purple garbage bag with the left gripper + subtask_index: 39 +- subtask: Place the shower sphere on the table with the right gripper + subtask_index: 40 +- subtask: 'null' + subtask_index: 41 atomic_actions: - grasp - lift - lower - handover - takeover -robot_name: agilex_cobot_magic +robot_name: +- agilex_cobot_magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -209,13 +264,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -223,8 +275,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 97 total_frames: 53653 fps: 30 @@ -309,11 +360,9 @@ data_structure: 'Agilex_Cobot_Magic_pass_object_left_to_right_khaki_tablecloth_q |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:96 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -586,7 +635,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -594,7 +643,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact 
us. support_info: For technical support, please open an issue on our GitHub repository. @@ -621,276 +669,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: agilex_cobot_magic_pass_object_left_to_right_khaki_tablecloth - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: commercial_convenience - level2: supermarket - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use the left gripper to pick up the item and transfer it from the left gripper - to the right gripper. 
- sub_tasks: - - subtask: Pass the purple garbage bag to the right gripper - subtask_index: 0 - - subtask: Pass the shower sphere to the right gripper - subtask_index: 1 - - subtask: Place the milk on the table with the right gripper - subtask_index: 2 - - subtask: Place the XX on the table with the right gripper - subtask_index: 3 - - subtask: Pass the yogurt to the right gripper - subtask_index: 4 - - subtask: End - subtask_index: 5 - - subtask: Pass the milk to the right gripper - subtask_index: 6 - - subtask: Place the purple garbage bag on the table with the right gripper - subtask_index: 7 - - subtask: Place the Rubik's Cube on the table with the right gripper - subtask_index: 8 - - subtask: Grasp the blue blackboard erasure with the left gripper - subtask_index: 9 - - subtask: Grasp the shower sphere with the left gripper - subtask_index: 10 - - subtask: Grasp the plush banana with the left gripper - subtask_index: 11 - - subtask: Grasp the yogurt with the left gripper - subtask_index: 12 - - subtask: 'Grasp the milk with the left gripper - - ' - subtask_index: 13 - - subtask: Pass the plush banana to the right gripper - subtask_index: 14 - - subtask: Place the blue blackboard erasure on the table with the right gripper - subtask_index: 15 - - subtask: Abnormal - subtask_index: 16 - - subtask: Grasp the XX with the left gripper - subtask_index: 17 - - subtask: Grasp the blue blackboard erasure with the left grippe - subtask_index: 18 - - subtask: Pass the Rubik's Cube to the right gripper - subtask_index: 19 - - subtask: 'Place the milk on the table with the right gripper - - ' - subtask_index: 20 - - subtask: 'Pass the long bread to the right gripper - - ' - subtask_index: 21 - - subtask: ' - - Pass the milk to the right gripper' - subtask_index: 22 - - subtask: Pass the long bread to the right gripper - subtask_index: 23 - - subtask: Grasp the milk with the left gripper - subtask_index: 24 - - subtask: Pass the blue blackboard erasure to the right gripper 
- subtask_index: 25 - - subtask: Place the long bread on the table with the right gripper - subtask_index: 26 - - subtask: Grasp the long bread with the left gripper - subtask_index: 27 - - subtask: Grasp the Rubik's Cube with the left gripper - subtask_index: 28 - - subtask: Place the yogurt on the table with the right gripper - subtask_index: 29 - - subtask: Place the plush banana on the table with the right gripper - subtask_index: 30 - - subtask: 'Pass the milk to the right gripper - - ' - subtask_index: 31 - - subtask: Place the grape on the table with the right gripper - subtask_index: 32 - - subtask: 'Grasp the long bread with the left gripper - - ' - subtask_index: 33 - - subtask: Pass the milk to the right gripper - subtask_index: 34 - - subtask: Pass the grape to the right gripper - subtask_index: 35 - - subtask: Pass the blue garbage bag to the right gripper - subtask_index: 36 - - subtask: Pass the xx to the right gripper - subtask_index: 37 - - subtask: Grasp the grape with the left gripper - subtask_index: 38 - - subtask: Grasp the purple garbage bag with the left gripper - subtask_index: 39 - - subtask: Place the shower sphere on the table with the right gripper - subtask_index: 40 - - subtask: 'null' - subtask_index: 41 - atomic_actions: - - grasp - - lift - - lower - - handover - - takeover - robot_name: - - agilex_cobot_magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 53653 - dataset_size: 636.04 MB - data_structure: 'Agilex_Cobot_Magic_pass_object_left_to_right_khaki_tablecloth_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (85 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/agilex_cobot_magic_pass_object_left_to_right_white_tablecloth.yaml b/dataset_info/agilex_cobot_magic_pass_object_left_to_right_white_tablecloth.yaml index ba4fda880c578becd764784b6dcfff840e33c87c..c5a1816bbc6cc3519011caa7af32555a274768da 100644 --- a/dataset_info/agilex_cobot_magic_pass_object_left_to_right_white_tablecloth.yaml +++ b/dataset_info/agilex_cobot_magic_pass_object_left_to_right_white_tablecloth.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -141,31 +141,38 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use the left gripper to pick up the item and transfer it from the - left gripper to the right gripper. +task_instruction: +- use the left gripper to pick up the item and transfer it from the left gripper to + the right gripper. 
sub_tasks: -- Unlabeled -- Grasp the Rubik's Cube with the left gripper -- Pass the Rubik's Cube to the right gripper -- End -- Place the Rubik's Cube on the table with the right gripper -- 'null' +- subtask: Unlabeled + subtask_index: 0 +- subtask: Grasp the Rubik's Cube with the left gripper + subtask_index: 1 +- subtask: Pass the Rubik's Cube to the right gripper + subtask_index: 2 +- subtask: End + subtask_index: 3 +- subtask: Place the Rubik's Cube on the table with the right gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - grasp - lift - lower - handover - takeover -robot_name: agilex_cobot_magic +robot_name: +- agilex_cobot_magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -173,13 +180,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -187,8 +191,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 96 total_frames: 55704 fps: 30 @@ -273,11 +276,9 @@ data_structure: 'Agilex_Cobot_Magic_pass_object_left_to_right_white_tablecloth_q |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:95 -data_path: 
data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -550,7 +551,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -558,7 +559,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -585,192 +585,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. 
- extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: agilex_cobot_magic_pass_object_left_to_right_white_tablecloth - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: commercial_convenience - level2: supermarket - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use the left gripper to pick up the item and transfer it from the left gripper - to the right gripper. - sub_tasks: - - subtask: Unlabeled - subtask_index: 0 - - subtask: Grasp the Rubik's Cube with the left gripper - subtask_index: 1 - - subtask: Pass the Rubik's Cube to the right gripper - subtask_index: 2 - - subtask: End - subtask_index: 3 - - subtask: Place the Rubik's Cube on the table with the right gripper - subtask_index: 4 - - subtask: 'null' - subtask_index: 5 - atomic_actions: - - grasp - - lift - - lower - - handover - - takeover - robot_name: - - agilex_cobot_magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 55704 - dataset_size: 546.67 MB - data_structure: 'Agilex_Cobot_Magic_pass_object_left_to_right_white_tablecloth_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (84 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/agilex_cobot_magic_pass_object_right_to_left_black_tablecloth.yaml b/dataset_info/agilex_cobot_magic_pass_object_right_to_left_black_tablecloth.yaml index b6da421c49bedae2ffd175d2ee29991b2757b3bf..70f4a69a67fcf67b33aa6847d8cab0948f9e1770 100644 --- a/dataset_info/agilex_cobot_magic_pass_object_right_to_left_black_tablecloth.yaml +++ b/dataset_info/agilex_cobot_magic_pass_object_right_to_left_black_tablecloth.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -141,57 +141,98 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use the right gripper to pick up the item and transfer it from the - right gripper to the left gripper. +task_instruction: +- use the right gripper to pick up the item and transfer it from the right gripper + to the left gripper. 
sub_tasks: -- The left gripper places milk on the left side of the table -- The left gripper places bread on the left side of the table -- Pass the bread to the left gripper -- Pass the shower sphere to the left gripper -- Use the right gripper to grab the bread on the right side of the table -- Use the right gripper to grab the grape on the right side of the table -- End -- Use the right gripper to grab the banana on the right side of the table -- Pass the purple garbage bag to the left gripper -- Pass the Rubik's Cube to the left gripper -- The left gripper places yogurt on the left side of the table -- The left gripper places shower sphere on the left side of the table -- Use the right gripper to grab the shower sphere on the right side of the table -- Use the right gripper to grab the grapes on the right side of the table -- Pass the yogurt to the left gripper -- The left gripper places grape on the left side of the table -- The left gripper places grapes on the left side of the table -- Use the right gripper to grab the milk on the right side of the table -- Pass the milk to the left gripper -- The left gripper places banana on the left side of the table -- Use the right gripper to grab the eyeglass case on the right side of the table -- Pass the banana to the left gripper -- Pass the grapes to the left gripper -- Use the right gripper to grab the purple garbage bag on the right side of the table -- Pass the grape to the left gripper -- The left gripper places eyeglass case on the left side of the table -- Pass the eyeglass case to the left gripper -- Use the right gripper to grab the yogurt on the right side of the table -- Use the right gripper to grab the Rubik's Cube on the right side of the table -- Pass the milk to the left gripper -- The left gripper places purple garbage bag on the left side of the table -- 'null' +- subtask: The left gripper places milk on the left side of the table + subtask_index: 0 +- subtask: The left gripper places bread on the 
left side of the table + subtask_index: 1 +- subtask: Pass the bread to the left gripper + subtask_index: 2 +- subtask: Pass the shower sphere to the left gripper + subtask_index: 3 +- subtask: Use the right gripper to grab the bread on the right side of the table + subtask_index: 4 +- subtask: Use the right gripper to grab the grape on the right side of the table + subtask_index: 5 +- subtask: End + subtask_index: 6 +- subtask: Use the right gripper to grab the banana on the right side of the table + subtask_index: 7 +- subtask: Pass the purple garbage bag to the left gripper + subtask_index: 8 +- subtask: Pass the Rubik's Cube to the left gripper + subtask_index: 9 +- subtask: The left gripper places yogurt on the left side of the table + subtask_index: 10 +- subtask: The left gripper places shower sphere on the left side of the table + subtask_index: 11 +- subtask: Use the right gripper to grab the shower sphere on the right side of the + table + subtask_index: 12 +- subtask: Use the right gripper to grab the grapes on the right side of the table + subtask_index: 13 +- subtask: Pass the yogurt to the left gripper + subtask_index: 14 +- subtask: The left gripper places grape on the left side of the table + subtask_index: 15 +- subtask: The left gripper places grapes on the left side of the table + subtask_index: 16 +- subtask: Use the right gripper to grab the milk on the right side of the table + subtask_index: 17 +- subtask: Pass the milk to the left gripper + subtask_index: 18 +- subtask: The left gripper places banana on the left side of the table + subtask_index: 19 +- subtask: Use the right gripper to grab the eyeglass case on the right side of the + table + subtask_index: 20 +- subtask: Pass the banana to the left gripper + subtask_index: 21 +- subtask: Pass the grapes to the left gripper + subtask_index: 22 +- subtask: Use the right gripper to grab the purple garbage bag on the right side + of the table + subtask_index: 23 +- subtask: Pass the grape to 
the left gripper + subtask_index: 24 +- subtask: The left gripper places eyeglass case on the left side of the table + subtask_index: 25 +- subtask: Pass the eyeglass case to the left gripper + subtask_index: 26 +- subtask: Use the right gripper to grab the yogurt on the right side of the table + subtask_index: 27 +- subtask: Use the right gripper to grab the Rubik's Cube on the right side of the + table + subtask_index: 28 +- subtask: ' + + Pass the milk to the left gripper + + ' + subtask_index: 29 +- subtask: The left gripper places purple garbage bag on the left side of the table + subtask_index: 30 +- subtask: 'null' + subtask_index: 31 atomic_actions: - grasp - lift - lower - handover - takeover -robot_name: agilex_cobot_magic +robot_name: +- agilex_cobot_magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -199,13 +240,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -213,8 +251,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 93 total_frames: 59541 fps: 30 @@ -299,11 +336,9 @@ data_structure: 
'Agilex_Cobot_Magic_pass_object_right_to_left_black_tablecloth_q |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:92 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -576,7 +611,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -584,7 +619,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -611,252 +645,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. 
- extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: agilex_cobot_magic_pass_object_right_to_left_black_tablecloth - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: commercial_convenience - level2: supermarket - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use the right gripper to pick up the item and transfer it from the right gripper - to the left gripper. - sub_tasks: - - subtask: The left gripper places milk on the left side of the table - subtask_index: 0 - - subtask: The left gripper places bread on the left side of the table - subtask_index: 1 - - subtask: Pass the bread to the left gripper - subtask_index: 2 - - subtask: Pass the shower sphere to the left gripper - subtask_index: 3 - - subtask: Use the right gripper to grab the bread on the right side of the table - subtask_index: 4 - - subtask: Use the right gripper to grab the grape on the right side of the table - subtask_index: 5 - - subtask: End - subtask_index: 6 - - subtask: Use the right gripper to grab the banana on the right side of the table - subtask_index: 7 - - subtask: Pass the purple garbage bag to the left gripper - subtask_index: 8 - - subtask: Pass the Rubik's Cube to the left gripper - subtask_index: 9 - - subtask: The left gripper places yogurt on the left side of the table - subtask_index: 10 - - subtask: The left gripper places shower sphere on the left side of the table - subtask_index: 11 - - subtask: Use the right gripper to grab the shower sphere on the right side of - the table - subtask_index: 12 - - subtask: Use the right gripper to grab the grapes on the right side of the table - subtask_index: 13 - - subtask: Pass the yogurt to the left gripper - subtask_index: 14 - - 
subtask: The left gripper places grape on the left side of the table - subtask_index: 15 - - subtask: The left gripper places grapes on the left side of the table - subtask_index: 16 - - subtask: Use the right gripper to grab the milk on the right side of the table - subtask_index: 17 - - subtask: Pass the milk to the left gripper - subtask_index: 18 - - subtask: The left gripper places banana on the left side of the table - subtask_index: 19 - - subtask: Use the right gripper to grab the eyeglass case on the right side of - the table - subtask_index: 20 - - subtask: Pass the banana to the left gripper - subtask_index: 21 - - subtask: Pass the grapes to the left gripper - subtask_index: 22 - - subtask: Use the right gripper to grab the purple garbage bag on the right side - of the table - subtask_index: 23 - - subtask: Pass the grape to the left gripper - subtask_index: 24 - - subtask: The left gripper places eyeglass case on the left side of the table - subtask_index: 25 - - subtask: Pass the eyeglass case to the left gripper - subtask_index: 26 - - subtask: Use the right gripper to grab the yogurt on the right side of the table - subtask_index: 27 - - subtask: Use the right gripper to grab the Rubik's Cube on the right side of the - table - subtask_index: 28 - - subtask: ' - - Pass the milk to the left gripper - - ' - subtask_index: 29 - - subtask: The left gripper places purple garbage bag on the left side of the table - subtask_index: 30 - - subtask: 'null' - subtask_index: 31 - atomic_actions: - - grasp - - lift - - lower - - handover - - takeover - robot_name: - - agilex_cobot_magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 59541 - dataset_size: 1.15 GB - data_structure: 'Agilex_Cobot_Magic_pass_object_right_to_left_black_tablecloth_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (81 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/agilex_cobot_magic_pass_object_right_to_left_green_tablecloth.yaml b/dataset_info/agilex_cobot_magic_pass_object_right_to_left_green_tablecloth.yaml index a5c41aad7081761e247195eee1c3597d09895645..89dfb2cb95b1fe81a48e0e2bd7e11aacaf2287f7 100644 --- a/dataset_info/agilex_cobot_magic_pass_object_right_to_left_green_tablecloth.yaml +++ b/dataset_info/agilex_cobot_magic_pass_object_right_to_left_green_tablecloth.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -141,78 +141,174 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use the right gripper to pick up the item and transfer it from the - right gripper to the left gripper. +task_instruction: +- use the right gripper to pick up the item and transfer it from the right gripper + to the left gripper. 
sub_tasks: -- The left gripper places milk on the left side of the table -- The left gripper places bread on the left side of the table -- Use the right gripper to grab the shampoo on the right side of the table -- Use the right to grab the bread on the right side of table -- Pass the bread to the left gripper -- Use the right gripper to grab the bread on the right side of the table -- The left gripper places grape on the left side of the table -- End -- The left gripper places bread on the left side of the table -- Pass the eggplant to the left gripper -- The left gripper places yogurt on the left side of the table -- Pass the purple garbage bag to the left gripper -- Use the right gripper to grab the xbreadon the right side of the table -- The left gripper places yogurt on the left side of the table -- Pass the Rubik's Cube to the left gripper -- Use the right gripper to grab the grape on the right side of the table -- Use the right gripper to grab the grape on the right side of the table -- Pass the yogurt to the left gripper -- Pass the bread to the left gripper -- Use the right gripper to grab the yogurt on the right side of the table -- Pass the grape to the left grippe -- Use the right gripper to grab the milk on the right side of the table -- The left gripper places shampoo on the left side of the table -- The left gripper places grape on the left side of the table -- The left gripper places eggplant on the left side of the table -- Use the right gripper to grab the milk on the right side of the table -- Pass the milk to the left gripper -- The left gripper places banana on the left side of the table -- The left gripper places milk on the left side of the table -- The left gripper places xx on the left side of the table -- Pass the shampoo to the left gripper -- Pass the bread to the left gripper -- Use the right gripper to grab the bread on the right side of the table -- The left gripper places purple garbage bag on the left side of the table -- Use the 
right gripper to grab the xx on the right side of the table -- Pass the milk to the left gripper -- Pass the banana to the left gripper -- Pass the shampoo to the left gripper -- The left gripper places Rubik's Cube on the left side of the table -- Use the right gripper to grab the purple garbage bag on the right side of the table -- Pass the yogurt to the left gripper -- Pass the bread to the left gripper -- Use the right gripper to shampoo the on the right side of the table -- User the right gripper to grab the grape on the right side of the table -- The left gripper places milk on the left side of the table -- Use the right gripper to grab the yogurt on the right side of the table -- The left gripper places yogurt on the left side of the table -- Use the right gripper to grab the shampoo on the right side of the table -- Use the right gripper to grab the Rubik's Cube on the right side of the table -- Pass the milk to the left gripper -- Pass the grape to the left gripper -- Pass the grape to the left gripper -- 'null' +- subtask: The left gripper places milk on the left side of the table + subtask_index: 0 +- subtask: The left gripper places bread on the left side of the table + subtask_index: 1 +- subtask: Use the right gripper to grab the shampoo on the right side of the table + subtask_index: 2 +- subtask: Use the right to grab the bread on the right side of table + subtask_index: 3 +- subtask: Pass the bread to the left gripper + subtask_index: 4 +- subtask: Use the right gripper to grab the bread on the right side of the table + subtask_index: 5 +- subtask: 'The left gripper places grape on the left side of the table + + ' + subtask_index: 6 +- subtask: End + subtask_index: 7 +- subtask: 'The left gripper places bread on the left side of the table + + ' + subtask_index: 8 +- subtask: Pass the eggplant to the left gripper + subtask_index: 9 +- subtask: The left gripper places yogurt on the left side of the table + subtask_index: 10 +- subtask: Pass the 
purple garbage bag to the left gripper + subtask_index: 11 +- subtask: 'Use the right gripper to grab the xbreadon the right side of the table + + ' + subtask_index: 12 +- subtask: The left gripper places yogurt on the left side of the table + subtask_index: 13 +- subtask: Pass the Rubik's Cube to the left gripper + subtask_index: 14 +- subtask: 'Use the right gripper to grab the grape on the right side of the table + + ' + subtask_index: 15 +- subtask: Use the right gripper to grab the grape on the right side of the table + subtask_index: 16 +- subtask: Pass the yogurt to the left gripper + subtask_index: 17 +- subtask: 'Pass the bread to the left gripper + + ' + subtask_index: 18 +- subtask: 'Use the right gripper to grab the yogurt on the right side of the table + + ' + subtask_index: 19 +- subtask: Pass the grape to the left grippe + subtask_index: 20 +- subtask: 'Use the right gripper to grab the milk on the right side of the table + + ' + subtask_index: 21 +- subtask: The left gripper places shampoo on the left side of the table + subtask_index: 22 +- subtask: The left gripper places grape on the left side of the table + subtask_index: 23 +- subtask: The left gripper places eggplant on the left side of the table + subtask_index: 24 +- subtask: Use the right gripper to grab the milk on the right side of the table + subtask_index: 25 +- subtask: Pass the milk to the left gripper + subtask_index: 26 +- subtask: The left gripper places banana on the left side of the table + subtask_index: 27 +- subtask: The left gripper places milk on the left side of the table + subtask_index: 28 +- subtask: The left gripper places xx on the left side of the table + subtask_index: 29 +- subtask: Pass the shampoo to the left gripper + subtask_index: 30 +- subtask: ' + + Pass the bread to the left gripper + + ' + subtask_index: 31 +- subtask: 'Use the right gripper to grab the bread on the right side of the table + + ' + subtask_index: 32 +- subtask: The left gripper places purple 
garbage bag on the left side of the table + subtask_index: 33 +- subtask: Use the right gripper to grab the xx on the right side of the table + subtask_index: 34 +- subtask: 'Pass the milk to the left gripper + + ' + subtask_index: 35 +- subtask: Pass the banana to the left gripper + subtask_index: 36 +- subtask: 'Pass the shampoo to the left gripper + + ' + subtask_index: 37 +- subtask: The left gripper places Rubik's Cube on the left side of the table + subtask_index: 38 +- subtask: Use the right gripper to grab the purple garbage bag on the right side + of the table + subtask_index: 39 +- subtask: ' + + Pass the yogurt to the left gripper' + subtask_index: 40 +- subtask: ' + + Pass the bread to the left gripper' + subtask_index: 41 +- subtask: Use the right gripper to shampoo the on the right side of the table + subtask_index: 42 +- subtask: 'User the right gripper to grab the grape on the right side of the table + + ' + subtask_index: 43 +- subtask: 'The left gripper places milk on the left side of the table + + ' + subtask_index: 44 +- subtask: Use the right gripper to grab the yogurt on the right side of the table + subtask_index: 45 +- subtask: 'The left gripper places yogurt on the left side of the table + + ' + subtask_index: 46 +- subtask: Use the right gripper to grab the shampoo on the right side of the table + subtask_index: 47 +- subtask: Use the right gripper to grab the Rubik's Cube on the right side of the + table + subtask_index: 48 +- subtask: ' + + Pass the milk to the left gripper + + ' + subtask_index: 49 +- subtask: Pass the grape to the left gripper + subtask_index: 50 +- subtask: 'Pass the grape to the left gripper + + ' + subtask_index: 51 +- subtask: 'null' + subtask_index: 52 atomic_actions: - grasp - lift - lower - handover - takeover -robot_name: agilex_cobot_magic +robot_name: +- agilex_cobot_magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type 
information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -220,13 +316,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -234,8 +327,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 98 total_frames: 69065 fps: 30 @@ -320,11 +412,9 @@ data_structure: 'Agilex_Cobot_Magic_pass_object_right_to_left_green_tablecloth_q |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:97 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -597,7 +687,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -605,7 +695,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, 
please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -632,328 +721,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: agilex_cobot_magic_pass_object_right_to_left_green_tablecloth - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: commercial_convenience - level2: supermarket - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use the right gripper to pick up the item and transfer it from the right gripper - to the left gripper. 
- sub_tasks: - - subtask: The left gripper places milk on the left side of the table - subtask_index: 0 - - subtask: The left gripper places bread on the left side of the table - subtask_index: 1 - - subtask: Use the right gripper to grab the shampoo on the right side of the table - subtask_index: 2 - - subtask: Use the right to grab the bread on the right side of table - subtask_index: 3 - - subtask: Pass the bread to the left gripper - subtask_index: 4 - - subtask: Use the right gripper to grab the bread on the right side of the table - subtask_index: 5 - - subtask: 'The left gripper places grape on the left side of the table - - ' - subtask_index: 6 - - subtask: End - subtask_index: 7 - - subtask: 'The left gripper places bread on the left side of the table - - ' - subtask_index: 8 - - subtask: Pass the eggplant to the left gripper - subtask_index: 9 - - subtask: The left gripper places yogurt on the left side of the table - subtask_index: 10 - - subtask: Pass the purple garbage bag to the left gripper - subtask_index: 11 - - subtask: 'Use the right gripper to grab the xbreadon the right side of the table - - ' - subtask_index: 12 - - subtask: The left gripper places yogurt on the left side of the table - subtask_index: 13 - - subtask: Pass the Rubik's Cube to the left gripper - subtask_index: 14 - - subtask: 'Use the right gripper to grab the grape on the right side of the table - - ' - subtask_index: 15 - - subtask: Use the right gripper to grab the grape on the right side of the table - subtask_index: 16 - - subtask: Pass the yogurt to the left gripper - subtask_index: 17 - - subtask: 'Pass the bread to the left gripper - - ' - subtask_index: 18 - - subtask: 'Use the right gripper to grab the yogurt on the right side of the table - - ' - subtask_index: 19 - - subtask: Pass the grape to the left grippe - subtask_index: 20 - - subtask: 'Use the right gripper to grab the milk on the right side of the table - - ' - subtask_index: 21 - - subtask: The left gripper 
places shampoo on the left side of the table - subtask_index: 22 - - subtask: The left gripper places grape on the left side of the table - subtask_index: 23 - - subtask: The left gripper places eggplant on the left side of the table - subtask_index: 24 - - subtask: Use the right gripper to grab the milk on the right side of the table - subtask_index: 25 - - subtask: Pass the milk to the left gripper - subtask_index: 26 - - subtask: The left gripper places banana on the left side of the table - subtask_index: 27 - - subtask: The left gripper places milk on the left side of the table - subtask_index: 28 - - subtask: The left gripper places xx on the left side of the table - subtask_index: 29 - - subtask: Pass the shampoo to the left gripper - subtask_index: 30 - - subtask: ' - - Pass the bread to the left gripper - - ' - subtask_index: 31 - - subtask: 'Use the right gripper to grab the bread on the right side of the table - - ' - subtask_index: 32 - - subtask: The left gripper places purple garbage bag on the left side of the table - subtask_index: 33 - - subtask: Use the right gripper to grab the xx on the right side of the table - subtask_index: 34 - - subtask: 'Pass the milk to the left gripper - - ' - subtask_index: 35 - - subtask: Pass the banana to the left gripper - subtask_index: 36 - - subtask: 'Pass the shampoo to the left gripper - - ' - subtask_index: 37 - - subtask: The left gripper places Rubik's Cube on the left side of the table - subtask_index: 38 - - subtask: Use the right gripper to grab the purple garbage bag on the right side - of the table - subtask_index: 39 - - subtask: ' - - Pass the yogurt to the left gripper' - subtask_index: 40 - - subtask: ' - - Pass the bread to the left gripper' - subtask_index: 41 - - subtask: Use the right gripper to shampoo the on the right side of the table - subtask_index: 42 - - subtask: 'User the right gripper to grab the grape on the right side of the table - - ' - subtask_index: 43 - - subtask: 'The left 
gripper places milk on the left side of the table - - ' - subtask_index: 44 - - subtask: Use the right gripper to grab the yogurt on the right side of the table - subtask_index: 45 - - subtask: 'The left gripper places yogurt on the left side of the table - - ' - subtask_index: 46 - - subtask: Use the right gripper to grab the shampoo on the right side of the table - subtask_index: 47 - - subtask: Use the right gripper to grab the Rubik's Cube on the right side of the - table - subtask_index: 48 - - subtask: ' - - Pass the milk to the left gripper - - ' - subtask_index: 49 - - subtask: Pass the grape to the left gripper - subtask_index: 50 - - subtask: 'Pass the grape to the left gripper - - ' - subtask_index: 51 - - subtask: 'null' - subtask_index: 52 - atomic_actions: - - grasp - - lift - - lower - - handover - - takeover - robot_name: - - agilex_cobot_magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. 
- sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 69065 - dataset_size: 884.48 MB - data_structure: 'Agilex_Cobot_Magic_pass_object_right_to_left_green_tablecloth_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (86 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. 
- license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: 
null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/agilex_cobot_magic_pass_object_right_to_left_khaki_tablecloth.yaml b/dataset_info/agilex_cobot_magic_pass_object_right_to_left_khaki_tablecloth.yaml index 735d492aa055a4ba53e045cbbc512856ba866c98..121fecfa44dd2627e91405aeeeac72003ecb3331 100644 --- a/dataset_info/agilex_cobot_magic_pass_object_right_to_left_khaki_tablecloth.yaml +++ b/dataset_info/agilex_cobot_magic_pass_object_right_to_left_khaki_tablecloth.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. -objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -141,27 +141,30 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use the right gripper to pick up the item and transfer it from the - right gripper to the left gripper. +task_instruction: +- use the right gripper to pick up the item and transfer it from the right gripper + to the left gripper. 
sub_tasks: -- Unlabeled -- 'null' +- subtask: Unlabeled + subtask_index: 0 +- subtask: 'null' + subtask_index: 1 atomic_actions: - grasp - lift - lower - handover - takeover -robot_name: agilex_cobot_magic +robot_name: +- agilex_cobot_magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. -sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -169,13 +172,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -183,8 +183,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 98 total_frames: 64158 fps: 30 @@ -269,11 +268,9 @@ data_structure: 'Agilex_Cobot_Magic_pass_object_right_to_left_khaki_tablecloth_q |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:97 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -546,7 +543,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format 
based on LeRobot and is @@ -554,7 +551,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. @@ -581,184 +577,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: agilex_cobot_magic_pass_object_right_to_left_khaki_tablecloth - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: commercial_convenience - level2: supermarket - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use the right gripper to pick up the item and transfer it from the right gripper - to the left gripper. 
- sub_tasks: - - subtask: Unlabeled - subtask_index: 0 - - subtask: 'null' - subtask_index: 1 - atomic_actions: - - grasp - - lift - - lower - - handover - - takeover - robot_name: - - agilex_cobot_magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 64158 - dataset_size: 749.82 MB - data_structure: 'Agilex_Cobot_Magic_pass_object_right_to_left_khaki_tablecloth_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (86 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. 
- homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/agilex_cobot_magic_pass_object_right_to_left_red_tablecloth.yaml b/dataset_info/agilex_cobot_magic_pass_object_right_to_left_red_tablecloth.yaml index 043d0e940eac6a732e808ca3c47f6af11d55c9a8..235ecf9a7be8f60a7416d1a5ff4a2a8a867767a5 100644 --- a/dataset_info/agilex_cobot_magic_pass_object_right_to_left_red_tablecloth.yaml +++ b/dataset_info/agilex_cobot_magic_pass_object_right_to_left_red_tablecloth.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. 
-objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -141,67 +141,115 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use the right gripper to pick up the item and transfer it from the - right gripper to the left gripper. +task_instruction: +- use the right gripper to pick up the item and transfer it from the right gripper + to the left gripper. sub_tasks: -- The left gripper places milk on the left side of the table -- The left gripper places anmuxi on the left side of the table -- The left gripper places Rubik's Cube on the left side of the table -- Pass the blue blackboard erasure to the left gripper -- End -- Pass the eggplant to the left gripper -- Pass the Rubik's Cube to the left gripper -- The left gripper places yogurt on the left side of the table -- Pass the square chewing gum to the left gripper -- Use the right gripper to grab the grape on the right side of the table -- Pass the yogurt to the left gripper -- The left gripper places eggplant on the left side of the table -- The left gripper places grape on the left side of the table -- Use the right gripper to grab the milk on the right side of the table -- Pass the milk to the left gripper -- Use the right gripper to grab the eggplant on the right side of the table -- The left gripper places xx on the left side of the table -- Pass the shower sphereto the left gripper -- Use the right gripper to grab the shower sphere on the right side of the table -- Use the right gripper to grab the Anmuxi on the right side of the table. 
-- Use the right gripper to grab the xx on the right side of the table -- The left gripper places shower sphere on the left side of the table -- Use the right gripper to grab the blue blackboard erasure on the right side of the - table -- Use the right gripper to grab the plush banana on the right side of the table -- Pass the plush banana to the left gripper -- Pass the shower sphere to the left gripper -- Pass the to the yogurt left gripper -- The left gripper places blue blackboard erasure on the left side of the table -- The left gripper places plush banana on the left side of the table -- Use the right gripper to grab the square chewing gum on the right side of the table -- The left gripper places square chewing gum on the left side of the table -- The left gripper places eyeglass case on the left side of the table -- Pass the xx to the left gripper -- Pass the eyeglass case to the left gripper -- Use the right gripper to grab the yogurt on the right side of the table -- Use the right gripper to grab the eyeglass case on the right side of the table -- Pass the yogurt to the left gripper -- Use the right gripper to grab the Rubik's Cube on the right side of the table -- Pass the anmuxi to the left gripper -- Pass the grape to the left gripper -- 'null' +- subtask: The left gripper places milk on the left side of the table + subtask_index: 0 +- subtask: The left gripper places anmuxi on the left side of the table + subtask_index: 1 +- subtask: The left gripper places Rubik's Cube on the left side of the table + subtask_index: 2 +- subtask: Pass the blue blackboard erasure to the left gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: Pass the eggplant to the left gripper + subtask_index: 5 +- subtask: Pass the Rubik's Cube to the left gripper + subtask_index: 6 +- subtask: The left gripper places yogurt on the left side of the table + subtask_index: 7 +- subtask: Pass the square chewing gum to the left gripper + subtask_index: 8 +- 
subtask: Use the right gripper to grab the grape on the right side of the table + subtask_index: 9 +- subtask: Pass the yogurt to the left gripper + subtask_index: 10 +- subtask: The left gripper places eggplant on the left side of the table + subtask_index: 11 +- subtask: The left gripper places grape on the left side of the table + subtask_index: 12 +- subtask: Use the right gripper to grab the milk on the right side of the table + subtask_index: 13 +- subtask: Pass the milk to the left gripper + subtask_index: 14 +- subtask: Use the right gripper to grab the eggplant on the right side of the table + subtask_index: 15 +- subtask: The left gripper places xx on the left side of the table + subtask_index: 16 +- subtask: Pass the shower sphereto the left gripper + subtask_index: 17 +- subtask: Use the right gripper to grab the shower sphere on the right side of the + table + subtask_index: 18 +- subtask: Use the right gripper to grab the Anmuxi on the right side of the table. + subtask_index: 19 +- subtask: Use the right gripper to grab the xx on the right side of the table + subtask_index: 20 +- subtask: The left gripper places shower sphere on the left side of the table + subtask_index: 21 +- subtask: Use the right gripper to grab the blue blackboard erasure on the right + side of the table + subtask_index: 22 +- subtask: Use the right gripper to grab the plush banana on the right side of the + table + subtask_index: 23 +- subtask: Pass the plush banana to the left gripper + subtask_index: 24 +- subtask: Pass the shower sphere to the left gripper + subtask_index: 25 +- subtask: Pass the to the yogurt left gripper + subtask_index: 26 +- subtask: The left gripper places blue blackboard erasure on the left side of the + table + subtask_index: 27 +- subtask: The left gripper places plush banana on the left side of the table + subtask_index: 28 +- subtask: Use the right gripper to grab the square chewing gum on the right side + of the table + subtask_index: 29 +- 
subtask: The left gripper places square chewing gum on the left side of the table + subtask_index: 30 +- subtask: The left gripper places eyeglass case on the left side of the table + subtask_index: 31 +- subtask: Pass the xx to the left gripper + subtask_index: 32 +- subtask: Pass the eyeglass case to the left gripper + subtask_index: 33 +- subtask: Use the right gripper to grab the yogurt on the right side of the table + subtask_index: 34 +- subtask: Use the right gripper to grab the eyeglass case on the right side of the + table + subtask_index: 35 +- subtask: Pass the yogurt to the left gripper + subtask_index: 36 +- subtask: Use the right gripper to grab the Rubik's Cube on the right side of the + table + subtask_index: 37 +- subtask: Pass the anmuxi to the left gripper + subtask_index: 38 +- subtask: Pass the grape to the left gripper + subtask_index: 39 +- subtask: 'null' + subtask_index: 40 atomic_actions: - grasp - lift - lower - handover - takeover -robot_name: agilex_cobot_magic +robot_name: +- agilex_cobot_magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -209,13 +257,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -223,8 +268,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 99 total_frames: 58861 fps: 30 @@ -309,11 +353,9 @@ data_structure: 'Agilex_Cobot_Magic_pass_object_right_to_left_red_tablecloth_qce |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:98 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -586,7 +628,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -594,7 +636,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact 
us. support_info: For technical support, please open an issue on our GitHub repository. @@ -621,269 +662,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: agilex_cobot_magic_pass_object_right_to_left_red_tablecloth - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: commercial_convenience - level2: supermarket - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use the right gripper to pick up the item and transfer it from the right gripper - to the left gripper. 
- sub_tasks: - - subtask: The left gripper places milk on the left side of the table - subtask_index: 0 - - subtask: The left gripper places anmuxi on the left side of the table - subtask_index: 1 - - subtask: The left gripper places Rubik's Cube on the left side of the table - subtask_index: 2 - - subtask: Pass the blue blackboard erasure to the left gripper - subtask_index: 3 - - subtask: End - subtask_index: 4 - - subtask: Pass the eggplant to the left gripper - subtask_index: 5 - - subtask: Pass the Rubik's Cube to the left gripper - subtask_index: 6 - - subtask: The left gripper places yogurt on the left side of the table - subtask_index: 7 - - subtask: Pass the square chewing gum to the left gripper - subtask_index: 8 - - subtask: Use the right gripper to grab the grape on the right side of the table - subtask_index: 9 - - subtask: Pass the yogurt to the left gripper - subtask_index: 10 - - subtask: The left gripper places eggplant on the left side of the table - subtask_index: 11 - - subtask: The left gripper places grape on the left side of the table - subtask_index: 12 - - subtask: Use the right gripper to grab the milk on the right side of the table - subtask_index: 13 - - subtask: Pass the milk to the left gripper - subtask_index: 14 - - subtask: Use the right gripper to grab the eggplant on the right side of the table - subtask_index: 15 - - subtask: The left gripper places xx on the left side of the table - subtask_index: 16 - - subtask: Pass the shower sphereto the left gripper - subtask_index: 17 - - subtask: Use the right gripper to grab the shower sphere on the right side of - the table - subtask_index: 18 - - subtask: Use the right gripper to grab the Anmuxi on the right side of the table. 
- subtask_index: 19 - - subtask: Use the right gripper to grab the xx on the right side of the table - subtask_index: 20 - - subtask: The left gripper places shower sphere on the left side of the table - subtask_index: 21 - - subtask: Use the right gripper to grab the blue blackboard erasure on the right - side of the table - subtask_index: 22 - - subtask: Use the right gripper to grab the plush banana on the right side of the - table - subtask_index: 23 - - subtask: Pass the plush banana to the left gripper - subtask_index: 24 - - subtask: Pass the shower sphere to the left gripper - subtask_index: 25 - - subtask: Pass the to the yogurt left gripper - subtask_index: 26 - - subtask: The left gripper places blue blackboard erasure on the left side of the - table - subtask_index: 27 - - subtask: The left gripper places plush banana on the left side of the table - subtask_index: 28 - - subtask: Use the right gripper to grab the square chewing gum on the right side - of the table - subtask_index: 29 - - subtask: The left gripper places square chewing gum on the left side of the table - subtask_index: 30 - - subtask: The left gripper places eyeglass case on the left side of the table - subtask_index: 31 - - subtask: Pass the xx to the left gripper - subtask_index: 32 - - subtask: Pass the eyeglass case to the left gripper - subtask_index: 33 - - subtask: Use the right gripper to grab the yogurt on the right side of the table - subtask_index: 34 - - subtask: Use the right gripper to grab the eyeglass case on the right side of - the table - subtask_index: 35 - - subtask: Pass the yogurt to the left gripper - subtask_index: 36 - - subtask: Use the right gripper to grab the Rubik's Cube on the right side of the - table - subtask_index: 37 - - subtask: Pass the anmuxi to the left gripper - subtask_index: 38 - - subtask: Pass the grape to the left gripper - subtask_index: 39 - - subtask: 'null' - subtask_index: 40 - atomic_actions: - - grasp - - lift - - lower - - handover - 
- takeover - robot_name: - - agilex_cobot_magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 58861 - dataset_size: 1.43 GB - data_structure: 'Agilex_Cobot_Magic_pass_object_right_to_left_red_tablecloth_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (87 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. 
- homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/agilex_cobot_magic_pass_object_right_to_left_white_tablecloth.yaml b/dataset_info/agilex_cobot_magic_pass_object_right_to_left_white_tablecloth.yaml index 160e6dcc03e9b3176790dcbb8c05f841fcbd14c5..493006cfba9d3a057969595012b0e1dcce7bce24 100644 --- a/dataset_info/agilex_cobot_magic_pass_object_right_to_left_white_tablecloth.yaml +++ b/dataset_info/agilex_cobot_magic_pass_object_right_to_left_white_tablecloth.yaml @@ -1,18 +1,18 @@ -task_categories: &id001 +task_categories: - robotics -language: &id002 +language: - en -tags: &id003 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: &id004 +configs: - config_name: default data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. -extra_gated_fields: &id005 +extra_gated_fields: Company/Organization: type: text description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" @@ -30,7 +30,7 @@ scene_type: level5: null env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. 
-objects: &id006 +objects: - object_name: table level1: home_storage level2: table @@ -141,68 +141,170 @@ objects: &id006 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: use the right gripper to pick up the item and transfer it from the - right gripper to the left gripper. +task_instruction: +- use the right gripper to pick up the item and transfer it from the right gripper + to the left gripper. sub_tasks: -- The left gripper places milk on the left side of the table -- Pass the square chewing gun to the left gripper -- The left gripper places Rubik's Cube on the left side of the table -- Pass the yogurt to the left gripper -- The left gripper places grey towel on the left side of the table -- End -- Use the right gripper to grab the banana on the right side of the table -- The left gripper places bananal on the left side of the table -- Pass the shower spherer to the left gripper -- The left gripper places yogurt on the left side of the table -- Pass the eggplant to the left gripper -- Pass the eyeglass case to the left gripper -- Use the right gripper to grab the banana on the right side of the table -- The left gripper places eggplant on the left side of the table -- Pass the eggplant to the left gripper -- Use the right gripper to grab the milk on the right side of the table -- The left gripper places banana on the left side of the table -- Pass the Rubik's Cube to the left gripper -- Use the right gripper to grab the eggplant on the right side of the table -- Use the right gripper to grab the square chewing gun on the right side of the table -- Use the right gripper to grab the banana the on right side of the table -- Use the right gripper to grab the shower sphere on the right side of the table -- Use the right gripper to grab the bananal on the right side of the table -- The left gripper places shower sphere on the left side of the table -- 
Pass the grey towel to the left gripper -- Use the right gripper to grab the blue blackboard erasure on the right side of the - table -- Pass the banana to the left gripper -- Use the right gripper to grab the purple garbage bag on the right side of the table -- Pass the purple garbage bag to the left gripper -- The left gripper places blue blackboard erasure on the left side of the table -- Use the right gripper to grab the eggplant on the right side of the table -- Use the right gripper to grab the square chewing gum on the right side of the table -- The left gripper places square chewing gun on the left side of the table -- Pass the shower sphere to the left gripper -- The left gripper places eyeglass case on the left side of the table -- Use the right gripper to grab the yogurt on the right side of the table -- Pass the blue blackboard erasure to the left gripper -- Use the right gripper to grab the eyeglass case on the right side of the table -- Use the right gripper to grab the Rubik's Cube on the right side of the table -- Pass the milk to the left gripper -- The left gripper places purple garbage bag on the left side of the table -- 'null' +- subtask: The left gripper places milk on the left side of the table + subtask_index: 0 +- subtask: ' + + Pass the square chewing gun to the left gripper + + ' + subtask_index: 1 +- subtask: The left gripper places Rubik's Cube on the left side of the table + subtask_index: 2 +- subtask: ' + + Pass the yogurt to the left gripper + + ' + subtask_index: 3 +- subtask: The left gripper places grey towel on the left side of the table + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: Use the right gripper to grab the banana on the right side of the table + subtask_index: 6 +- subtask: The left gripper places bananal on the left side of the table + subtask_index: 7 +- subtask: ' + + Pass the shower spherer to the left gripper + + ' + subtask_index: 8 +- subtask: The left gripper places yogurt on the left side 
of the table + subtask_index: 9 +- subtask: ' + + Pass the eggplant to the left gripper + + ' + subtask_index: 10 +- subtask: ' + + Pass the eyeglass case to the left gripper + + ' + subtask_index: 11 +- subtask: Use the right gripper to grab the banana on the right side of the table + subtask_index: 12 +- subtask: The left gripper places eggplant on the left side of the table + subtask_index: 13 +- subtask: ' + + Pass the eggplant to the left gripper + + ' + subtask_index: 14 +- subtask: Use the right gripper to grab the milk on the right side of the table + subtask_index: 15 +- subtask: The left gripper places banana on the left side of the table + subtask_index: 16 +- subtask: ' + + Pass the Rubik''s Cube to the left gripper + + ' + subtask_index: 17 +- subtask: Use the right gripper to grab the eggplant on the right side of the table + subtask_index: 18 +- subtask: Use the right gripper to grab the square chewing gun on the right side + of the table + subtask_index: 19 +- subtask: Use the right gripper to grab the banana the on right side of the table + subtask_index: 20 +- subtask: Use the right gripper to grab the shower sphere on the right side of the + table + subtask_index: 21 +- subtask: Use the right gripper to grab the bananal on the right side of the table + subtask_index: 22 +- subtask: The left gripper places shower sphere on the left side of the table + subtask_index: 23 +- subtask: ' + + Pass the grey towel to the left gripper + + ' + subtask_index: 24 +- subtask: Use the right gripper to grab the blue blackboard erasure on the right + side of the table + subtask_index: 25 +- subtask: ' + + Pass the banana to the left gripper + + ' + subtask_index: 26 +- subtask: Use the right gripper to grab the purple garbage bag on the right side + of the table + subtask_index: 27 +- subtask: ' + + Pass the purple garbage bag to the left gripper + + ' + subtask_index: 28 +- subtask: The left gripper places blue blackboard erasure on the left side of the + table 
+ subtask_index: 29 +- subtask: Use the right gripper to grab the eggplant on the right side of the table + subtask_index: 30 +- subtask: Use the right gripper to grab the square chewing gum on the right side + of the table + subtask_index: 31 +- subtask: The left gripper places square chewing gun on the left side of the table + subtask_index: 32 +- subtask: ' + + Pass the shower sphere to the left gripper + + ' + subtask_index: 33 +- subtask: The left gripper places eyeglass case on the left side of the table + subtask_index: 34 +- subtask: Use the right gripper to grab the yogurt on the right side of the table + subtask_index: 35 +- subtask: ' + + Pass the blue blackboard erasure to the left gripper + + ' + subtask_index: 36 +- subtask: Use the right gripper to grab the eyeglass case on the right side of the + table + subtask_index: 37 +- subtask: Use the right gripper to grab the Rubik's Cube on the right side of the + table + subtask_index: 38 +- subtask: ' + + Pass the milk to the left gripper + + ' + subtask_index: 39 +- subtask: The left gripper places purple garbage bag on the left side of the table + subtask_index: 40 +- subtask: 'null' + subtask_index: 41 atomic_actions: - grasp - lift - lower - handover - takeover -robot_name: agilex_cobot_magic +robot_name: +- agilex_cobot_magic end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
-sensor_list: &id007 +sensor_list: - cam_head_rgb - cam_left_wrist_rgb - cam_right_wrist_rgb -came_info: &id008 +came_info: cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p @@ -210,13 +312,10 @@ came_info: &id008 pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz joint_rotation_dim: radian end_rotation_dim: radian end_translation_dim: meter -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id009 +annotations: - eef_acc_mag_annotation.jsonl - eef_direction_annotation.jsonl - eef_velocity_annotation.jsonl @@ -224,8 +323,7 @@ annotations: &id009 - gripper_mode_annotation.jsonl - scene_annotations.jsonl - subtask_annotations.jsonl -operation_platform_height: null -statistics: &id010 +statistics: total_episodes: 99 total_frames: 52998 fps: 30 @@ -310,11 +408,9 @@ data_structure: 'Agilex_Cobot_Magic_pass_object_right_to_left_white_tablecloth_q |-- info.yaml `-- README.md' -splits: &id011 +splits: train: 0:98 -data_path: data/chunk-{id}/episode_{id}.parquet -video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} -features: &id012 +features: observation.images.cam_head_rgb: dtype: video shape: @@ -587,7 +683,7 @@ features: &id012 dtype: float32 shape: - 2 -authors: &id013 +authors: contributed_by: - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is @@ -595,7 +691,6 @@ dataset_description: This dataset uses an extended format based on LeRobot and i homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact 
us. support_info: For technical support, please open an issue on our GitHub repository. @@ -622,325 +717,6 @@ additional_citations: 'If you use this dataset, please also consider citing: ' version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -raw: - task_categories: *id001 - language: *id002 - tags: *id003 - license: apache-2.0 - configs: *id004 - extra_gated_prompt: By accessing this dataset, you agree to cite the associated - paper in your research/publications—see the "Citation" section for details. You - agree to not use the dataset to conduct experiments that cause harm to human subjects. - extra_gated_fields: *id005 - codebase_version: v2.1 - dataset_name: agilex_cobot_magic_pass_object_right_to_left_white_tablecloth - dataset_uuid: 00000000-0000-0000-0000-000000000000 - scene_type: - level1: commercial_convenience - level2: supermarket - level3: null - level4: null - level5: null - env_type: Due to some reasons, this dataset temporarily cannot provide the environment - type information. - objects: *id006 - task_operation_type: Due to some reasons, this dataset temporarily cannot provide - the operation type information. - task_instruction: - - use the right gripper to pick up the item and transfer it from the right gripper - to the left gripper. 
- sub_tasks: - - subtask: The left gripper places milk on the left side of the table - subtask_index: 0 - - subtask: ' - - Pass the square chewing gun to the left gripper - - ' - subtask_index: 1 - - subtask: The left gripper places Rubik's Cube on the left side of the table - subtask_index: 2 - - subtask: ' - - Pass the yogurt to the left gripper - - ' - subtask_index: 3 - - subtask: The left gripper places grey towel on the left side of the table - subtask_index: 4 - - subtask: End - subtask_index: 5 - - subtask: Use the right gripper to grab the banana on the right side of the table - subtask_index: 6 - - subtask: The left gripper places bananal on the left side of the table - subtask_index: 7 - - subtask: ' - - Pass the shower spherer to the left gripper - - ' - subtask_index: 8 - - subtask: The left gripper places yogurt on the left side of the table - subtask_index: 9 - - subtask: ' - - Pass the eggplant to the left gripper - - ' - subtask_index: 10 - - subtask: ' - - Pass the eyeglass case to the left gripper - - ' - subtask_index: 11 - - subtask: Use the right gripper to grab the banana on the right side of the table - subtask_index: 12 - - subtask: The left gripper places eggplant on the left side of the table - subtask_index: 13 - - subtask: ' - - Pass the eggplant to the left gripper - - ' - subtask_index: 14 - - subtask: Use the right gripper to grab the milk on the right side of the table - subtask_index: 15 - - subtask: The left gripper places banana on the left side of the table - subtask_index: 16 - - subtask: ' - - Pass the Rubik''s Cube to the left gripper - - ' - subtask_index: 17 - - subtask: Use the right gripper to grab the eggplant on the right side of the table - subtask_index: 18 - - subtask: Use the right gripper to grab the square chewing gun on the right side - of the table - subtask_index: 19 - - subtask: Use the right gripper to grab the banana the on right side of the table - subtask_index: 20 - - subtask: Use the right gripper to 
grab the shower sphere on the right side of - the table - subtask_index: 21 - - subtask: Use the right gripper to grab the bananal on the right side of the table - subtask_index: 22 - - subtask: The left gripper places shower sphere on the left side of the table - subtask_index: 23 - - subtask: ' - - Pass the grey towel to the left gripper - - ' - subtask_index: 24 - - subtask: Use the right gripper to grab the blue blackboard erasure on the right - side of the table - subtask_index: 25 - - subtask: ' - - Pass the banana to the left gripper - - ' - subtask_index: 26 - - subtask: Use the right gripper to grab the purple garbage bag on the right side - of the table - subtask_index: 27 - - subtask: ' - - Pass the purple garbage bag to the left gripper - - ' - subtask_index: 28 - - subtask: The left gripper places blue blackboard erasure on the left side of the - table - subtask_index: 29 - - subtask: Use the right gripper to grab the eggplant on the right side of the - table - subtask_index: 30 - - subtask: Use the right gripper to grab the square chewing gum on the right side - of the table - subtask_index: 31 - - subtask: The left gripper places square chewing gun on the left side of the table - subtask_index: 32 - - subtask: ' - - Pass the shower sphere to the left gripper - - ' - subtask_index: 33 - - subtask: The left gripper places eyeglass case on the left side of the table - subtask_index: 34 - - subtask: Use the right gripper to grab the yogurt on the right side of the table - subtask_index: 35 - - subtask: ' - - Pass the blue blackboard erasure to the left gripper - - ' - subtask_index: 36 - - subtask: Use the right gripper to grab the eyeglass case on the right side of - the table - subtask_index: 37 - - subtask: Use the right gripper to grab the Rubik's Cube on the right side of the - table - subtask_index: 38 - - subtask: ' - - Pass the milk to the left gripper - - ' - subtask_index: 39 - - subtask: The left gripper places purple garbage bag on the left 
side of the table - subtask_index: 40 - - subtask: 'null' - subtask_index: 41 - atomic_actions: - - grasp - - lift - - lower - - handover - - takeover - robot_name: - - agilex_cobot_magic - end_effector_type: two_finger_gripper - tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation - type information. - sensor_list: *id007 - came_info: *id008 - depth_enabled: false - coordinate_definition: right-hand-frame - joint_rotation_dim: radian - end_rotation_dim: radian - end_translation_dim: meter - annotations: *id009 - statistics: *id010 - frame_num: 52998 - dataset_size: 508.79 MB - data_structure: 'Agilex_Cobot_Magic_pass_object_right_to_left_white_tablecloth_qced_hardlink/ - - |-- annotations - - | |-- eef_acc_mag_annotation.jsonl - - | |-- eef_direction_annotation.jsonl - - | |-- eef_velocity_annotation.jsonl - - | |-- gripper_activity_annotation.jsonl - - | |-- gripper_mode_annotation.jsonl - - | |-- scene_annotations.jsonl - - | `-- subtask_annotations.jsonl - - |-- data - - | `-- chunk-000 - - | |-- episode_000000.parquet - - | |-- episode_000001.parquet - - | |-- episode_000002.parquet - - | |-- episode_000003.parquet - - | |-- episode_000004.parquet - - | |-- episode_000005.parquet - - | |-- episode_000006.parquet - - | |-- episode_000007.parquet - - | |-- episode_000008.parquet - - | |-- episode_000009.parquet - - | |-- episode_000010.parquet - - | `-- episode_000011.parquet - - | `-- ... (87 more entries) - - |-- meta - - | |-- episodes.jsonl - - | |-- episodes_stats.jsonl - - | |-- info.json - - | `-- tasks.jsonl - - |-- videos - - | `-- chunk-000 - - | |-- observation.images.cam_head_rgb - - | |-- observation.images.cam_left_wrist_rgb - - | `-- observation.images.cam_right_wrist_rgb - - |-- info.yaml - - `-- README.md' - splits: *id011 - features: *id012 - authors: *id013 - dataset_description: This dataset uses an extended format based on LeRobot and is - fully compatible with LeRobot. 
- homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - contact_info: For questions, issues, or feedback regarding this dataset, please - contact us. - support_info: For technical support, please open an issue on our GitHub repository. - license_details: apache-2.0 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025},\n }\n" - additional_citations: 'If you use this dataset, please also consider citing: - - LeRobot Framework: https://github.com/huggingface/lerobot - - ' - version_info: Initial Release - data_path: data/chunk-{id}/episode_{id}.parquet - video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} - video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 -surface_material: null -light: null -noise_level: null -tactile_info: null -sound_info: null -base_transform: null -end_transform: null -data_collector: null -data_annotator: null -data_modifier: null -modify_logo: null -data_dowload: null -field_definiton: null -dataset_organization: null -dataset_email: null -dataset_license: null -frame_range: 10K-100K diff --git a/dataset_info/alpha_bot_2_press_the_button_b.yaml b/dataset_info/alpha_bot_2_press_the_button_b.yaml index b54173513c98fac8d918e8034b94150276e07fda..3e8ceaccc32e85807fa9b4a585c05eae0308ff56 100644 --- a/dataset_info/alpha_bot_2_press_the_button_b.yaml +++ b/dataset_info/alpha_bot_2_press_the_button_b.yaml @@ -1,13 +1,14 @@ -task_categories: &id004 +task_categories: - robotics -language: &id003 +language: - en -- zh -tags: &id007 +tags: - RoboCOIN - LeRobot license: apache-2.0 -configs: default +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper in your research/publications—see the "Citation" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects. @@ -19,13 +20,13 @@ extra_gated_fields: type: country description: e.g., "Germany", "China", "United States" codebase_version: v2.1 -dataset_name: press_the_button_b -dataset_uuid: 627d6f91-a0ba-46b7-96dd-616242c3a6af +dataset_name: alpha_bot_2_press_the_button_b +dataset_uuid: 00000000-0000-0000-0000-000000000000 scene_type: - level1: household +- home env_type: Due to some reasons, this dataset temporarily cannot provide the environment type information. 
-objects: &id001 +objects: - object_name: mineral_water level1: drinks level2: mineral_water @@ -46,233 +47,473 @@ objects: &id001 level5: null task_operation_type: Due to some reasons, this dataset temporarily cannot provide the operation type information. -task_result: 'null' -task_instruction: Touch the bottle with left gripper +task_instruction: +- after removing the water bottle from the table, press the button. sub_tasks: -- Touch the bottle with left gripper -- End -- Move the bottle away with right gripper -- Abnormal -- Press the button with right gripper -- 'null' +- subtask: Touch the bottle with left gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Move the bottle away with right gripper + subtask_index: 2 +- subtask: Abnormal + subtask_index: 3 +- subtask: Press the button with right gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 atomic_actions: - pressbutton - push -robot_name: alpha_bot_2 +robot_name: +- alpha_bot_2 end_effector_type: two_finger_gripper tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation type information. 
sensor_list: -- sensor_1 -- sensor_2 -- sensor_3 +- cam_chest_rgb +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb came_info: - observation.images.cam_name_1: resolution_1 - observation.images.cam_name_2: resolution_2 + cam_chest_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p depth_enabled: false coordinate_definition: right-hand-frame -origin_xyz: origin_xyz -joint_rotation_dim: joint_rotation_dim -end_rotation_dim: end_rotation_dim -end_translation_dim: end_translation_dim -base_robtation_dim: base_rotation_dim -base_translation_dim: base_translation_dim -annotations: &id005 - subtask_annotation: auto_generated - scene_annotation: auto_generated - eef_direction: auto_generated - eef_velocity: auto_generated - eef_acc_mag: auto_generated - gripper_mode: auto_generated - gripper_activity: auto_generated -operation_platform_height: null -statistics: &id002 - total_episodes: 65 - total_frames: 50921 - total_tasks: 1 - total_videos: 260 +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 34 + total_frames: 26258 + fps: 30 + total_tasks: 6 + total_videos: 136 total_chunks: 1 chunks_size: 1000 - fps: 30 -frame_num: 50921 -dataset_size: 943.7MB -data_structure: "alpha_bot_2_press_the_button_b_qced_hardlink/\n├── annotations/\n\ - │ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│\ - \ ├── 
eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n\ - │ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n\ - │ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ \ - \ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n\ - │ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n\ - │ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n \ - \ ├── observation.images.cam_chest_rgb/\n │ ├── episode_000000.mp4\n \ - \ │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n \ - \ │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └──\ - \ (...)\n ├── observation.images.cam_head_rgb/\n │ ├── episode_000000.mp4\n\ - \ │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n \ - \ │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └──\ - \ (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n\ - \ │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n \ - \ │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └──\ - \ (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n\ - \ ├── episode_000001.mp4\n ├── episode_000002.mp4\n \ - \ ├── episode_000003.mp4\n ├── episode_000004.mp4\n └──\ - \ (...)" + state_dim: 28 + action_dim: 28 + camera_views: 4 + dataset_size: 482.17 MB +frame_num: 26258 +dataset_size: 482.17 MB +data_structure: 'alpha_bot_2_press_the_button_b_qced_hardlink/ + + |-- annotations + + | |-- eef_acc_mag_annotation.jsonl + + | |-- eef_direction_annotation.jsonl + + | |-- eef_velocity_annotation.jsonl + + | |-- gripper_activity_annotation.jsonl + + | |-- gripper_mode_annotation.jsonl + + | |-- scene_annotations.jsonl + + | `-- subtask_annotations.jsonl + + |-- data + + | `-- chunk-000 + + | |-- episode_000000.parquet + + | |-- episode_000001.parquet + + | |-- episode_000002.parquet + + | |-- episode_000003.parquet + + | |-- episode_000004.parquet + + | |-- episode_000005.parquet + 
+ | |-- episode_000006.parquet + + | |-- episode_000007.parquet + + | |-- episode_000008.parquet + + | |-- episode_000009.parquet + + | |-- episode_000010.parquet + + | `-- episode_000011.parquet + + | `-- ... (22 more entries) + + |-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- videos + + | `-- chunk-000 + + | |-- observation.images.cam_chest_rgb + + | |-- observation.images.cam_head_rgb + + | |-- observation.images.cam_left_wrist_rgb + + | `-- observation.images.cam_right_wrist_rgb + + |-- info.yaml + + `-- README.md' splits: - train: train -data_path: data_path -video_path: video_path + train: 0:33 features: - features_placeholder: features_placeholder -authors: &id006 + observation.images.cam_chest_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + 
shape: + - 28 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_arm_joint_7_rad + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_arm_joint_7_rad + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + - left_gripper_open + - right_gripper_open + action: + dtype: float32 + shape: + - 28 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_arm_joint_7_rad + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_arm_joint_7_rad + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + - left_gripper_open + - right_gripper_open + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z 
+ - left_eef_ori_x + - left_eef_ori_y + - left_eef_ori_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_ori_x + - right_eef_ori_y + - right_eef_ori_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_ori_x + - left_eef_ori_y + - left_eef_ori_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_ori_x + - right_eef_ori_y + - right_eef_ori_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + shape: + - 2 + dtype: float32 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + shape: + - 2 + dtype: float32 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + shape: + - 2 + dtype: int32 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + shape: + - 2 + dtype: int32 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + shape: + - 2 + dtype: int32 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + shape: + - 2 + dtype: int32 +authors: contributed_by: - - name: RoboCOIN - url: https://flagopen.github.io/RoboCOIN/ - affiliation: RoboCOIN Team - annotated_by: - - name: 
RoboCOIN - url: https://flagopen.github.io/RoboCOIN/ - affiliation: RoboCOIN Team + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) dataset_description: This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot. homepage: https://flagopen.github.io/RoboCOIN/ paper: https://arxiv.org/abs/2511.17441 repository: https://github.com/FlagOpen/RoboCOIN -contact_detail: robocoin@baai.ac.cn contact_info: For questions, issues, or feedback regarding this dataset, please contact us. support_info: For technical support, please open an issue on our GitHub repository. license_details: apache-2.0 -citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu\ - \ Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng\ - \ Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu,\ - \ Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu\ - \ Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng,\ - \ Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing\ - \ Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang,\ - \ YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang,\ - \ Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang\ - \ Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint\ - \ arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n 
year={2025}\n\ - \ }" +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" additional_citations: 'If you use this dataset, please also consider citing: LeRobot Framework: https://github.com/huggingface/lerobot ' version_info: Initial Release -video_url: null -raw: - path: alpha_bot_2_press_the_button_b - dataset_name: press_the_button_b - robot_type: '' - end_effector_type: - - two_finger_gripper - scene_type: [] - atomic_actions: - - pressbutton - - push - tasks: Touch the bottle with left gripper - objects: *id001 - operation_platform_height: null - frame_range: 0-50921 - dataset_size: 943.7MB - statistics: *id002 - dataset_uuid: 627d6f91-a0ba-46b7-96dd-616242c3a6af - 
language: *id003 - task_categories: *id004 - sub_tasks: - - Touch the bottle with left gripper - - End - - Move the bottle away with right gripper - - Abnormal - - Press the button with right gripper - - 'null' - annotations: *id005 - authors: *id006 - homepage: https://flagopen.github.io/RoboCOIN/ - paper: https://arxiv.org/abs/2511.17441 - repository: https://github.com/FlagOpen/RoboCOIN - license: apache-2.0 - tags: *id007 - citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ - \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\ - \ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\ - \ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\ - \ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\ - \ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du,\ - \ Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen,\ - \ Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong\ - \ Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang,\ - \ Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun\ - \ Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao\ - \ Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen,\ - \ Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao\ - \ Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao,\ - \ Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n\ - \ journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n\ - \ year={2025}\n }" - depth_enabled: false - data_schema: "alpha_bot_2_press_the_button_b_qced_hardlink/\n├── annotations/\n\ - │ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n\ - │ ├── 
eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n\ - │ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n\ - │ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ \ - \ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├──\ - \ episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n\ - │ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n\ - \ └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │\ - \ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├──\ - \ episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n\ - \ │ └── (...)\n ├── observation.images.cam_head_rgb/\n \ - \ │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ \ - \ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├──\ - \ episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n\ - \ │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n \ - \ │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │\ - \ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n\ - \ ├── episode_000000.mp4\n ├── episode_000001.mp4\n \ - \ ├── episode_000002.mp4\n ├── episode_000003.mp4\n \ - \ ├── episode_000004.mp4\n └── (...)" - structure: "alpha_bot_2_press_the_button_b_qced_hardlink/\n├── annotations/\n│ \ - \ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n\ - │ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n\ - │ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n\ - │ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ \ - \ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├──\ - \ episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n\ - │ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n\ - \ └── chunk-000/\n ├── observation.images.cam_chest_rgb/\n │\ - \ ├── episode_000000.mp4\n 
│ ├── episode_000001.mp4\n │ ├──\
- \ episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n\
- \ │ └── (...)\n ├── observation.images.cam_head_rgb/\n \
- \ │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ \
- \ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├──\
- \ episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n\
- \ │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n \
- \ │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │\
- \ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n\
- \ ├── episode_000000.mp4\n ├── episode_000001.mp4\n \
- \ ├── episode_000002.mp4\n ├── episode_000003.mp4\n \
- \ ├── episode_000004.mp4\n └── (...)"
-surface_material: null
-light: null
-noise_level: null
-tactile_info: null
-sound_info: null
-base_transform: null
-end_transform: null
-data_collector: null
-data_annotator: null
-data_modifier: null
-modify_logo: null
-data_dowload: null
-field_definiton: null
-dataset_organization: null
-dataset_email: null
-dataset_license: null
-frame_range: 10K-100K
+data_path: data/chunk-{id}/episode_{id}.parquet
+video_path: videos/chunk-{id}/observation.images.cam_chest_rgb/episode_{id}.mp4
+video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4