RoboChallengeAI committed on
Commit
e74cdb1
·
verified ·
1 Parent(s): 64fe581

Upload convert_to_lerobot.py

Browse files
Files changed (1) hide show
  1. convert_to_lerobot.py +248 -0
convert_to_lerobot.py ADDED
@@ -0,0 +1,248 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Minimal example: convert dataset to the LeRobot format.
3
+
4
+ CLI Example (using the *arrange_flowers* task as an example):
5
+ python convert_to_lerobot.py \
6
+ --repo-name arrange_flowers_repo \
7
+ --raw-dataset /path/to/arrange_flowers \
8
+ --frame-interval 1 \
9
+
10
+ Notes:
11
+ - If you plan to push to the Hugging Face Hub later, handle that outside this script.
12
+ """
13
+
14
+ import argparse
15
+ import json
16
+ import shutil
17
+ from pathlib import Path
18
+ from typing import Any, Dict, List
19
+
20
+ import cv2
21
+ import numpy as np
22
+ from lerobot.common.datasets.lerobot_dataset import HF_LEROBOT_HOME, LeRobotDataset
23
+
24
+
25
def load_jsonl(path: Path) -> List[Dict[str, Any]]:
    """Read a JSON Lines file and return its records as a list of dicts."""
    with path.open("r", encoding="utf-8") as handle:
        # One JSON document per line; decode each line at C speed via map().
        return list(map(json.loads, handle))
29
+
30
+
31
def create_lerobot_dataset(
    repo_name: str,
    robot_type: str,
    fps: float,
    height: int,
    width: int,
) -> LeRobotDataset:
    """
    Create an empty LeRobot dataset with this converter's feature schema:
    two RGB camera streams, a 7-D proprioceptive state, and an 8-D action.
    """
    # Both cameras share the same image specification.
    image_spec = {
        "dtype": "image",
        "shape": (height, width, 3),
        "names": ["height", "width", "channel"],
    }
    features = {
        "global_image": dict(image_spec),
        "wrist_image": dict(image_spec),
        "state": {
            "dtype": "float32",
            "shape": (7,),  # for joint_positions and gripper width
            "names": ["state"],
        },
        "actions": {
            "dtype": "float32",
            "shape": (8,),  # for ee_positions and gripper width
            "names": ["actions"],
        },
    }
    return LeRobotDataset.create(
        repo_id=repo_name,
        robot_type=robot_type,
        fps=fps,
        features=features,
        image_writer_threads=32,
        image_writer_processes=16,
    )
71
+
72
+
73
def process_episode_dir(
    episode_path: Path,
    dataset: LeRobotDataset,
    frame_interval: int,
    prompt: str,
) -> None:
    """
    Process a single episode directory and append frames to the given dataset.

    episode_path : Path
        Episode directory containing `states/states.jsonl` and `videos/*.mp4`.
    dataset : LeRobotDataset
        Target dataset to which frames are added.
    frame_interval : int
        Sampling stride (>=1).
    prompt : str
        Language instruction of this episode.

    Raises
    ------
    RuntimeError
        If a video frame cannot be decoded.
    AssertionError
        If the state count and the two videos' frame counts disagree.
    """
    # Modify if your dataset consists of bimanual data.
    states_path = episode_path / "states" / "states.jsonl"
    videos_dir = episode_path / "videos"

    ep_states = load_jsonl(states_path)

    # adjust these filenames to match your dataset's actual naming.
    wrist_video = cv2.VideoCapture(str(videos_dir / "cam_arm_rgb.mp4"))
    global_video = cv2.VideoCapture(str(videos_dir / "cam_global_rgb.mp4"))

    try:
        wrist_frames_count = int(wrist_video.get(cv2.CAP_PROP_FRAME_COUNT))
        global_frames_count = int(global_video.get(cv2.CAP_PROP_FRAME_COUNT))
        n_states = len(ep_states)

        # All three streams must be in lockstep: one state per video frame.
        assert (
            n_states == wrist_frames_count == global_frames_count
        ), (
            f"Mismatch in episode {episode_path.name}: "
            f"states={n_states}, wrist={wrist_frames_count}, "
            f"global={global_frames_count}"
        )

        def _read_rgb(capture, name: str, frame_idx: int):
            # Decode the next frame and convert OpenCV's BGR layout to RGB.
            # Resize here as needed, but update the LeRobot feature shape
            # accordingly. BUG FIX: the original discarded read()'s success
            # flag, so a decode failure crashed later inside cvtColor.
            ok, frame = capture.read()
            if not ok:
                raise RuntimeError(
                    f"Failed to decode frame {frame_idx} of {name} "
                    f"in episode {episode_path.name}"
                )
            return cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)

        next_frame = 0  # index of the frame the next read() will return
        for idx in range(frame_interval, n_states, frame_interval):
            state_idx = idx - frame_interval

            # `actions` is the pose at `idx`; `state` is the proprio reading
            # one stride earlier, at `state_idx`.
            pose = np.concatenate(
                (np.asarray(ep_states[idx]["ee_positions"]),
                 [ep_states[idx]["gripper_width"]])
            )
            last_pose = np.concatenate(
                (np.asarray(ep_states[state_idx]["joint_positions"]),
                 [ep_states[state_idx]["gripper_width"]])
            )

            # Videos decode sequentially, one frame per read(). BUG FIX: the
            # original read exactly one frame per loop iteration, so for
            # frame_interval > 1 the images drifted out of sync with the
            # states. Discard intermediate frames so the next read() yields
            # frame `state_idx`, keeping the image aligned with `state`.
            while next_frame < state_idx:
                wrist_video.grab()
                global_video.grab()
                next_frame += 1

            wrist_image = _read_rgb(wrist_video, "wrist video", state_idx)
            global_image = _read_rgb(global_video, "global video", state_idx)
            next_frame += 1

            dataset.add_frame(
                {
                    "global_image": global_image,
                    "wrist_image": wrist_image,
                    "state": last_pose.astype(np.float32, copy=False),
                    "actions": pose.astype(np.float32, copy=False),
                    "task": prompt,
                }
            )
    finally:
        # Release decoder handles even if the length check or decoding fails
        # (the original leaked both captures on any exception).
        wrist_video.release()
        global_video.release()

    dataset.save_episode()
146
+
147
+
148
def main(
    repo_name: str,
    raw_dataset: Path,
    frame_interval: int = 1,
    overwrite_repo: bool = False,
) -> None:
    """
    Convert a dataset directory into LeRobot format.

    repo_name : str
        Output repo/dataset name (saved under $LEROBOT_HOME / repo_name).
    raw_dataset : Path
        Path to the raw dataset root directory.
    frame_interval : int, default=1
        Sample every N frames (kept identical).
    overwrite_repo : bool, default=False
        If True, remove the existing dataset directory before writing.

    Raises
    ------
    ValueError
        If `frame_interval` is less than 1.
    """
    # Validate input with a real exception: `assert` is stripped under -O.
    if frame_interval < 1:
        raise ValueError("frame_interval must be >= 1")

    # Optionally clear any previous conversion output.
    dst_dir = HF_LEROBOT_HOME / repo_name
    if overwrite_repo and dst_dir.exists():
        print(f"removing existing dataset at {dst_dir}")
        shutil.rmtree(dst_dir)

    # Load task metadata: robot type, video geometry, language prompt.
    task_info_path = raw_dataset / "meta" / "task_info.json"
    with task_info_path.open("r", encoding="utf-8") as f:
        task_info = json.load(f)

    robot_type = task_info["task_desc"]["task_tag"][2]  # e.g. "ARX5"
    video_info = task_info["video_info"]
    video_info["width"] = 640  # TODO: derive from task_info or actual videos
    video_info["height"] = 480
    fps = float(video_info["fps"])

    prompt = task_info["task_desc"]["prompt"]

    # Create dataset, define feature in the form you need.
    # - proprio is stored in `state` and actions in `actions`
    # - LeRobot assumes that dtype of image data is `image`
    dataset = create_lerobot_dataset(
        repo_name=repo_name,
        robot_type=robot_type,
        fps=fps,
        height=video_info["height"],
        width=video_info["width"],
    )

    # Convert every episode directory under <raw_dataset>/data.
    # sorted() makes the episode order deterministic; iterdir() order is
    # filesystem-dependent.
    data_root = raw_dataset / "data"
    for episode_path in sorted(data_root.iterdir()):
        if not episode_path.is_dir():
            continue
        print(f"Processing episode: {episode_path.name}")
        process_episode_dir(
            episode_path=episode_path,
            dataset=dataset,
            frame_interval=frame_interval,
            prompt=prompt,
        )

    dataset.consolidate(run_compute_stats=False)
    # BUG FIX: the original was a plain string literal missing the f-prefix,
    # so it printed the literal text "{dst_dir}".
    print(f"Done. Dataset saved to: {dst_dir}")
213
+
214
+
215
if __name__ == "__main__":
    # CLI entry point: parse arguments, then run the conversion.
    cli = argparse.ArgumentParser(
        description="Convert a custom dataset to LeRobot format."
    )
    cli.add_argument(
        "--repo-name",
        required=True,
        help="Name of the output dataset (under $LEROBOT_HOME).",
    )
    cli.add_argument(
        "--raw-dataset",
        required=True,
        type=str,
        help="Path to the raw dataset root.",
    )
    cli.add_argument(
        "--frame-interval",
        type=int,
        default=1,
        help="Sample every N frames. Default: 1",
    )
    cli.add_argument(
        "--overwrite-repo",
        action="store_true",
        help="Remove existing output directory if it exists.",
    )
    opts = cli.parse_args()

    main(
        repo_name=opts.repo_name,
        raw_dataset=Path(opts.raw_dataset),
        frame_interval=opts.frame_interval,
        overwrite_repo=opts.overwrite_repo,
    )