Dataset Viewer
Auto-converted to Parquet
Search is not available for this dataset
image
imagewidth (px)
120
640
objects
dict
{ "bbox": [ [ 199.84, 200.46, 77.71, 70.88 ], [ 325.27, 104.38, 33.67, 105.99 ], [ 168.85, 90.98, 31.42, 88.73 ], [ 1.92, 87.91, 34.95, 175.35 ], [ 346.58, 1.07, 292.01...
{ "bbox": [ [ 234.22, 317.11, 149.39, 38.55 ], [ 239.48, 347.87, 160, 57.81 ], [ 296.65, 388.33, 1.03, 0 ], [ 251.87, 333.42, 125.94, 22.71 ], [ 128.24, 192.99, 511.76,...
{ "bbox": [ [ 337.02, 244.46, 66.47, 66.75 ], [ 255.84, 257.96, 181.8, 113.63 ], [ 509.65, 215.73, 47.88, 24.6 ], [ 22.66, 206.54, 21.9, 23.93 ], [ 354.36, 162.3, 30.66...
{ "bbox": [ [ 213.81, 192.39, 53.94, 70.28 ], [ 270.63, 87.69, 228.39, 79.21 ], [ 0, 102.42, 137.1, 71.77 ], [ 300.84, 114.61, 88.49, 129.77 ], [ 355.77, 129.35, 144.23...
{ "bbox": [ [ 324.66, 247.92, 250.87, 181.02 ], [ 0, 69.09, 126.84, 169.21 ] ], "segmentation": [ [ [ 392.54, 324.66, 351.22, 324.66, 337.45, 335.48, 324.66, 327.61, 33...
{ "bbox": [ [ 260.18, 252.76, 67.91, 53.3 ], [ 382.02, 411.47, 134.59, 10.64 ], [ 220.6, 411.06, 154.63, 10.25 ], [ 192.14, 406.99, 362.56, 82.84 ] ], "segmentation": [ [ [ ...
{ "bbox": [ [ 61.87, 276.25, 296.42, 103.18 ], [ 2.75, 3.66, 159.4, 312.4 ], [ 295.55, 93.96, 18.42, 58.83 ], [ 326.94, 97.05, 13.55, 25.93 ], [ 356.62, 95.47, 15.71, ...
{ "bbox": [ [ 1.29, 324.14, 257.54, 309.05 ], [ 321.98, 2.79, 105.02, 333.05 ], [ 21.61, 171.22, 405.39, 457.14 ] ], "segmentation": [ [ [ 1.29, 416.12, 6.44, 532.01, 2...
{ "bbox": [ [ 55.38, 132.63, 464.46, 247.77 ], [ 279.44, 43.06, 61.5, 59.15 ], [ 343.72, 17.01, 82, 50.48 ] ], "segmentation": [ [ [ 75.79, 376.52, 103, 351.26, 107.85,...
{ "bbox": [ [ 150, 320.98, 121.97, 50 ], [ 78.42, 166.1, 17.79, 38.91 ], [ 94.66, 169.96, 14.11, 34.55 ], [ 133.45, 77.28, 167.23, 292.23 ], [ 116.55, 74.75, 228.89, ...
{ "bbox": [ [ 365.75, 0.86, 274.25, 353.68 ], [ 0, 0.96, 294.58, 217.82 ] ], "segmentation": [ [ [ 640, 190.64, 640, 190.64, 622.82, 176.84, 606.43, 183.74, 595.21, ...
{ "bbox": [ [ 346.38, 54.94, 127.67, 126.1 ], [ 169.73, 105.41, 28.4, 35.75 ], [ 40.74, 40.16, 85.56, 37.25 ], [ 65.51, 29.25, 88.82, 220.49 ], [ 12.22, 118.15, 185.09,...
{ "bbox": [ [ 289.47, 214.76, 52.47, 72.72 ], [ 283.12, 214.32, 198.68, 252.89 ], [ 146.02, 125.28, 79.47, 65.02 ], [ 422.77, 107.08, 5.78, 27.39 ], [ 425.99, 108.57, 8...
{ "bbox": [ [ 232.68, 103.89, 183.73, 103.03 ], [ 0, 0, 640, 419.52 ] ], "segmentation": [ [ [ 263.59, 206.92, 279.04, 165.71, 279.04, 157.98, 276.46, 156.26, 261.87, ...
{ "bbox": [ [ 83.39, 0.97, 293.82, 305.45 ], [ 318.39, 107.03, 321.29, 281.62 ], [ 2.06, 26.84, 471.75, 334.45 ] ], "segmentation": [ [ [ 317.09, 306.42, 239.52, 270.55, ...
{ "bbox": [ [ 0, 81.6, 324.14, 287.71 ], [ 179.44, 63.65, 270.18, 311.35 ], [ 223.05, 256.17, 167.53, 118.83 ] ], "segmentation": [ [ [ 286.18, 298.71, 263.41, 327.56, ...
{ "bbox": [ [ 170.47, 179.11, 147.58, 319.8 ], [ 51.09, 125.82, 151.38, 372.91 ], [ 222.92, 143.43, 15.47, 39.41 ], [ 334.66, 159.99, 33.18, 21.52 ], [ 257.66, 80.96, 1...
{ "bbox": [ [ 241.38, 142.06, 398.62, 284.94 ], [ 1.84, 105.37, 546.63, 274.24 ], [ 215.37, 0.5, 154.4, 421.67 ], [ 91.84, 63.08, 86.38, 82.79 ], [ 557.91, 84.77, 73.6,...
{ "bbox": [ [ 182.24, 38.39, 302.51, 275.68 ], [ 26.7, 128.63, 582.29, 320.41 ], [ 447.76, 0.86, 126.31, 153.01 ], [ 110.5, 2.06, 113.99, 73.94 ], [ 0, 0, 119.93, ...
{ "bbox": [ [ 102.7, 8.33, 377.51, 449.67 ], [ 39.11, 197.61, 596.94, 255.24 ], [ 215.11, 0, 164.67, 102.92 ], [ 546.71, 1.55, 93.29, 450.78 ], [ 0, 1.44, 41.25, ...
{ "bbox": [ [ 46.64, 91.34, 330.36, 307.04 ], [ 0, 106.43, 110.74, 214.29 ] ], "segmentation": [ [ [ 180.08, 126.32, 154.17, 154.82, 137.33, 194.98, 129.55, 214.41, 130...
{ "bbox": [ [ 98.15, 284.73, 133.7, 122.69 ], [ 1.88, 92.99, 637.8, 171.9 ], [ 35.53, 109.1, 62.66, 252.48 ] ], "segmentation": [ [ [ 187, 299.96, 170.92, 313.5, 153.15...
{ "bbox": [ [ 229.18, 0.94, 236.01, 419.01 ] ], "segmentation": [ [ [ 280.48, 2.65, 326.66, 22.32, 354.88, 49.68, 367.71, 71.06, 370.27, 82.18, 405.33, 138.61, 426.71, ...
{ "bbox": [ [ 0, 38.76, 369.58, 592.35 ] ], "segmentation": [ [ [ 0, 38.76, 38.63, 70.95, 55.37, 87.69, 78.55, 107.01, 88.85, 125.03, 105.59, 116.02, 151.95, 13...
{ "bbox": [ [ 179.15, 367.34, 122.47, 91.09 ], [ 261.03, 309.48, 100.36, 57.74 ], [ 182.02, 202.28, 15.89, 21.07 ], [ 0.98, 257.78, 30.57, 140.63 ], [ 138.11, 181.87, 2...
{ "bbox": [ [ 266.67, 0, 201.69, 269.58 ], [ 250.18, 254.3, 203.64, 203.64 ], [ 106.79, 0, 201.7, 217.89 ] ], "segmentation": [ [ [ 281.21, 264.73, 283.15, 226.91, 266....
{ "bbox": [ [ 343.61, 137.36, 81.69, 101.1 ], [ 0, 4.03, 638.97, 332.67 ] ], "segmentation": [ [ [ 346.32, 198.29, 343.61, 185.2, 344.96, 167.6, 345.41, 152.7, 351.73, ...
{ "bbox": [ [ 271.53, 59.1, 248.41, 360.62 ], [ 149.9, 42.19, 130.2, 185.01 ], [ 10.03, 1.82, 628.25, 422.18 ] ], "segmentation": [ [ [ 284.38, 101.07, 278.38, 122.49, ...
{ "bbox": [ [ 384, 191.23, 244.29, 156 ], [ 442.49, 145.66, 197.51, 108.11 ], [ 129.87, 1.32, 22.96, 41.7 ], [ 154.19, 0, 27.02, 47.13 ], [ 388.89, 0, 250.81, 205...
{ "bbox": [ [ 472.91, 184.62, 99.49, 82.41 ], [ 189.47, 38.12, 285.77, 271.97 ], [ 292.09, 165.44, 42.12, 59.42 ] ], "segmentation": [ [ [ 480.97, 188.12, 495.28, 184.62, ...
{ "bbox": [ [ 139.26, 297.19, 175.59, 212.17 ], [ 97.13, 373.43, 118.63, 135.73 ], [ 1.37, 11.81, 425.63, 619.4 ], [ 168.71, 211.31, 107.14, 108.08 ] ], "segmentation": [ [ [ ...
{ "bbox": [ [ 153.26, 2.77, 274.2, 339.75 ], [ 49.29, 145.83, 549.43, 306.04 ], [ 447.97, 1.09, 188.67, 258.57 ], [ 1.03, 0, 66.75, 55.46 ], [ 340.12, 0.29, 108.55, ...
{ "bbox": [ [ 465.31, 216.49, 26.44, 46.16 ], [ 134.28, 102.62, 283.77, 241.5 ], [ 167.11, 74.51, 72.06, 125.9 ], [ 48.51, 56.64, 50.74, 186.47 ], [ 1.24, 58.05, 56.07,...
{ "bbox": [ [ 301.59, 185.33, 40.43, 57.29 ], [ 114.14, 137.5, 404.41, 162.67 ], [ 0.94, 86.21, 144.31, 73.09 ], [ 228.46, 47.51, 354.59, 107.95 ] ], "segmentation": [ [ [ ...
{ "bbox": [ [ 151.27, 270.22, 222.39, 362.02 ], [ 59.18, 89.46, 101.85, 404.65 ] ], "segmentation": [ [ [ 164.2, 358.14, 156.44, 346.51, 151.27, 337.45, 151.27, 325.82, ...
{ "bbox": [ [ 242.9, 270.97, 190.65, 110.32 ], [ 39.06, 273.5, 415.24, 201.46 ], [ 270.9, 216.93, 166.87, 84.46 ], [ 292.08, 131.11, 68.73, 168.59 ], [ 68.8, 98.88, 92....
{ "bbox": [ [ 77.76, 454.35, 57.15, 75.92 ], [ 30.2, 209.98, 217.17, 398.38 ], [ 33.08, 129.01, 225.8, 422.83 ], [ 0.11, 0.27, 62.4, 117.98 ], [ 71.8, 0.1, 39.85, ...
{ "bbox": [ [ 222.86, 186.51, 30.04, 41.67 ], [ 344.44, 96, 29.49, 85.27 ], [ 373.09, 94.31, 112.68, 96.72 ] ], "segmentation": [ [ [ 224.89, 210.97, 224.72, 210.13, 22...
{ "bbox": [ [ 55.29, 168.27, 70.63, 51.07 ], [ 244.65, 190.11, 18.24, 27.63 ], [ 151.94, 134.81, 205.06, 142.24 ], [ 20.9, 86.97, 39.1, 125.39 ], [ 364.6, 98.93, 19.05,...
{ "bbox": [ [ 203.04, 198.21, 272.92, 106.19 ], [ 122.3, 223.32, 72.28, 57.4 ], [ 220.26, 225.79, 45.29, 48.68 ] ], "segmentation": [ [ [ 243.92, 236.97, 244.45, 235.38, ...
{ "bbox": [ [ 281.97, 291.54, 36.32, 42.24 ], [ 384, 301.51, 151, 156.9 ], [ 182.43, 229.06, 38.6, 102.74 ], [ 263.78, 239.21, 30.44, 73.36 ], [ 114.67, 255.05, 40.69, ...
{ "bbox": [ [ 47.52, 62.87, 589.57, 412.12 ], [ 5.16, 152.84, 631.74, 320 ], [ 472.53, 119.18, 26.16, 12.64 ] ], "segmentation": [ [ [ 224.97, 74.51, 253.09, 68.69, 285...
{ "bbox": [ [ 166.24, 108.24, 101.2, 192.27 ], [ 319.56, 103.08, 320.44, 183.65 ], [ 3.82, 114.77, 302.82, 105.08 ], [ 0, 237.97, 133.38, 171.76 ], [ 237.54, 64.92, 284...
{ "bbox": [ [ 321.92, 292.11, 159.76, 55.71 ], [ 177.58, 26.91, 230.31, 346.54 ], [ 301.96, 145.62, 314.69, 123.83 ], [ 142.06, 229.24, 425.11, 126.99 ], [ 149.56, 276.38, ...
{ "bbox": [ [ 0, 1.06, 457.79, 474.2 ], [ 307.42, 218.79, 177.97, 220.04 ] ], "segmentation": [ [ [ 0, 226.09, 0, 474.3, 53.12, 475.26, 44.43, 469.47, 28.01, 46...
{ "bbox": [ [ 217.41, 159.26, 54.72, 44.77 ], [ 96.87, 182.13, 290.61, 181.77 ], [ 161.8, 126.2, 88.45, 257.8 ], [ 593.12, 161.12, 29.8, 76.45 ], [ 468.48, 63.23, 22.3,...
{ "bbox": [ [ 163.17, 297.02, 119.06, 242.76 ], [ 249.98, 54.21, 103.9, 128 ], [ 336.73, 51.7, 35.74, 34.85 ], [ 371.29, 122.47, 18.69, 32.22 ], [ 67.21, 81.01, 45.01, ...
{ "bbox": [ [ 248.24, 33.34, 73.94, 84.51 ], [ 144.55, 76.78, 230.41, 89.5 ], [ 177.42, 154.03, 319.35, 220.97 ], [ 39.69, 41.1, 128.78, 312.64 ], [ 15.75, 38.34, 63.7,...
{ "bbox": [ [ 184.54, 338.14, 176.11, 295.08 ], [ 34.53, 18.09, 174.84, 90.48 ], [ 120.48, 79.15, 78.73, 133.58 ], [ 58.74, 99.44, 80.71, 103.78 ] ], "segmentation": [ [ [ ...
{ "bbox": [ [ 230.53, 322.28, 136.01, 55.79 ], [ 238.48, 256.19, 169.31, 106.87 ], [ 201.79, 244.71, 30.17, 20.78 ], [ 46.69, 253.59, 36.74, 19.91 ], [ 0, 253.68, 31.49...
{ "bbox": [ [ 241.49, 188.11, 363.79, 317.15 ], [ 346.49, 200.28, 169.19, 148.45 ], [ 151.66, 1.73, 72.59, 70.13 ], [ 232.41, 0, 118.51, 332.88 ], [ 338.27, 4.6, 104.7,...
{ "bbox": [ [ 90.18, 135.76, 378.18, 339.39 ], [ 0, 69.03, 141.3, 373.22 ], [ 557.66, 1.15, 82.34, 472.45 ] ], "segmentation": [ [ [ 173.58, 474.18, 172.61, 446.06, 153...
{ "bbox": [ [ 307.24, 117.9, 218.59, 303.8 ], [ 159.53, 191.17, 44.58, 24.49 ], [ 137.14, 181.98, 50.26, 29.55 ], [ 61.57, 190.05, 23.6, 15.55 ], [ 56.29, 169.95, 116.0...
{ "bbox": [ [ 166.86, 107.04, 161.18, 322.37 ], [ 15.82, 2.97, 437.18, 625.62 ], [ 26.05, 232.89, 426.95, 407.11 ] ], "segmentation": [ [ [ 166.86, 429.41, 167.87, 407.25, ...
{ "bbox": [ [ 244.25, 150.35, 24.21, 57.87 ], [ 246.26, 154.21, 9.72, 8.82 ], [ 84.24, 210.99, 99.47, 142 ], [ 229.16, 203.02, 69.95, 150.36 ], [ 239.02, 231.83, 103.62...
End of preview. Expand in Data Studio

MS-COCO2017

Use the dataset

from random import randint
from datasets import load_dataset
from PIL import Image, ImageDraw, ImageFont

# Stream the validation split from the Hub; streaming avoids downloading
# the whole dataset just to inspect a few examples.
ds = load_dataset("ariG23498/coco2017", streaming=True, split="validation")

# Pull a single example off the streaming iterator for the demos below.
sample = next(iter(ds))


def draw_bboxes_on_image(
    image: Image.Image,
    objects: dict,
    category_names: dict = None,
    box_color: str = "red",
    text_color: str = "white"
) -> Image.Image:
    """Return a copy of *image* with COCO-style bounding boxes drawn on it.

    Boxes come from ``objects["bbox"]`` in ``[x, y, width, height]`` format;
    labels come from ``objects["categories"]``, optionally mapped to names
    via *category_names* (falling back to the raw category id).
    """
    annotated = image.copy()
    canvas = ImageDraw.Draw(annotated)
    font = ImageFont.load_default()

    boxes = objects.get("bbox", [])
    cat_ids = objects.get("categories", [])

    for box, cat_id in zip(boxes, cat_ids):
        left, top, box_w, box_h = box
        right = left + box_w
        bottom = top + box_h

        # Box outline
        canvas.rectangle([left, top, right, bottom], outline=box_color, width=2)

        # Resolve the label text for this category id
        if category_names:
            label = category_names.get(cat_id, str(cat_id))
        else:
            label = str(cat_id)
        tb = canvas.textbbox((0, 0), label, font=font)
        text_w = tb[2] - tb[0]
        text_h = tb[3] - tb[1]
        anchor_y = max(top - text_h - 4, 0)

        # Filled background behind the label, then the label text itself
        canvas.rectangle(
            [left, anchor_y, left + text_w + 4, anchor_y + text_h + 2],
            fill=box_color
        )
        canvas.text((left + 2, anchor_y + 1), label, fill=text_color, font=font)

    return annotated



def draw_segmaps_on_image(
    image: Image.Image,
    objects: dict,
    category_names: dict = None,
    alpha: float = 0.4,
    text_color: str = "white"
) -> Image.Image:
    """Return a copy of *image* with COCO polygon segmentations overlaid.

    Each entry of ``objects["segmentation"]`` is either one flat polygon
    ``[x0, y0, x1, y1, ...]`` or a list of such polygons. Every polygon is
    filled with a random translucent color and labeled at its first vertex.
    """
    base = image.convert("RGBA").copy()
    layer = Image.new("RGBA", base.size, (255, 255, 255, 0))
    painter = ImageDraw.Draw(layer)
    font = ImageFont.load_default()

    segs = objects.get("segmentation", [])
    cat_ids = objects.get("categories", [])

    for seg, cat_id in zip(segs, cat_ids):
        # Normalize to a list of polygons (a bare polygon starts with a number).
        polys = seg if isinstance(seg[0], list) else [seg]
        if category_names:
            label = category_names.get(cat_id, str(cat_id))
        else:
            label = str(cat_id)

        for poly in polys:
            if len(poly) < 6:
                continue  # need at least 3 vertices to fill a polygon
            verts = [(poly[i], poly[i + 1]) for i in range(0, len(poly), 2)]

            # Random RGB made translucent through the alpha channel.
            fill_rgb = (randint(125, 255), randint(0, 125), randint(0, 255))
            painter.polygon(verts, fill=(*fill_rgb, int(255 * alpha)))

            # Label anchored just inside the first vertex.
            vx, vy = verts[0]
            painter.text((vx + 2, vy + 2), label, fill=text_color, font=font)

    return Image.alpha_composite(base, layer).convert("RGB")

# Render the streamed sample's annotations with the helpers defined above.
# For Bounding Boxes
od_image = draw_bboxes_on_image(
    image=sample["image"],
    objects=sample["objects"],
)

# For Segmentation Maps
segmap_image = draw_segmaps_on_image(
    image=sample["image"],
    objects=sample["objects"]
)

Get the categories

import json

# Load the official COCO instances annotations (downloaded in the build
# section below) to inspect the category id -> name mapping.
with open("/content/annotations/instances_train2017.json") as f:
    instances = json.load(f)

# Notebook-style bare expression: displays the list of category dicts.
instances["categories"]

Build the dataset and upload to Hub

# Jupyter/Colab shell cell: make sure a recent `datasets` is installed.
!pip install -U -q datasets

# Download and unzip COCO 2017
!wget -q http://images.cocodataset.org/zips/train2017.zip
!wget -q http://images.cocodataset.org/zips/val2017.zip
!wget -q http://images.cocodataset.org/annotations/annotations_trainval2017.zip

# Extract into /content: train2017/, val2017/ and annotations/.
!unzip -q train2017.zip
!unzip -q val2017.zip
!unzip -q annotations_trainval2017.zip

import json
import shutil
from pathlib import Path
from tqdm import tqdm
from datasets import load_dataset

# Colab working directory holding the unzipped COCO downloads.
base_dir = Path("/content")
# Per-split locations of the raw images and their instances annotation file.
splits = {
    "train": {
        "image_dir": base_dir / "train2017",
        "annotation_file": base_dir / "annotations" / "instances_train2017.json",
    },
    "val": {
        "image_dir": base_dir / "val2017",
        "annotation_file": base_dir / "annotations" / "instances_val2017.json",
    }
}
# Destination laid out for the `datasets` "imagefolder" loader
# (one subfolder per split, each with images + metadata.jsonl).
output_dir = base_dir / "coco_imagefolder"
output_dir.mkdir(parents=True, exist_ok=True)

def normalize_segmentation(segmentation):
    """Coerce a COCO segmentation field into a list of flat polygons.

    Returns ``[poly, ...]`` where each poly is ``[x0, y0, x1, y1, ...]``.
    A bare flat polygon is wrapped in a one-element list; RLE dicts and
    malformed values yield ``[]`` so the caller can skip them.
    """
    if not isinstance(segmentation, list):
        return []  # RLE dict or other non-list value
    if all(isinstance(part, list) for part in segmentation):
        return segmentation  # already a list of polygons
    if all(isinstance(coord, (int, float)) for coord in segmentation):
        return [segmentation]  # wrap single flat polygon
    return []  # mixed / unexpected content

def convert_coco_to_jsonl(image_dir, annotation_path, output_metadata_path):
    """Convert a COCO instances annotation file into imagefolder JSONL metadata.

    Groups annotations by image and writes one JSON line per annotated image
    with the schema ``{"file_name": str, "objects": {"bbox": [...],
    "segmentation": [...], "categories": [...]}}``. Annotations whose
    segmentation is RLE/malformed, and annotations whose ``image_id`` has no
    entry in the file's ``"images"`` section, are skipped. Images with no
    usable annotations get no metadata line.

    Args:
        image_dir: Unused; kept for call-site compatibility (the caller
            already writes the metadata file next to the copied images).
        annotation_path: Path to an ``instances_*.json`` COCO file.
        output_metadata_path: Destination ``metadata.jsonl`` path.
    """
    with open(annotation_path, encoding="utf-8") as f:
        data = json.load(f)

    id_to_filename = {img["id"]: img["file_name"] for img in data["images"]}
    annotations_by_image = {}

    for ann in data["annotations"]:
        img_id = ann["image_id"]
        segmentation = normalize_segmentation(ann["segmentation"])

        if not segmentation:
            continue  # skip if malformed or RLE
        if img_id not in id_to_filename:
            continue  # orphan annotation: no matching image record

        # setdefault keeps one record per image, created on first use.
        record = annotations_by_image.setdefault(
            img_id,
            {
                "file_name": id_to_filename[img_id],
                "objects": {
                    "bbox": [],
                    "segmentation": [],
                    "categories": [],
                },
            },
        )
        record["objects"]["bbox"].append(ann["bbox"])
        record["objects"]["segmentation"].append(segmentation)
        record["objects"]["categories"].append(ann["category_id"])

    # One compact JSON object per line, as the imagefolder loader expects.
    with open(output_metadata_path, "w", encoding="utf-8") as f:
        for metadata in annotations_by_image.values():
            json.dump(metadata, f)
            f.write("\n")

# Build imagefolder structure: per split, copy images then emit metadata.
for split_name, cfg in splits.items():
    dest = output_dir / split_name
    dest.mkdir(parents=True, exist_ok=True)

    # Copy images into the split's folder
    for src in tqdm(cfg["image_dir"].glob("*.jpg"), desc=f"Copying {split_name} images"):
        shutil.copy(src, dest / src.name)

    # Write JSONL metadata alongside the copied images
    convert_coco_to_jsonl(dest, cfg["annotation_file"], dest / "metadata.jsonl")

# Load and push
# Load the imagefolder (images + metadata.jsonl per split) and upload the
# resulting DatasetDict to the Hugging Face Hub (requires authentication).
dataset = load_dataset("imagefolder", data_dir=str(output_dir))
dataset.push_to_hub("ariG23498/coco2017")
Downloads last month
53