Datasets:

Modalities:
Geospatial
Languages:
English
DOI:
Libraries:
License:
File size: 1,613 Bytes
a248601
73c6f98
ceb6a3d
 
 
 
 
7dcdc23
80c0b47
73c6f98
ceb6a3d
73c6f98
 
b7d4966
df111b5
 
73c6f98
 
 
 
ceb6a3d
 
 
b7d4966
 
 
97064ef
b7d4966
 
 
97064ef
ceb6a3d
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44

import datasets
import logging

# Root logging configuration for this loading script: INFO-level output,
# applied once at import time via basicConfig (a no-op if the root logger
# already has handlers).
logging.basicConfig(level=logging.INFO)
# Module-scoped logger, named after this module per stdlib convention.
logger = logging.getLogger(__name__)

class ForestSegmentationDataset(datasets.GeneratorBasedBuilder):
    """Loading script for a forest-segmentation dataset indexed by ``index.parquet``.

    Each example carries a sample id, a sequence of image paths, and one mask
    path — all stored as strings; this builder does not open the files
    themselves, it only streams the index rows.
    """

    def _info(self):
        """Declare the dataset schema (id, image paths, mask path — all strings)."""
        logger.info("Defining dataset schema...")
        return datasets.DatasetInfo(
            features=datasets.Features({
                "sample_id": datasets.Value("string"),
                "image_paths": datasets.Sequence(datasets.Value("string")),
                "mask": datasets.Value("string"),
            }),
        )

    def _split_generators(self, dl_manager):
        """Return a single TRAIN split fed by the rows of ``index.parquet``."""
        logger.info("Loading sample stream from index.parquet...")
        # NOTE(review): assumes the download manager exposes ``iter_parquet`` —
        # confirm against the installed `datasets` version.
        sample_stream = dl_manager.iter_parquet("index.parquet")
        logger.info("Sample stream loaded successfully.")
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={"samples": sample_stream},
            )
        ]

    def _generate_examples(self, samples):
        """Yield ``(key, example)`` pairs from index rows, skipping bad rows.

        A row that raises (e.g. a missing ``sample_id``/``image_paths``/
        ``mask_path`` key) is logged and skipped rather than aborting the
        whole run — deliberate best-effort behavior.
        """
        logger.info("Starting to generate examples...")
        for i, sample in enumerate(samples):
            if i % 1000 == 0:
                # Lazy %-args: no string interpolation unless the record is emitted.
                logger.info("Processed %d samples...", i)
            try:
                # Single lookup, reused both as the example key and as a field.
                sample_id = sample["sample_id"]
                yield sample_id, {
                    "sample_id": sample_id,
                    "image_paths": sample["image_paths"],
                    "mask": sample["mask_path"],
                }
            except Exception as e:
                # Broad on purpose: one malformed row must not kill generation.
                logger.error(
                    "Error processing sample %s: %s",
                    sample.get("sample_id", "unknown"),
                    e,
                )