|
|
| import datasets
|
| import logging
|
|
|
|
|
# Module-level logger, shared by the builder class below.
logger = logging.getLogger(__name__)

# NOTE(review): basicConfig at import time mutates the global root-logger
# configuration. That is conventional for a standalone dataset loading script,
# but confirm it is intended if this module is ever imported by a larger app.
logging.basicConfig(level=logging.INFO)
|
|
|
class ForestSegmentationDataset(datasets.GeneratorBasedBuilder):
    """Streams forest-segmentation samples listed in an ``index.parquet`` manifest.

    Each example pairs a sample id and a list of image paths with a single
    mask path. Only paths are produced here; no pixel data is loaded.
    """

    def _info(self):
        """Declare the dataset schema: string id, sequence of image paths, mask path."""
        logger.info("Defining dataset schema...")
        return datasets.DatasetInfo(
            features=datasets.Features(
                {
                    "sample_id": datasets.Value("string"),
                    "image_paths": datasets.Sequence(datasets.Value("string")),
                    "mask": datasets.Value("string"),
                }
            ),
        )

    def _split_generators(self, dl_manager):
        """Build a single TRAIN split fed by a lazy row stream over index.parquet.

        NOTE(review): the stream is consumed exactly once in
        ``_generate_examples`` — confirm ``dl_manager.iter_parquet`` supports
        re-iteration if multiple passes are ever required.
        """
        logger.info("Loading sample stream from index.parquet...")
        sample_stream = dl_manager.iter_parquet("index.parquet")
        logger.info("Sample stream loaded successfully.")
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={"samples": sample_stream},
            )
        ]

    def _generate_examples(self, samples):
        """Yield ``(sample_id, example)`` pairs, skipping malformed rows.

        Rows that raise while their fields are read (e.g. a missing key) are
        logged with a full traceback and skipped; generation continues.
        """
        logger.info("Starting to generate examples...")
        for i, sample in enumerate(samples):
            if i % 1000 == 0:
                # Lazy %-args: no string formatting happens unless the
                # record is actually emitted (this runs once per sample).
                logger.info("Processed %d samples...", i)
            try:
                # Read every required field BEFORE yielding, so the except
                # below only catches malformed rows — not exceptions raised
                # by the consumer of this generator while it holds the yield.
                example = {
                    "sample_id": sample["sample_id"],
                    "image_paths": sample["image_paths"],
                    "mask": sample["mask_path"],
                }
            except Exception as e:
                # logger.exception records the traceback, unlike logger.error.
                logger.exception(
                    "Error processing sample %s: %s",
                    sample.get("sample_id", "unknown"),
                    e,
                )
                continue
            yield example["sample_id"], example
|
|
|