# SLAKE / parsing_code.py
# Provenance (Hugging Face): uploaded by harpreetsahota, "Create parsing_code.py",
# commit bc093d5 (verified).
import os
import json
import fiftyone as fo
from PIL import Image
from pathlib import Path
def load_sample_files(subdir):
    """
    Gather every file required to build one SLAKE sample.

    Args:
        subdir (Path): Path to the sample subdirectory

    Returns:
        tuple: (detections_data, questions_data, mask_file_path, source_file_path, img_dimensions)
            Returns None if any required file is missing.
    """
    name = subdir.name

    # Expected layout inside each sample directory
    paths = {
        "detection": subdir / "detection.json",
        "question": subdir / "question.json",
        "mask": subdir / f"mask_{name}.png",
        "source": subdir / f"source_{name}.jpg",
    }

    # Bail out unless the sample directory is complete
    for path in paths.values():
        if not path.exists():
            return None

    # Parse both JSON payloads
    with open(paths["detection"], 'r') as f:
        detections_data = json.load(f)
    with open(paths["question"], 'r') as f:
        questions_data = json.load(f)

    # (width, height) of the source image, in pixels
    with Image.open(paths["source"]) as img:
        dimensions = img.size

    return detections_data, questions_data, paths["mask"], paths["source"], dimensions
def convert_detections_to_relative(detections_data, img_width, img_height):
    """
    Convert absolute pixel bounding boxes into FiftyOne's relative format.

    Args:
        detections_data (list): List of detection dictionaries mapping
            label -> [x, y, width, height] in absolute pixels
        img_width (int): Image width in pixels
        img_height (int): Image height in pixels

    Returns:
        fo.Detections: FiftyOne Detections object
    """
    # FiftyOne expects [top-left-x, top-left-y, width, height], each in [0, 1]
    converted = [
        fo.Detection(
            label=label,
            bounding_box=[
                box[0] / img_width,
                box[1] / img_height,
                box[2] / img_width,
                box[3] / img_height,
            ],
        )
        for entry in detections_data
        for label, box in entry.items()
    ]
    return fo.Detections(detections=converted)
def add_sample_metadata(sample, english_questions):
    """
    Attach sample-level metadata taken from the questions data.

    Args:
        sample (fo.Sample): FiftyOne sample to modify
        english_questions (list): List of English question dictionaries
    """
    if not english_questions:
        return

    # These fields are identical across all questions in a sample,
    # so reading the first entry is sufficient
    meta = english_questions[0]
    for field in ('location', 'modality', 'base_type', 'answer_type'):
        sample[field] = fo.Classification(label=meta[field])
def add_questions_and_answers(sample, english_questions):
    """
    Store each question/answer pair on the sample as indexed fields.

    Args:
        sample (fo.Sample): FiftyOne sample to modify
        english_questions (list): List of English question dictionaries
    """
    # Fields are named question_0/answer_0, question_1/answer_1, ...
    for idx, item in enumerate(english_questions):
        sample[f'question_{idx}'] = item['question']
        sample[f'answer_{idx}'] = fo.Classification(label=item['answer'])
def process_single_sample(subdir):
    """
    Build a FiftyOne sample from one SLAKE sample directory.

    Args:
        subdir (Path): Path to the sample subdirectory

    Returns:
        fo.Sample or None: FiftyOne sample, or None if processing failed
    """
    loaded = load_sample_files(subdir)
    if loaded is None:
        # Incomplete sample directories are skipped, not fatal
        print(f"Warning: Missing files in {subdir.name}, skipping...")
        return None

    detections_data, questions_data, mask_file, source_file, dims = loaded
    width, height = dims

    sample = fo.Sample(filepath=str(source_file.absolute()))

    # Object detections, converted to FiftyOne's relative coordinates
    sample['detections'] = convert_detections_to_relative(detections_data, width, height)

    # Segmentation mask stored by reference to the PNG on disk
    sample['segmentation'] = fo.Segmentation(mask_path=str(mask_file.absolute()))

    # Keep only English questions, preserving their original order
    english_questions = [q for q in questions_data if q.get('q_lang') == 'en']

    add_sample_metadata(sample, english_questions)
    add_questions_and_answers(sample, english_questions)
    return sample
def parse_slake_dataset(data_root="SLAKE/imgs", dataset_name="SLAKE"):
    """
    Parse the SLAKE dataset into a FiftyOne dataset.

    Args:
        data_root (str): Path to the SLAKE/imgs directory
        dataset_name (str): Name for the FiftyOne dataset

    Returns:
        fo.Dataset: FiftyOne dataset with parsed samples
    """
    dataset = fo.Dataset(dataset_name, overwrite=True)

    collected = []
    for entry in Path(data_root).iterdir():
        # Samples live one per subdirectory; skip stray files
        if not entry.is_dir():
            continue
        print(f"Processing {entry.name}...")
        parsed = process_single_sample(entry)
        if parsed is not None:
            collected.append(parsed)

    # Batch insert is much faster than adding samples one at a time
    dataset.add_samples(collected)
    dataset.compute_metadata()
    return dataset
import fiftyone as fo
from pathlib import Path
def load_mask_targets_from_file(mask_targets_file):
    """
    Load the pixel-value -> organ-label mapping from a text file.

    Each line is expected to look like ``<pixel_value>: <label>``; lines
    without a ``:`` separator (including blank lines) are ignored. Only
    the first ``:`` splits, so labels may themselves contain colons.

    Args:
        mask_targets_file (str): Path to the mask targets file

    Returns:
        dict: Mapping of integer pixel values to organ label strings
    """
    mask_targets = {}
    # Explicit encoding so the result doesn't depend on the platform default
    with open(mask_targets_file, 'r', encoding='utf-8') as f:
        for line in f:
            line = line.strip()
            if ':' not in line:
                continue
            pixel_value, label = line.split(':', 1)
            # Strip the label so "0: background" doesn't keep a leading space
            mask_targets[int(pixel_value)] = label.strip()
    return mask_targets
def set_dataset_mask_targets(dataset_name, mask_targets_file, segmentation_field="segmentation"):
    """
    Attach mask targets to an existing FiftyOne dataset and print a preview.

    Args:
        dataset_name (str): Name of the FiftyOne dataset
        mask_targets_file (str): Path to the mask targets mapping file
        segmentation_field (str): Name of the segmentation field (default: "segmentation")
    """
    dataset = fo.load_dataset(dataset_name)
    mask_targets = load_mask_targets_from_file(mask_targets_file)

    dataset.mask_targets = {segmentation_field: mask_targets}
    # Mask targets are dataset-level metadata; they persist only after save()
    dataset.save()

    # Preview the first few mappings
    for pixel_val, label in list(mask_targets.items())[:5]:
        print(f" {pixel_val}: {label}")
    if len(mask_targets) > 5:
        print(f" ... and {len(mask_targets) - 5} more")
if __name__ == "__main__":
    # Guarded entry point: building the dataset and mutating it are side
    # effects that should only run when this file is executed as a script,
    # not when its helpers are imported elsewhere.
    dataset = parse_slake_dataset("SLAKE/imgs", "SLAKE")
    set_dataset_mask_targets(
        dataset_name="SLAKE",  # Your dataset name
        mask_targets_file="SLAKE/mask.txt",  # Your mapping file
        segmentation_field="segmentation",
    )