# dave/dave.py
import json
import os

import datasets

_DESCRIPTION = "Dataset with video and audio references for epic and ego4d tasks."
_HOMEPAGE = "https://huggingface.co/datasets/gorjanradevski/dave"
_LICENSE = "MIT"

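# Media path fields that _generate_examples rewrites to point at the locally extracted files.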
_MEDIA_FIELDS = [
    "compressed_video_path",
    "event_video_path",
    "video_with_overlayed_audio_path",
    "silent_video_path",
    "overlayed_audio_path",
]

def count_files_in_directory(directory):
    """Return the total number of files under ``directory``, recursively."""
    return sum(len(files) for _, _, files in os.walk(directory))

class DaveDataset(datasets.GeneratorBasedBuilder):
    def _info(self):
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features({
                "compressed_video_path": datasets.Value("string"),
                "overlayed_event_index": datasets.Value("int32"),
                "events": [
                    {
                        "start": datasets.Value("string"),
                        "end": datasets.Value("string"),
                        "duration": datasets.Value("float64"),
                        "narration": datasets.Value("string"),
                        "action": datasets.Value("string"),
                        "raw_narration": datasets.Value("string"),
                    }
                ],
                "event_video_path": datasets.Value("string"),
                "audio_class": datasets.Value("string"),
                "video_with_overlayed_audio_path": datasets.Value("string"),
                "silent_video_path": datasets.Value("string"),
                "overlayed_audio_path": datasets.Value("string"),
                "video_id": datasets.Value("string"),
                "participant_id": datasets.Value("string"),
                "type": datasets.Value("string"),
                "raw_choices_simple_audio_classification": datasets.Sequence(datasets.Value("string")),
                "raw_choices_overlayed_full_audio_classification": datasets.Sequence(datasets.Value("string")),
                "raw_choices_video_segment": datasets.Sequence(datasets.Value("string")),
                "correct_temporal_order": datasets.Sequence(datasets.Value("string")),
                "raw_choices_temporal_video": datasets.Sequence(datasets.Value("string")),
                "raw_choices_multimodal": datasets.Sequence(datasets.Value("string")),
                "raw_choices_silent_video": datasets.Sequence(datasets.Value("string")),
                "raw_choices_audio": datasets.Sequence(datasets.Value("string")),
                "raw_choices_text_only": datasets.Sequence(datasets.Value("string")),
                "raw_choices_pipeline_event_classification": datasets.Sequence(datasets.Value("string")),
            }),
            homepage=_HOMEPAGE,
            license=_LICENSE,
        )

    def _split_generators(self, dl_manager):
        base_url = "https://huggingface.co/datasets/gorjanradevski/dave/resolve/main/"
        # Each split (epic and ego4d) ships a JSON metadata file and a ZIP archive of media files
        json_files = {"ego4d": "ego4d.json", "epic": "epic.json"}
        zip_urls = {"ego4d": base_url + "ego4d.zip", "epic": base_url + "epic.zip"}
        split_generators = []
        for split_name, json_file in json_files.items():
            # Download the JSON metadata file
            json_path = dl_manager.download(base_url + json_file)
            # Download and extract the ZIP archive
            print(f"Downloading and extracting {split_name}.zip...")
            extracted_dir = dl_manager.download_and_extract(zip_urls[split_name])
            print(f"Extracted to: {extracted_dir}")
            print(f"Total number of files extracted: {count_files_in_directory(extracted_dir)}")
            # download_and_extract returns a plain path for a single URL, or a dict keyed by URL otherwise
            if isinstance(extracted_dir, str):
                files_dir = extracted_dir
            else:
                files_dir = extracted_dir[zip_urls[split_name]]
            split_generators.append(
                datasets.SplitGenerator(
                    name=split_name,
                    gen_kwargs={
                        "json_path": json_path,
                        "files_dir": files_dir,
                        "split_name": split_name,
                    },
                )
            )
        return split_generators

    def _generate_examples(self, json_path, files_dir, split_name):
        with open(json_path, "r", encoding="utf-8") as f:
            data = json.load(f)
        print(f"Processing {split_name} split with extracted files in {files_dir}")
        # The archive extracts into a "<split_name>_files" subdirectory
        extracted_root = files_dir
        files_dir = os.path.join(extracted_root, f"{split_name}_files")
        if not os.path.exists(files_dir):
            print(f"Warning: '{split_name}_files' directory not found in {extracted_root}")
            print(f"Available entries: {os.listdir(extracted_root)}")
            raise ValueError(f"Could not find '{split_name}_files' directory at {files_dir}")
        # Create a mapping of original file paths to local file paths
        file_mapping = {}
        for idx, item in enumerate(data):
            # Debug first item
            if idx == 0:
                print(f"Processing first item: {item.get('video_id', 'unknown')}")
            # Replace file paths with local paths for all media fields
            all_fields_resolved = True
            for field in _MEDIA_FIELDS:
                if field not in item or not item[field]:
                    continue
                original_path = item[field]
                # Check if we already processed this file path
                if original_path in file_mapping:
                    item[field] = file_mapping[original_path]
                    continue
                # Extract file name and construct local path
                file_name = os.path.basename(original_path)
                local_path = os.path.join(files_dir, file_name)
                # Check if the file exists
                if os.path.exists(local_path):
                    item[field] = local_path
                    file_mapping[original_path] = local_path
                else:
                    print(f"Warning: File not found for {field}: {local_path}")
                    all_fields_resolved = False
                    break
            if all_fields_resolved:
                yield idx, item
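

# Minimal usage sketch (not part of the original script): assuming this file is the dataset's
# loading script on the Hub, the splits defined above ("epic" and "ego4d") can be loaded with
# `datasets.load_dataset`, e.g.:
#
#     from datasets import load_dataset
#
#     ds = load_dataset("gorjanradevski/dave", split="epic", trust_remote_code=True)
#     print(ds[0]["audio_class"], ds[0]["compressed_video_path"])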