youtube-persian-asr / asr_dataset.py
PerSets's picture
fix: error handling
e8461b5
raw
history blame
6.33 kB
import os
import tarfile
import pandas as pd
import datasets
from datasets import Audio, Value, Features
import logging
from typing import Dict, Generator, Tuple
logger = logging.getLogger(__name__)
_DESCRIPTION = """
This dataset consists of various Youtube videos in Persian language.
Note: This dataset contains raw, unvalidated transcriptions. Users are advised to:
1. Perform their own quality assessment
2. Create their own train/validation/test splits based on their specific needs
3. Validate a subset of the data if needed for their use case
"""
_CITATION = """
Use this repo info/link for citation.
"""
class ASRDataset(datasets.GeneratorBasedBuilder):
    """ASR dataset with audio files stored in tar archives.

    Expects ``config.data_dir`` to contain:
      - ``metadata.csv``: tab-separated rows of ``file_name<TAB>sentence``
        (no header row; read with explicit column names).
      - ``clips/``: a directory of ``.tar`` archives holding the audio files
        referenced by ``file_name``.

    Bug fix vs. previous revision: the error paths called
    ``traceback.format_exc()`` without ever importing ``traceback``, so any
    failure raised ``NameError`` instead of being logged.  ``logger.exception``
    is used instead; it records the active traceback automatically.
    """

    VERSION = datasets.Version("1.0.0")

    def _info(self):
        """Return dataset metadata: feature schema, description, citation."""
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features({
                "file_name": datasets.Value("string"),
                "audio": datasets.Audio(sampling_rate=16000),
                "sentence": datasets.Value("string"),
                "tar_file": datasets.Value("string"),
            }),
            supervised_keys=None,
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager):
        """Return SplitGenerators.

        Only a TRAIN split is produced: the raw transcriptions are
        unvalidated, so users are expected to create their own splits
        (see _DESCRIPTION).
        """
        try:
            return [
                datasets.SplitGenerator(
                    name=datasets.Split.TRAIN,
                    gen_kwargs={
                        "split": "train",
                        "data_dir": self.config.data_dir,
                    },
                ),
            ]
        except Exception as e:
            # logger.exception logs the message AND the current traceback.
            logger.exception(f"Error in _split_generators: {e}")
            raise

    def _prepare_metadata(self, data_dir: str) -> pd.DataFrame:
        """Read metadata.csv and map each audio file to its tar archive.

        Returns a DataFrame with columns ``file_name``, ``sentence`` and
        ``tar_file``; rows whose audio file is not found in any archive
        are dropped.

        Raises:
            ValueError: if the metadata file is empty.
            Exception: re-raises any read/listing failure after logging it.
        """
        try:
            logger.info("Preparing metadata with tar file information...")
            metadata_path = os.path.join(data_dir, "metadata.csv")
            # Read metadata; the file is headerless and tab-separated.
            try:
                df = pd.read_csv(metadata_path, sep='\t', names=['file_name', 'sentence'], encoding="utf-8")
            except Exception as read_error:
                logger.error(f"Failed to read metadata file: {read_error}")
                raise
            if df.empty:
                raise ValueError("Metadata file is empty")

            clips_dir = os.path.join(data_dir, "clips")
            tar_files = [f for f in os.listdir(clips_dir) if f.endswith('.tar')]
            if not tar_files:
                # Nothing to map against: every row would be dropped below.
                logger.warning(f"No .tar archives found in {clips_dir}")

            # Find which tar archive contains each audio file.  Assumes tar
            # member names match metadata file_name values exactly
            # (no directory prefixes) — TODO confirm against the archives.
            df['tar_file'] = None
            for tar_file in tar_files:
                tar_path = os.path.join(clips_dir, tar_file)
                try:
                    with tarfile.open(tar_path, 'r') as tar:
                        file_list = tar.getnames()
                        mask = df['file_name'].isin(file_list)
                        df.loc[mask, 'tar_file'] = tar_file
                except Exception as tar_error:
                    # Best effort: a corrupt archive loses only its own files.
                    logger.warning(f"Error processing tar file {tar_file}: {tar_error}")

            # Drop entries whose audio was not found in any archive.
            df = df.dropna(subset=['tar_file'])
            logger.info(f"Total entries after tar file mapping: {len(df)}")
            return df
        except Exception as e:
            logger.exception(f"Unexpected error in _prepare_metadata: {e}")
            raise

    def _generate_examples(self, split, data_dir):
        """Yield (index, example) pairs, reading audio bytes out of the tars.

        Examples are grouped by archive so each tar is opened only once.
        Per-file and per-archive failures are logged and skipped; only
        unexpected top-level errors propagate.
        """
        try:
            df = self._prepare_metadata(data_dir)
            clips_dir = os.path.join(data_dir, "clips")
            idx = 0
            # Group by tar_file so each archive is opened exactly once.
            for tar_file, group in df.groupby('tar_file'):
                tar_path = os.path.join(clips_dir, tar_file)
                logger.info(f"Processing tar file: {tar_file}")
                try:
                    with tarfile.open(tar_path, 'r') as tar:
                        for _, row in group.iterrows():
                            try:
                                member = tar.getmember(row['file_name'])
                                extracted = tar.extractfile(member)
                                if extracted is None:
                                    # Directories / special members extract to None.
                                    logger.warning(f"Could not extract file: {row['file_name']}")
                                    continue
                                # Context manager guarantees the handle is closed
                                # even if read() fails.
                                with extracted:
                                    audio_bytes = extracted.read()
                                yield idx, {
                                    "file_name": row['file_name'],
                                    "audio": {"path": f"{tar_path}::{row['file_name']}", "bytes": audio_bytes},
                                    "sentence": row['sentence'],
                                    "tar_file": tar_file,
                                }
                                idx += 1
                            except Exception as file_error:
                                # Skip the bad file, keep the rest of the archive.
                                logger.warning(f"Error processing file {row['file_name']}: {file_error}")
                                continue
                except Exception as tar_error:
                    # Skip the bad archive, keep the remaining groups.
                    logger.error(f"Error processing tar file {tar_file}: {tar_error}")
                    continue
            logger.info(f"Total examples generated: {idx}")
        except Exception as e:
            logger.exception(f"Unexpected error in _generate_examples: {e}")
            raise