Commit 34bcfc8 by yangwang825
Parent(s): 3440857
Create audiocaps.py
audiocaps.py
ADDED
@@ -0,0 +1,278 @@
# coding=utf-8

"""AudioCaps dataset."""


import os
import gzip
import shutil
import pathlib
import logging
import datasets
import typing as tp
import pandas as pd
import urllib.request
from pathlib import Path
from copy import deepcopy
from tqdm.auto import tqdm
from rich.logging import RichHandler

logger = logging.getLogger(__name__)
logger.addHandler(RichHandler())
logger.setLevel(logging.INFO)

VERSION = "0.0.1"

# Cache location, resolved in order of precedence:
# HF_DATASETS_CACHE > HF_HOME/datasets > XDG_CACHE_HOME/huggingface/datasets
DEFAULT_XDG_CACHE_HOME = "~/.cache"
XDG_CACHE_HOME = os.getenv("XDG_CACHE_HOME", DEFAULT_XDG_CACHE_HOME)
DEFAULT_HF_CACHE_HOME = os.path.join(XDG_CACHE_HOME, "huggingface")
HF_CACHE_HOME = os.path.expanduser(os.getenv("HF_HOME", DEFAULT_HF_CACHE_HOME))
DEFAULT_HF_DATASETS_CACHE = os.path.join(HF_CACHE_HOME, "datasets")
HF_DATASETS_CACHE = Path(os.getenv("HF_DATASETS_CACHE", DEFAULT_HF_DATASETS_CACHE))
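# Illustrative default (an assumption about a typical environment, not part of
# the original script): with none of these variables set, HF_DATASETS_CACHE
# resolves to ~/.cache/huggingface/datasets.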


class AudioCapsConfig(datasets.BuilderConfig):
    """BuilderConfig for AudioCaps."""

    def __init__(self, features, **kwargs):
        super(AudioCapsConfig, self).__init__(version=datasets.Version(VERSION, ""), **kwargs)
        self.features = features


class AudioCaps(datasets.GeneratorBasedBuilder):

    BUILDER_CONFIGS = [
        AudioCapsConfig(  # fixed: the original `AudioCapsKConfig` is undefined and would raise a NameError
            features=datasets.Features(
                {
                    "file": datasets.Value("string"),
                    "audio": datasets.Audio(sampling_rate=None),
                    "caption": datasets.Value("string"),
                }
            ),
            name="audiocaps",
            description="",
        ),
    ]

    DEFAULT_CONFIG_NAME = "audiocaps"

    def _info(self):
        return datasets.DatasetInfo(
            description="",
            features=self.config.features,
            supervised_keys=None,
            homepage="",
            citation="",
            task_templates=None,
        )

    def _split_generators(self, dl_manager):
        """Returns SplitGenerators."""
        extensions = ['.wav']

        # Development (training) set, shipped as twelve zip shards.
        # Each shard gets its own unpack directory: download_file skips
        # extraction when dest_unpack already exists, so pointing all twelve
        # shards at the same folder would extract only the first one.
        _dev_save_path = os.path.join(
            HF_DATASETS_CACHE, 'confit___audiocaps/audiocaps', VERSION
        )
        for _filename in [f'train{i}.zip' for i in range(1, 12+1)]:
            DEV_URL = f'https://huggingface.co/datasets/confit/audiocaps/resolve/main/data/{_filename}'
            download_file(
                source=DEV_URL,
                dest=os.path.join(_dev_save_path, _filename),
                unpack=True,
                dest_unpack=os.path.join(_dev_save_path, 'extracted', 'train', Path(_filename).stem),
            )
        # Scan once after all shards are unpacked; scanning inside the loop,
        # as the original did, re-collects earlier shards' files on every
        # iteration and yields duplicates.
        train_archive_path = os.path.join(_dev_save_path, 'extracted', 'train')
        _, train_audio_paths = fast_scandir(train_archive_path, extensions, recursive=True)

        # Validation set
        VAL_URL = 'https://huggingface.co/datasets/confit/audiocaps/resolve/main/data/val.zip'
        _val_save_path = os.path.join(
            HF_DATASETS_CACHE, 'confit___audiocaps/audiocaps', VERSION
        )
        _filename = 'val.zip'
        download_file(
            source=VAL_URL,
            dest=os.path.join(_val_save_path, _filename),
            unpack=True,
            dest_unpack=os.path.join(_val_save_path, 'extracted', 'validation'),
        )
        validation_archive_path = os.path.join(_val_save_path, 'extracted', 'validation')
        _, validation_audio_paths = fast_scandir(validation_archive_path, extensions, recursive=True)

        # Evaluation (test) set
        EVAL_URL = 'https://huggingface.co/datasets/confit/audiocaps/resolve/main/data/test.zip'
        _eval_save_path = os.path.join(
            HF_DATASETS_CACHE, 'confit___audiocaps/audiocaps', VERSION
        )
        _filename = 'test.zip'
        download_file(
            source=EVAL_URL,
            dest=os.path.join(_eval_save_path, _filename),
            unpack=True,
            dest_unpack=os.path.join(_eval_save_path, 'extracted', 'test'),
        )
        test_archive_path = os.path.join(_eval_save_path, 'extracted', 'test')
        _, test_audio_paths = fast_scandir(test_archive_path, extensions, recursive=True)

        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN, gen_kwargs={"audio_paths": train_audio_paths, "split": "train"}
            ),
            datasets.SplitGenerator(
                name=datasets.Split.VALIDATION, gen_kwargs={"audio_paths": validation_audio_paths, "split": "validation"}
            ),
            datasets.SplitGenerator(
                name=datasets.Split.TEST, gen_kwargs={"audio_paths": test_audio_paths, "split": "test"}
            ),
        ]

    def _generate_examples(self, audio_paths, split=None):
        if split == 'train':
            metadata_df = pd.read_csv('https://huggingface.co/datasets/confit/audiocaps/raw/main/metadata/train.csv')
        elif split == 'validation':
            metadata_df = pd.read_csv('https://huggingface.co/datasets/confit/audiocaps/raw/main/metadata/val.csv')
        elif split == 'test':
            metadata_df = pd.read_csv('https://huggingface.co/datasets/confit/audiocaps/raw/main/metadata/test.csv')
        else:
            raise ValueError(f"Unknown split: {split}")  # metadata_df would otherwise be unbound

        fileid2caption = {}
        for idx, row in metadata_df.iterrows():
            # audiocap_id is stored without a file extension, so append .wav
            # to match the basenames returned by fast_scandir
            fileid2caption[f"{row['audiocap_id']}.wav"] = row['caption']

        for guid, audio_path in enumerate(audio_paths):
            fileid = Path(audio_path).name
            caption = fileid2caption.get(fileid)
            # yield only the keys declared in the config's features; the
            # original also yielded an "id" field, which datasets rejects
            # because it is absent from the features schema
            yield guid, {
                "file": audio_path,
                "audio": audio_path,
                "caption": caption,
            }
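
# A minimal consumption sketch (an assumption, not part of the original file:
# it presumes this script is saved locally as audiocaps.py and the
# confit/audiocaps Hub repo is reachable):
#
#   from datasets import load_dataset
#   ds = load_dataset("audiocaps.py", "audiocaps")
#   sample = ds["test"][0]
#   sample["caption"]  # caption string
#   sample["audio"]    # decoded by datasets.Audio into path/array/sampling_rate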

def fast_scandir(path: str, exts: tp.List[str], recursive: bool = False):
    # Scan a directory tree for files with the given extensions; faster than glob.
    # Adapted from github.com/drscotthawley/aeiou/blob/main/aeiou/core.py
    subfolders, files = [], []

    try:  # guard against 'permission denied' on the directory itself
        for f in os.scandir(path):
            try:  # guard against 'too many levels of symbolic links' on entries
                if f.is_dir():
                    subfolders.append(f.path)
                elif f.is_file():
                    if os.path.splitext(f.name)[1].lower() in exts:
                        files.append(f.path)
            except Exception:
                pass
    except Exception:
        pass

    if recursive:
        for subfolder in list(subfolders):  # renamed from `path`, which shadowed the argument
            sf, f = fast_scandir(subfolder, exts, recursive=recursive)
            subfolders.extend(sf)
            files.extend(f)  # type: ignore

    return subfolders, files
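
# Usage sketch (the path below is hypothetical): collect every .wav file under
# an extracted split directory. Extensions must be lowercase and include the dot.
#
#   subdirs, wav_paths = fast_scandir('/path/to/extracted/train', ['.wav'], recursive=True)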

def download_file(
    source,
    dest,
    unpack=False,
    dest_unpack=None,
    replace_existing=False,
    write_permissions=False,
):
    """Downloads the file from the given source and saves it in the given
    destination path.

    Arguments
    ---------
    source : path or url
        Path of the source file. If the source is a URL, it downloads it from
        the web.
    dest : path
        Destination path.
    unpack : bool
        If True, it unpacks the data in the dest folder.
    dest_unpack : path
        Path where the unpacked dataset is stored.
    replace_existing : bool
        If True, replaces the existing files.
    write_permissions : bool
        When set to True, all the files in the dest_unpack directory are
        granted write permissions. This option is active only when unpack=True.
    """
    class DownloadProgressBar(tqdm):
        """DownloadProgressBar class."""

        def update_to(self, b=1, bsize=1, tsize=None):
            """Progress hook for urllib.request.urlretrieve."""
            if tsize is not None:
                self.total = tsize
            self.update(b * bsize - self.n)

    # Create the destination directory if it doesn't exist
    dest_dir = pathlib.Path(dest).resolve().parent
    dest_dir.mkdir(parents=True, exist_ok=True)
    if not source.startswith("http"):  # local file, not a URL
        shutil.copyfile(source, dest)

    elif not os.path.isfile(dest) or replace_existing:
        logger.info(f"Downloading {source} to {dest}")
        with DownloadProgressBar(
            unit="B",
            unit_scale=True,
            miniters=1,
            desc=source.split("/")[-1],
        ) as t:
            urllib.request.urlretrieve(
                source, filename=dest, reporthook=t.update_to
            )
    else:
        logger.info(f"{dest} exists. Skipping download")

    # Unpack if necessary
    if unpack:
        if dest_unpack is None:
            dest_unpack = os.path.dirname(dest)
        if os.path.exists(dest_unpack):
            logger.info(f"{dest_unpack} already exists. Skipping extraction")
        else:
            logger.info(f"Extracting {dest} to {dest_unpack}")
            # shutil.unpack_archive handles zip/tar/tar.gz/tgz archives; a
            # plain .gz file is a single compressed file, not an archive,
            # and needs gzip directly
            if source.endswith(".gz") and not source.endswith(".tar.gz"):
                out = dest[: -len(".gz")]
                with gzip.open(dest, "rb") as f_in:
                    with open(out, "wb") as f_out:
                        shutil.copyfileobj(f_in, f_out)
            else:
                shutil.unpack_archive(dest, dest_unpack)
            if write_permissions:
                set_writing_permissions(dest_unpack)
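
# Usage sketch (the /tmp paths are hypothetical; the URL appears above in
# _split_generators): fetch one archive and unpack it.
#
#   download_file(
#       source='https://huggingface.co/datasets/confit/audiocaps/resolve/main/data/val.zip',
#       dest='/tmp/audiocaps/val.zip',
#       unpack=True,
#       dest_unpack='/tmp/audiocaps/extracted/validation',
#   )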

def set_writing_permissions(folder_path):
    """
    Sets user write permissions on all the files in the given folder.

    Arguments
    ---------
    folder_path : folder
        Folder whose files will be granted write permissions.
    """
    for root, dirs, files in os.walk(folder_path):
        for file_name in files:
            file_path = os.path.join(root, file_name)
            # Set read/write permissions (mode 0o666) on the file
            os.chmod(file_path, 0o666)
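

# Hedged smoke test (not in the original file): runs only when the script is
# executed directly, never when the datasets library imports it. Beware that
# the first run downloads and unpacks the full AudioCaps archives.
if __name__ == "__main__":
    from datasets import load_dataset

    ds = load_dataset(__file__, "audiocaps")  # __file__ points at this script
    print(ds)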