Datasets:

Tasks:
Other
Languages:
Chinese
ArXiv:
License:
shunk031 committed on
Commit
58c8412
1 Parent(s): 6b3fa2d

remove 7 files

Browse files
.github/workflows/ci.yaml DELETED
@@ -1,49 +0,0 @@
1
- name: CI
2
-
3
- on:
4
- push:
5
- branches: [main]
6
- pull_request:
7
- branches: [main]
8
- paths-ignore:
9
- - "README.md"
10
-
11
- jobs:
12
- test:
13
- runs-on: ubuntu-latest
14
- strategy:
15
- matrix:
16
- python-version: ["3.9", "3.10"]
17
-
18
- steps:
19
- - uses: actions/checkout@v3
20
-
21
- - name: Set up Python ${{ matrix.python-version }}
22
- uses: actions/setup-python@v4
23
- with:
24
- python-version: ${{ matrix.python-version }}
25
-
26
- - name: Install dependencies
27
- run: |
28
- pip install -U pip setuptools wheel poetry
29
- poetry install
30
-
31
- - name: Format
32
- run: |
33
- poetry run black --check .
34
-
35
- - name: Lint
36
- run: |
37
- poetry run ruff .
38
-
39
- - name: Type check
40
- run: |
41
- poetry run mypy . \
42
- --ignore-missing-imports \
43
- --no-strict-optional \
44
- --no-site-packages \
45
- --cache-dir=/dev/null
46
-
47
- - name: Run tests
48
- run: |
49
- poetry run pytest --color=yes -rf
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
.github/workflows/push_to_hub.yaml DELETED
@@ -1,26 +0,0 @@
1
- name: Sync to Hugging Face Hub
2
-
3
- on:
4
- workflow_run:
5
- workflows:
6
- - CI
7
- branches:
8
- - main
9
- types:
10
- - completed
11
-
12
- jobs:
13
- push_to_hub:
14
- runs-on: ubuntu-latest
15
-
16
- steps:
17
- - name: Checkout repository
18
- uses: actions/checkout@v3
19
-
20
- - name: Push to Huggingface hub
21
- env:
22
- HF_TOKEN: ${{ secrets.HF_TOKEN }}
23
- HF_USERNAME: ${{ secrets.HF_USERNAME }}
24
- run: |
25
- git fetch --unshallow
26
- git push --force https://${HF_USERNAME}:${HF_TOKEN}@huggingface.co/datasets/${HF_USERNAME}/PKU-PosterLayout main
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
PKU-PosterLayout.py DELETED
@@ -1,279 +0,0 @@
1
- import os
2
- import pathlib
3
- from typing import List, TypedDict, Union, cast
4
-
5
- import datasets as ds
6
- from datasets.utils.logging import get_logger
7
- from PIL import Image
8
- from PIL.Image import Image as PilImage
9
-
10
- logger = get_logger(__name__)
11
-
12
- _DESCRIPTION = (
13
- "A New Dataset and Benchmark for Content-aware Visual-Textual Presentation Layout"
14
- )
15
-
16
- _CITATION = """\
17
- @inproceedings{hsu2023posterlayout,
18
- title={PosterLayout: A New Benchmark and Approach for Content-aware Visual-Textual Presentation Layout},
19
- author={Hsu, Hsiao Yuan and He, Xiangteng and Peng, Yuxin and Kong, Hao and Zhang, Qing},
20
- booktitle={Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition},
21
- pages={6018--6026},
22
- year={2023}
23
- }
24
- """
25
-
26
- _HOMEPAGE = "http://59.108.48.34/tiki/PosterLayout/"
27
-
28
- _LICENSE = "Images in PKU PosterLayout are distributed under the CC BY-SA 4.0 license."
29
-
30
-
31
- class TrainPoster(TypedDict):
32
- original: str
33
- inpainted: str
34
-
35
-
36
- class TestPoster(TypedDict):
37
- canvas: str
38
-
39
-
40
- class SaliencyMaps(TypedDict):
41
- pfpn: str
42
- basnet: str
43
-
44
-
45
- class TrainDataset(TypedDict):
46
- poster: TrainPoster
47
- saliency_maps: SaliencyMaps
48
-
49
-
50
- class TestDataset(TypedDict):
51
- poster: TestPoster
52
- saliency_maps: SaliencyMaps
53
-
54
-
55
- class DatasetUrls(TypedDict):
56
- train: TrainDataset
57
- test: TestDataset
58
-
59
-
60
- # The author of this loading script has uploaded the poster image and saliency maps to the HuggingFace's private repository to facilitate testing.
61
- # If you are using this loading script, please download the annotations from the appropriate channels, such as the OneDrive link provided by the Magazine dataset's author.
62
- # (To the author of Magazine dataset, if there are any issues regarding this matter, please contact us. We will address it promptly.)
63
- _URLS: DatasetUrls = {
64
- "train": {
65
- "poster": {
66
- "original": "https://huggingface.co/datasets/shunk031/PKU-PosterLayout-private/resolve/main/train/original_poster.zip",
67
- "inpainted": "https://huggingface.co/datasets/shunk031/PKU-PosterLayout-private/resolve/main/train/inpainted_poster.zip",
68
- },
69
- "saliency_maps": {
70
- "pfpn": "https://huggingface.co/datasets/shunk031/PKU-PosterLayout-private/resolve/main/train/saliencymaps_pfpn.zip",
71
- "basnet": "https://huggingface.co/datasets/shunk031/PKU-PosterLayout-private/resolve/main/train/saliencymaps_basnet.zip",
72
- },
73
- },
74
- "test": {
75
- "poster": {
76
- "canvas": "https://huggingface.co/datasets/shunk031/PKU-PosterLayout-private/resolve/main/test/image_canvas.zip",
77
- },
78
- "saliency_maps": {
79
- "pfpn": "https://huggingface.co/datasets/shunk031/PKU-PosterLayout-private/resolve/main/test/saliencymaps_pfpn.zip",
80
- "basnet": "https://huggingface.co/datasets/shunk031/PKU-PosterLayout-private/resolve/main/test/saliencymaps_basnet.zip",
81
- },
82
- },
83
- }
84
-
85
-
86
- def file_sorter(f: pathlib.Path) -> int:
87
- idx, *_ = f.stem.split("_")
88
- return int(idx)
89
-
90
-
91
- def load_image(file_path: pathlib.Path) -> PilImage:
92
- logger.info(f"Load from {file_path}")
93
- return Image.open(file_path)
94
-
95
-
96
- def get_original_poster_files(base_dir: str) -> List[pathlib.Path]:
97
- poster_dir = pathlib.Path(base_dir) / "original_poster"
98
- return sorted(poster_dir.iterdir(), key=lambda f: int(f.stem))
99
-
100
-
101
- def get_inpainted_poster_files(base_dir: str) -> List[pathlib.Path]:
102
- inpainted_dir = pathlib.Path(base_dir) / "inpainted_poster"
103
- return sorted(inpainted_dir.iterdir(), key=file_sorter)
104
-
105
-
106
- def get_basnet_map_files(base_dir: str) -> List[pathlib.Path]:
107
- basnet_map_dir = pathlib.Path(base_dir) / "saliencymaps_basnet"
108
- return sorted(basnet_map_dir.iterdir(), key=file_sorter)
109
-
110
-
111
- def get_pfpn_map_files(base_dir: str) -> List[pathlib.Path]:
112
- pfpn_map_dir = pathlib.Path(base_dir) / "saliencymaps_pfpn"
113
- return sorted(pfpn_map_dir.iterdir(), key=file_sorter)
114
-
115
-
116
- def get_canvas_files(base_dir: str) -> List[pathlib.Path]:
117
- canvas_dir = pathlib.Path(base_dir) / "image_canvas"
118
- return sorted(canvas_dir.iterdir(), key=lambda f: int(f.stem))
119
-
120
-
121
- class PosterLayoutDataset(ds.GeneratorBasedBuilder):
122
- VERSION = ds.Version("1.0.0")
123
- BUILDER_CONFIGS = [ds.BuilderConfig(version=VERSION)]
124
-
125
- def _info(self) -> ds.DatasetInfo:
126
- features = ds.Features(
127
- {
128
- "original_poster": ds.Image(),
129
- "inpainted_poster": ds.Image(),
130
- "basnet_saliency_map": ds.Image(),
131
- "pfpn_saliency_map": ds.Image(),
132
- "canvas": ds.Image(),
133
- }
134
- )
135
- return ds.DatasetInfo(
136
- description=_DESCRIPTION,
137
- citation=_CITATION,
138
- homepage=_HOMEPAGE,
139
- license=_LICENSE,
140
- features=features,
141
- )
142
-
143
- @property
144
- def _manual_download_instructions(self) -> str:
145
- return (
146
- "To use PKU-PosterLayout dataset, you need to download the poster image "
147
- "and saliency maps via [PKU Netdisk](https://disk.pku.edu.cn/link/999C6E97BB354DF8AD0F9E1F9003BE05) "
148
- "or [Google Drive](https://drive.google.com/drive/folders/1Gk202RVs9Qy2zbJUNeurC1CaQYNU-Vuv?usp=share_link)."
149
- )
150
-
151
- def _download_from_hf(self, dl_manager: ds.DownloadManager) -> DatasetUrls:
152
- return dl_manager.download_and_extract(_URLS)
153
-
154
- def _download_from_local(self, dl_manager: ds.DownloadManager) -> DatasetUrls:
155
- assert dl_manager.manual_dir is not None, dl_manager.manual_dir
156
- dir_path = os.path.expanduser(dl_manager.manual_dir)
157
-
158
- tng_dir_path = os.path.join(dir_path, "train")
159
- tst_dir_path = os.path.join(dir_path, "test")
160
-
161
- if not os.path.exists(dir_path):
162
- raise FileNotFoundError(
163
- "Make sure you have downloaded and placed the PKU-PosterLayout dataset correctly. "
164
'Furthermore, you should check that a manual dir via `datasets.load_dataset("shunk031/PKU-PosterLayout", data_dir=...)` '
165
- "that include zip files from the downloaded files. "
166
- f"Manual downloaded instructions: {self._manual_download_instructions}"
167
- )
168
- return dl_manager.extract(
169
- path_or_paths={
170
- "train": {
171
- "poster": {
172
- "original": os.path.join(tng_dir_path, "inpainted_poster.zip"),
173
- "inpainted": os.path.join(tng_dir_path, "inpainted_poster.zip"),
174
- },
175
- "saliency_maps": {
176
- "pfpn": os.path.join(tng_dir_path, "saliencymaps_pfpn.zip"),
177
- "basnet": os.path.join(tng_dir_path, "saliencymaps_basnet.zip"),
178
- },
179
- },
180
- "test": {
181
- "poster": {
182
- "canvas": os.path.join(tst_dir_path, "image_canvas.zip"),
183
- },
184
- "saliency_maps": {
185
- "pfpn": os.path.join(tst_dir_path, "salieycmaps_pfpn.zip"),
186
- "basnet": os.path.join(tst_dir_path, "saliencymaps_basnet.zip"),
187
- },
188
- },
189
- }
190
- )
191
-
192
- def _split_generators(self, dl_manager: ds.DownloadManager):
193
- file_paths = (
194
- self._download_from_hf(dl_manager)
195
- if dl_manager.download_config.token
196
- else self._download_from_local(dl_manager)
197
- )
198
-
199
- tng_files = file_paths["train"]
200
- tst_files = file_paths["test"]
201
-
202
- return [
203
- ds.SplitGenerator(
204
- name=ds.Split.TRAIN,
205
- gen_kwargs={
206
- "poster": tng_files["poster"],
207
- "saliency_maps": tng_files["saliency_maps"],
208
- },
209
- ),
210
- ds.SplitGenerator(
211
- name=ds.Split.TEST,
212
- gen_kwargs={
213
- "poster": tst_files["poster"],
214
- "saliency_maps": tst_files["saliency_maps"],
215
- },
216
- ),
217
- ]
218
-
219
- def _generate_train_examples(
220
- self, poster: TrainPoster, saliency_maps: SaliencyMaps
221
- ):
222
- poster_files = get_original_poster_files(base_dir=poster["original"])
223
- inpainted_files = get_inpainted_poster_files(base_dir=poster["inpainted"])
224
-
225
- basnet_map_files = get_basnet_map_files(base_dir=saliency_maps["basnet"])
226
- pfpn_map_files = get_pfpn_map_files(base_dir=saliency_maps["pfpn"])
227
-
228
- assert (
229
- len(poster_files)
230
- == len(inpainted_files)
231
- == len(basnet_map_files)
232
- == len(pfpn_map_files)
233
- )
234
-
235
- it = zip(poster_files, inpainted_files, basnet_map_files, pfpn_map_files)
236
- for i, (
237
- original_poster_path,
238
- inpainted_poster_path,
239
- basnet_map_path,
240
- pfpn_map_path,
241
- ) in enumerate(it):
242
- yield i, {
243
- "original_poster": load_image(original_poster_path),
244
- "inpainted_poster": load_image(inpainted_poster_path),
245
- "basnet_saliency_map": load_image(basnet_map_path),
246
- "pfpn_saliency_map": load_image(pfpn_map_path),
247
- "canvas": None,
248
- }
249
-
250
- def _generate_test_examples(self, poster: TestPoster, saliency_maps: SaliencyMaps):
251
- canvas_files = get_canvas_files(base_dir=poster["canvas"])
252
-
253
- basnet_map_files = get_basnet_map_files(base_dir=saliency_maps["basnet"])
254
- pfpn_map_files = get_pfpn_map_files(base_dir=saliency_maps["pfpn"])
255
-
256
- assert len(canvas_files) == len(basnet_map_files) == len(pfpn_map_files)
257
- it = zip(canvas_files, basnet_map_files, pfpn_map_files)
258
- for i, (canvas_path, basnet_map_path, pfpn_map_path) in enumerate(it):
259
- yield i, {
260
- "original_poster": None,
261
- "inpainted_poster": None,
262
- "basnet_saliency_map": load_image(basnet_map_path),
263
- "pfpn_saliency_map": load_image(pfpn_map_path),
264
- "canvas": load_image(canvas_path),
265
- }
266
-
267
- def _generate_examples(
268
- self, poster: Union[TrainPoster, TestPoster], saliency_maps: SaliencyMaps
269
- ):
270
- if "original" in poster and "inpainted" in poster:
271
- yield from self._generate_train_examples(
272
- poster=cast(TrainPoster, poster), saliency_maps=saliency_maps
273
- )
274
- elif "canvas" in poster:
275
- yield from self._generate_test_examples(
276
- poster=cast(TestPoster, poster), saliency_maps=saliency_maps
277
- )
278
- else:
279
- raise ValueError("Invalid dataset")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
poetry.lock DELETED
The diff for this file is too large to render. See raw diff
 
pyproject.toml DELETED
@@ -1,21 +0,0 @@
1
- [tool.poetry]
2
- name = "huggingface-datasets-posterlayout"
3
- version = "0.1.0"
4
- description = ""
5
- authors = ["Shunsuke KITADA <[email protected]>"]
6
- readme = "README.md"
7
-
8
- [tool.poetry.dependencies]
9
- python = "^3.9"
10
- datasets = {extras = ["vision"], version = "^2.14.6"}
11
-
12
-
13
- [tool.poetry.group.dev.dependencies]
14
- ruff = "^0.1.3"
15
- black = "^23.10.1"
16
- mypy = "^1.6.1"
17
- pytest = "^7.4.3"
18
-
19
- [build-system]
20
- requires = ["poetry-core"]
21
- build-backend = "poetry.core.masonry.api"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
tests/PKU-PosterLayout_test.py DELETED
@@ -1,30 +0,0 @@
1
- import os
2
-
3
- import datasets as ds
4
- import pytest
5
-
6
-
7
- @pytest.fixture
8
- def dataset_path() -> str:
9
- return "PKU-PosterLayout.py"
10
-
11
-
12
- @pytest.mark.skipif(
13
- condition=bool(os.environ.get("CI", False)),
14
- reason=(
15
- "Because this loading script downloads a large dataset, "
16
- "we will skip running it on CI."
17
- ),
18
- )
19
- @pytest.mark.parametrize(
20
- argnames=(
21
- "expected_num_train",
22
- "expected_num_test",
23
- ),
24
- argvalues=((9974, 905),),
25
- )
26
- def test_load_dataset(dataset_path: str, expected_num_train: int, expected_num_test):
27
- dataset = ds.load_dataset(path=dataset_path, token=True)
28
-
29
- assert dataset["train"].num_rows == expected_num_train
30
- assert dataset["test"].num_rows == expected_num_test
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
tests/__init__.py DELETED
File without changes