Datasets:

Tasks:
Other
Languages:
Chinese
ArXiv:
License:
shunk031 committed on
Commit
f689054
1 Parent(s): 977bca4

Initialize (#1)

Browse files

* add files

* update

* add CI settings

* add settings for CI

* update README

* update

* update

* update

.github/workflows/ci.yaml ADDED
@@ -0,0 +1,49 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Continuous-integration workflow: formatting, linting, type checks, and the
# test suite, run across the supported Python versions.
name: CI

on:
  push:
    branches: [main]
  pull_request:
    branches: [main]
    # Documentation-only changes do not need a CI run.
    paths-ignore:
      - "README.md"

jobs:
  test:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        # NOTE(review): keep in sync with `python = "^3.9"` in pyproject.toml.
        python-version: ["3.9", "3.10"]

    steps:
      - uses: actions/checkout@v3

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}

      - name: Install dependencies
        run: |
          pip install -U pip setuptools wheel poetry
          poetry install

      # Fails the build if any file is not black-formatted.
      - name: Format
        run: |
          poetry run black --check .

      - name: Lint
        run: |
          poetry run ruff .

      - name: Type check
        run: |
          poetry run mypy . \
            --ignore-missing-imports \
            --no-strict-optional \
            --no-site-packages \
            --cache-dir=/dev/null

      # -rf: report a short summary of failed tests at the end of the run.
      - name: Run tests
        run: |
          poetry run pytest --color=yes -rf
.github/workflows/push_to_hub.yaml ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Mirrors the repository to the Hugging Face Hub, but only after the CI
# workflow has completed on the main branch.
name: Sync to Hugging Face Hub

on:
  workflow_run:
    workflows:
      - CI
    branches:
      - main
    # "completed" fires for both success and failure; the job itself does not
    # check the CI conclusion. NOTE(review): add a
    # `if: github.event.workflow_run.conclusion == 'success'` guard if pushes
    # should only happen after green CI — confirm intent.
    types:
      - completed

jobs:
  push_to_hub:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3

      - name: Push to Huggingface hub
        env:
          # Credentials come from repository secrets; GitHub Actions masks
          # them in logs.
          HF_TOKEN: ${{ secrets.HF_TOKEN }}
          HF_USERNAME: ${{ secrets.HF_USERNAME }}
        run: |
          # actions/checkout creates a shallow clone; the Hub rejects
          # shallow pushes, so fetch the full history first.
          git fetch --unshallow
          git push --force https://${HF_USERNAME}:${HF_TOKEN}@huggingface.co/datasets/${HF_USERNAME}/PKU-PosterLayout main
.gitignore ADDED
@@ -0,0 +1,176 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Created by https://www.toptal.com/developers/gitignore/api/python
2
+ # Edit at https://www.toptal.com/developers/gitignore?templates=python
3
+
4
+ ### Python ###
5
+ # Byte-compiled / optimized / DLL files
6
+ __pycache__/
7
+ *.py[cod]
8
+ *$py.class
9
+
10
+ # C extensions
11
+ *.so
12
+
13
+ # Distribution / packaging
14
+ .Python
15
+ build/
16
+ develop-eggs/
17
+ dist/
18
+ downloads/
19
+ eggs/
20
+ .eggs/
21
+ lib/
22
+ lib64/
23
+ parts/
24
+ sdist/
25
+ var/
26
+ wheels/
27
+ share/python-wheels/
28
+ *.egg-info/
29
+ .installed.cfg
30
+ *.egg
31
+ MANIFEST
32
+
33
+ # PyInstaller
34
+ # Usually these files are written by a python script from a template
35
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
36
+ *.manifest
37
+ *.spec
38
+
39
+ # Installer logs
40
+ pip-log.txt
41
+ pip-delete-this-directory.txt
42
+
43
+ # Unit test / coverage reports
44
+ htmlcov/
45
+ .tox/
46
+ .nox/
47
+ .coverage
48
+ .coverage.*
49
+ .cache
50
+ nosetests.xml
51
+ coverage.xml
52
+ *.cover
53
+ *.py,cover
54
+ .hypothesis/
55
+ .pytest_cache/
56
+ cover/
57
+
58
+ # Translations
59
+ *.mo
60
+ *.pot
61
+
62
+ # Django stuff:
63
+ *.log
64
+ local_settings.py
65
+ db.sqlite3
66
+ db.sqlite3-journal
67
+
68
+ # Flask stuff:
69
+ instance/
70
+ .webassets-cache
71
+
72
+ # Scrapy stuff:
73
+ .scrapy
74
+
75
+ # Sphinx documentation
76
+ docs/_build/
77
+
78
+ # PyBuilder
79
+ .pybuilder/
80
+ target/
81
+
82
+ # Jupyter Notebook
83
+ .ipynb_checkpoints
84
+
85
+ # IPython
86
+ profile_default/
87
+ ipython_config.py
88
+
89
+ # pyenv
90
+ # For a library or package, you might want to ignore these files since the code is
91
+ # intended to run in multiple environments; otherwise, check them in:
92
+ .python-version
93
+
94
+ # pipenv
95
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
96
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
97
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
98
+ # install all needed dependencies.
99
+ #Pipfile.lock
100
+
101
+ # poetry
102
+ # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
103
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
104
+ # commonly ignored for libraries.
105
+ # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
106
+ #poetry.lock
107
+
108
+ # pdm
109
+ # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
110
+ #pdm.lock
111
+ # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
112
+ # in version control.
113
+ # https://pdm.fming.dev/#use-with-ide
114
+ .pdm.toml
115
+
116
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
117
+ __pypackages__/
118
+
119
+ # Celery stuff
120
+ celerybeat-schedule
121
+ celerybeat.pid
122
+
123
+ # SageMath parsed files
124
+ *.sage.py
125
+
126
+ # Environments
127
+ .env
128
+ .venv
129
+ env/
130
+ venv/
131
+ ENV/
132
+ env.bak/
133
+ venv.bak/
134
+
135
+ # Spyder project settings
136
+ .spyderproject
137
+ .spyproject
138
+
139
+ # Rope project settings
140
+ .ropeproject
141
+
142
+ # mkdocs documentation
143
+ /site
144
+
145
+ # mypy
146
+ .mypy_cache/
147
+ .dmypy.json
148
+ dmypy.json
149
+
150
+ # Pyre type checker
151
+ .pyre/
152
+
153
+ # pytype static type analyzer
154
+ .pytype/
155
+
156
+ # Cython debug symbols
157
+ cython_debug/
158
+
159
+ # PyCharm
160
+ # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
161
+ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
162
+ # and can be added to the global gitignore or merged into this file. For a more nuclear
163
+ # option (not recommended) you can uncomment the following to ignore the entire idea folder.
164
+ #.idea/
165
+
166
+ ### Python Patch ###
167
+ # Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
168
+ poetry.toml
169
+
170
+ # ruff
171
+ .ruff_cache/
172
+
173
+ # LSP config files
174
+ pyrightconfig.json
175
+
176
+ # End of https://www.toptal.com/developers/gitignore/api/python
PKU-PosterLayout.py ADDED
@@ -0,0 +1,279 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import pathlib
3
+ from typing import List, TypedDict, Union, cast
4
+
5
+ import datasets as ds
6
+ from datasets.utils.logging import get_logger
7
+ from PIL import Image
8
+ from PIL.Image import Image as PilImage
9
+
10
# Module-level logger for this loading script.
logger = get_logger(__name__)

# One-line summary surfaced through `ds.DatasetInfo.description`.
_DESCRIPTION = (
    "A New Dataset and Benchmark for Content-aware Visual-Textual Presentation Layout"
)

# BibTeX entry for the CVPR 2023 paper introducing PKU-PosterLayout.
_CITATION = """\
@inproceedings{hsu2023posterlayout,
  title={PosterLayout: A New Benchmark and Approach for Content-aware Visual-Textual Presentation Layout},
  author={Hsu, Hsiao Yuan and He, Xiangteng and Peng, Yuxin and Kong, Hao and Zhang, Qing},
  booktitle={Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition},
  pages={6018--6026},
  year={2023}
}
"""

# Homepage hosted by the dataset authors.
_HOMEPAGE = "http://59.108.48.34/tiki/PosterLayout/"

# License statement passed through to `ds.DatasetInfo.license`.
_LICENSE = "Images in PKU PosterLayout are distributed under the CC BY-SA 4.0 license."
29
+
30
+
31
class TrainPoster(TypedDict):
    """Archive URLs/paths for the train-split poster images."""

    # archive of the original poster images
    original: str
    # archive of the inpainted poster images
    inpainted: str
34
+
35
+
36
class TestPoster(TypedDict):
    """Archive URL/path for the test-split image canvases."""

    # archive of the image canvases used for the test split
    canvas: str
38
+
39
+
40
class SaliencyMaps(TypedDict):
    """Archive URLs/paths for the two kinds of saliency maps."""

    # archive of PFPN saliency maps
    pfpn: str
    # archive of BASNet saliency maps
    basnet: str
43
+
44
+
45
class TrainDataset(TypedDict):
    """All archives belonging to the train split."""

    poster: TrainPoster
    saliency_maps: SaliencyMaps
48
+
49
+
50
class TestDataset(TypedDict):
    """All archives belonging to the test split."""

    poster: TestPoster
    saliency_maps: SaliencyMaps
53
+
54
+
55
class DatasetUrls(TypedDict):
    """Top-level mapping of split name to that split's archives."""

    train: TrainDataset
    test: TestDataset
58
+
59
+
60
# The author of this loading script has uploaded the poster images and
# saliency maps to a private HuggingFace repository to facilitate testing.
# If you are using this loading script, please download the data from the
# appropriate channels (see `_manual_download_instructions`: PKU Netdisk or
# Google Drive).
# NOTE(review): the original comment here referred to the "Magazine dataset"
# and an OneDrive link, which looks copy-pasted from another loading script —
# confirm with the author. If there are any issues regarding redistribution,
# please contact us; we will address it promptly.
_URLS: DatasetUrls = {
    "train": {
        "poster": {
            "original": "https://huggingface.co/datasets/shunk031/PKU-PosterLayout-private/resolve/main/train/original_poster.zip",
            "inpainted": "https://huggingface.co/datasets/shunk031/PKU-PosterLayout-private/resolve/main/train/inpainted_poster.zip",
        },
        "saliency_maps": {
            "pfpn": "https://huggingface.co/datasets/shunk031/PKU-PosterLayout-private/resolve/main/train/saliencymaps_pfpn.zip",
            "basnet": "https://huggingface.co/datasets/shunk031/PKU-PosterLayout-private/resolve/main/train/saliencymaps_basnet.zip",
        },
    },
    "test": {
        "poster": {
            "canvas": "https://huggingface.co/datasets/shunk031/PKU-PosterLayout-private/resolve/main/test/image_canvas.zip",
        },
        "saliency_maps": {
            "pfpn": "https://huggingface.co/datasets/shunk031/PKU-PosterLayout-private/resolve/main/test/saliencymaps_pfpn.zip",
            "basnet": "https://huggingface.co/datasets/shunk031/PKU-PosterLayout-private/resolve/main/test/saliencymaps_basnet.zip",
        },
    },
}
84
+
85
+
86
def file_sorter(f: pathlib.Path) -> int:
    """Sort key: the integer prefix of the file stem (the text before the
    first underscore, or the whole stem if there is none)."""
    return int(f.stem.split("_")[0])
89
+
90
+
91
def load_image(file_path: pathlib.Path) -> PilImage:
    """Open the image at *file_path* with Pillow, logging the path first."""
    logger.info(f"Load from {file_path}")
    return Image.open(file_path)
94
+
95
+
96
def get_original_poster_files(base_dir: str) -> List[pathlib.Path]:
    """Return the files under ``<base_dir>/original_poster`` sorted
    numerically by their integer stem."""
    target = pathlib.Path(base_dir, "original_poster")
    files = list(target.iterdir())
    files.sort(key=lambda p: int(p.stem))
    return files
99
+
100
+
101
def get_inpainted_poster_files(base_dir: str) -> List[pathlib.Path]:
    """Return the files under ``<base_dir>/inpainted_poster``, ordered by the
    numeric prefix of each stem (the part before the first underscore)."""
    target = pathlib.Path(base_dir, "inpainted_poster")
    files = list(target.iterdir())
    # Same ordering as the module-level ``file_sorter`` helper, inlined here.
    files.sort(key=lambda p: int(p.stem.split("_")[0]))
    return files
104
+
105
+
106
def get_basnet_map_files(base_dir: str) -> List[pathlib.Path]:
    """Return the files under ``<base_dir>/saliencymaps_basnet``, ordered by
    the numeric prefix of each stem (the part before the first underscore)."""
    target = pathlib.Path(base_dir, "saliencymaps_basnet")
    files = list(target.iterdir())
    # Same ordering as the module-level ``file_sorter`` helper, inlined here.
    files.sort(key=lambda p: int(p.stem.split("_")[0]))
    return files
109
+
110
+
111
def get_pfpn_map_files(base_dir: str) -> List[pathlib.Path]:
    """Return the files under ``<base_dir>/saliencymaps_pfpn``, ordered by
    the numeric prefix of each stem (the part before the first underscore)."""
    target = pathlib.Path(base_dir, "saliencymaps_pfpn")
    files = list(target.iterdir())
    # Same ordering as the module-level ``file_sorter`` helper, inlined here.
    files.sort(key=lambda p: int(p.stem.split("_")[0]))
    return files
114
+
115
+
116
def get_canvas_files(base_dir: str) -> List[pathlib.Path]:
    """Return the files under ``<base_dir>/image_canvas`` sorted numerically
    by their integer stem."""
    target = pathlib.Path(base_dir, "image_canvas")
    files = list(target.iterdir())
    files.sort(key=lambda p: int(p.stem))
    return files
119
+
120
+
121
class PosterLayoutDataset(ds.GeneratorBasedBuilder):
    """HuggingFace dataset builder for PKU-PosterLayout.

    The train split yields original/inpainted poster images plus saliency
    maps; the test split yields image canvases plus saliency maps. Fields
    that do not apply to a split are yielded as ``None``.
    """

    VERSION = ds.Version("1.0.0")
    BUILDER_CONFIGS = [ds.BuilderConfig(version=VERSION)]

    def _info(self) -> ds.DatasetInfo:
        """Declare the feature schema shared by both splits."""
        features = ds.Features(
            {
                "original_poster": ds.Image(),
                "inpainted_poster": ds.Image(),
                "basnet_saliency_map": ds.Image(),
                "pfpn_saliency_map": ds.Image(),
                "canvas": ds.Image(),
            }
        )
        return ds.DatasetInfo(
            description=_DESCRIPTION,
            citation=_CITATION,
            homepage=_HOMEPAGE,
            license=_LICENSE,
            features=features,
        )

    @property
    def _manual_download_instructions(self) -> str:
        """Instructions shown to users who must download the data manually."""
        return (
            "To use PKU-PosterLayout dataset, you need to download the poster image "
            "and saliency maps via [PKU Netdisk](https://disk.pku.edu.cn/link/999C6E97BB354DF8AD0F9E1F9003BE05) "
            "or [Google Drive](https://drive.google.com/drive/folders/1Gk202RVs9Qy2zbJUNeurC1CaQYNU-Vuv?usp=share_link)."
        )

    def _download_from_hf(self, dl_manager: ds.DownloadManager) -> DatasetUrls:
        """Download and extract the archives hosted on the (private) HF Hub."""
        return dl_manager.download_and_extract(_URLS)

    def _download_from_local(self, dl_manager: ds.DownloadManager) -> DatasetUrls:
        """Extract manually downloaded zip archives placed under ``data_dir``.

        Expects the layout documented in the README:
        ``<data_dir>/train/*.zip`` and ``<data_dir>/test/*.zip``.

        Raises:
            FileNotFoundError: if ``data_dir`` does not exist.
        """
        assert dl_manager.manual_dir is not None, dl_manager.manual_dir
        dir_path = os.path.expanduser(dl_manager.manual_dir)

        tng_dir_path = os.path.join(dir_path, "train")
        tst_dir_path = os.path.join(dir_path, "test")

        if not os.path.exists(dir_path):
            # Typo/grammar in the original message fixed ("shoud" -> "should").
            raise FileNotFoundError(
                "Make sure you have downloaded and placed the PKU-PosterLayout dataset correctly. "
                'Furthermore, you should specify a manual dir via `datasets.load_dataset("shunk031/PKU-PosterLayout", data_dir=...)` '
                "that includes the zip files from the downloaded files. "
                f"Manual downloaded instructions: {self._manual_download_instructions}"
            )
        return dl_manager.extract(
            path_or_paths={
                "train": {
                    "poster": {
                        # BUG FIX: this previously pointed at
                        # "inpainted_poster.zip", so the "original" posters
                        # were silently the inpainted ones.
                        "original": os.path.join(tng_dir_path, "original_poster.zip"),
                        "inpainted": os.path.join(tng_dir_path, "inpainted_poster.zip"),
                    },
                    "saliency_maps": {
                        "pfpn": os.path.join(tng_dir_path, "saliencymaps_pfpn.zip"),
                        "basnet": os.path.join(tng_dir_path, "saliencymaps_basnet.zip"),
                    },
                },
                "test": {
                    "poster": {
                        "canvas": os.path.join(tst_dir_path, "image_canvas.zip"),
                    },
                    "saliency_maps": {
                        # BUG FIX: filename typo ("salieycmaps_pfpn.zip").
                        "pfpn": os.path.join(tst_dir_path, "saliencymaps_pfpn.zip"),
                        "basnet": os.path.join(tst_dir_path, "saliencymaps_basnet.zip"),
                    },
                },
            }
        )

    def _split_generators(self, dl_manager: ds.DownloadManager):
        """Build the train/test split generators.

        Downloads from the HF Hub when an auth token is configured; otherwise
        falls back to the manually downloaded local archives.
        """
        file_paths = (
            self._download_from_hf(dl_manager)
            if dl_manager.download_config.token
            else self._download_from_local(dl_manager)
        )

        tng_files = file_paths["train"]
        tst_files = file_paths["test"]

        return [
            ds.SplitGenerator(
                name=ds.Split.TRAIN,
                gen_kwargs={
                    "poster": tng_files["poster"],
                    "saliency_maps": tng_files["saliency_maps"],
                },
            ),
            ds.SplitGenerator(
                name=ds.Split.TEST,
                gen_kwargs={
                    "poster": tst_files["poster"],
                    "saliency_maps": tst_files["saliency_maps"],
                },
            ),
        ]

    def _generate_train_examples(
        self, poster: TrainPoster, saliency_maps: SaliencyMaps
    ):
        """Yield train examples (``canvas`` is ``None`` for this split)."""
        poster_files = get_original_poster_files(base_dir=poster["original"])
        inpainted_files = get_inpainted_poster_files(base_dir=poster["inpainted"])

        basnet_map_files = get_basnet_map_files(base_dir=saliency_maps["basnet"])
        pfpn_map_files = get_pfpn_map_files(base_dir=saliency_maps["pfpn"])

        # The four listings must align one-to-one by index after sorting.
        assert (
            len(poster_files)
            == len(inpainted_files)
            == len(basnet_map_files)
            == len(pfpn_map_files)
        )

        it = zip(poster_files, inpainted_files, basnet_map_files, pfpn_map_files)
        for i, (
            original_poster_path,
            inpainted_poster_path,
            basnet_map_path,
            pfpn_map_path,
        ) in enumerate(it):
            yield i, {
                "original_poster": load_image(original_poster_path),
                "inpainted_poster": load_image(inpainted_poster_path),
                "basnet_saliency_map": load_image(basnet_map_path),
                "pfpn_saliency_map": load_image(pfpn_map_path),
                "canvas": None,
            }

    def _generate_test_examples(self, poster: TestPoster, saliency_maps: SaliencyMaps):
        """Yield test examples (poster fields are ``None`` for this split)."""
        canvas_files = get_canvas_files(base_dir=poster["canvas"])

        basnet_map_files = get_basnet_map_files(base_dir=saliency_maps["basnet"])
        pfpn_map_files = get_pfpn_map_files(base_dir=saliency_maps["pfpn"])

        assert len(canvas_files) == len(basnet_map_files) == len(pfpn_map_files)
        it = zip(canvas_files, basnet_map_files, pfpn_map_files)
        for i, (canvas_path, basnet_map_path, pfpn_map_path) in enumerate(it):
            yield i, {
                "original_poster": None,
                "inpainted_poster": None,
                "basnet_saliency_map": load_image(basnet_map_path),
                "pfpn_saliency_map": load_image(pfpn_map_path),
                "canvas": load_image(canvas_path),
            }

    def _generate_examples(
        self, poster: Union[TrainPoster, TestPoster], saliency_maps: SaliencyMaps
    ):
        """Dispatch to the train or test generator based on the poster keys."""
        if "original" in poster and "inpainted" in poster:
            yield from self._generate_train_examples(
                poster=cast(TrainPoster, poster), saliency_maps=saliency_maps
            )
        elif "canvas" in poster:
            yield from self._generate_test_examples(
                poster=cast(TestPoster, poster), saliency_maps=saliency_maps
            )
        else:
            raise ValueError("Invalid dataset")
README.md ADDED
@@ -0,0 +1,184 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ annotations_creators:
3
+ - expert-generated
4
+ language:
5
+ - zh
6
+ language_creators:
7
+ - found
8
+ license:
9
+ - cc-by-sa-4.0
10
+ multilinguality: []
11
+ pretty_name: PKU-PosterLayout
12
+ size_categories: []
13
+ source_datasets:
14
+ - extended
15
+ tags:
16
+ - layout-generation
17
+ task_categories:
18
+ - other
19
+ task_ids: []
20
+ ---
21
+
22
+ # Dataset Card for PKU-PosterLayout
23
+
24
+ [![CI](https://github.com/shunk031/huggingface-datasets_PKU-PosterLayout/actions/workflows/ci.yaml/badge.svg)](https://github.com/shunk031/huggingface-datasets_PKU-PosterLayout/actions/workflows/ci.yaml)
25
+
26
+ ## Table of Contents
27
+
28
+ - [Dataset Card for PKU-PosterLayout](#dataset-card-for-pku-posterlayout)
29
+ - [Table of Contents](#table-of-contents)
30
+ - [Dataset Description](#dataset-description)
31
+ - [Dataset Summary](#dataset-summary)
32
+ - [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards)
33
+ - [Languages](#languages)
34
+ - [Dataset Structure](#dataset-structure)
35
+ - [Data Instances](#data-instances)
36
+ - [Data Fields](#data-fields)
37
+ - [Data Splits](#data-splits)
38
+ - [Dataset Creation](#dataset-creation)
39
+ - [Curation Rationale](#curation-rationale)
40
+ - [Source Data](#source-data)
41
+ - [Initial Data Collection and Normalization](#initial-data-collection-and-normalization)
42
+ - [Who are the source language producers?](#who-are-the-source-language-producers)
43
+ - [Annotations](#annotations)
44
+ - [Annotation process](#annotation-process)
45
+ - [Who are the annotators?](#who-are-the-annotators)
46
+ - [Personal and Sensitive Information](#personal-and-sensitive-information)
47
+ - [Considerations for Using the Data](#considerations-for-using-the-data)
48
+ - [Social Impact of Dataset](#social-impact-of-dataset)
49
+ - [Discussion of Biases](#discussion-of-biases)
50
+ - [Other Known Limitations](#other-known-limitations)
51
+ - [Additional Information](#additional-information)
52
+ - [Dataset Curators](#dataset-curators)
53
+ - [Licensing Information](#licensing-information)
54
+ - [Citation Information](#citation-information)
55
+ - [Contributions](#contributions)
56
+
57
+ ## Dataset Description
58
+
59
+ - **Homepage:** http://59.108.48.34/tiki/PosterLayout/
60
+ - **Repository:** https://github.com/shunk031/huggingface-datasets_PKU-PosterLayout
61
+ - **Paper (Preprint):** https://arxiv.org/abs/2303.15937
62
+ - **Paper (CVPR2023):** https://openaccess.thecvf.com/content/CVPR2023/html/Hsu_PosterLayout_A_New_Benchmark_and_Approach_for_Content-Aware_Visual-Textual_Presentation_CVPR_2023_paper.html
63
+
64
+ ### Dataset Summary
65
+
66
+ PKU-PosterLayout is a new dataset and benchmark for content-aware visual-textual presentation layout.
67
+
68
+ ### Supported Tasks and Leaderboards
69
+
70
+ [More Information Needed]
71
+
72
+ ### Languages
73
+
74
+ The language data in PKU-PosterLayout is in Chinese ([BCP-47 zh](https://www.rfc-editor.org/info/bcp47)).
75
+
76
+ ## Dataset Structure
77
+
78
+ ### Data Instances
79
+
80
+ To use PKU-PosterLayout dataset, you need to download the poster image and saliency maps via [PKU Netdisk](https://disk.pku.edu.cn/link/999C6E97BB354DF8AD0F9E1F9003BE05) or [Google Drive](https://drive.google.com/drive/folders/1Gk202RVs9Qy2zbJUNeurC1CaQYNU-Vuv?usp=share_link).
81
+
82
+ ```
83
+ /path/to/datasets
84
+ ├── train
85
+ │ ├── inpainted_poster.zip
86
+ │ ├── original_poster.zip
87
+ │ ├── saliencymaps_basnet.zip
88
+ │ └── saliencymaps_pfpn.zip
89
+ └── test
90
+ ├── image_canvas.zip
91
+ ├── saliencymaps_basnet.zip
92
+ └── saliencymaps_pfpn.zip
93
+ ```
94
+
95
+ ```python
96
+ import datasets as ds
97
+
98
+ dataset = ds.load_dataset(
99
+ path="shunk031/PKU-PosterLayout",
100
+ data_dir="/path/to/datasets/",
101
+ )
102
+ ```
103
+
104
+ ### Data Fields
105
+
106
+ [More Information Needed]
107
+
108
+ ### Data Splits
109
+
110
+ [More Information Needed]
111
+
112
+ ## Dataset Creation
113
+
114
+ ### Curation Rationale
115
+
116
+ [More Information Needed]
117
+
118
+ ### Source Data
119
+
120
+ [More Information Needed]
121
+
122
+ #### Initial Data Collection and Normalization
123
+
124
+ [More Information Needed]
125
+
126
+ #### Who are the source language producers?
127
+
128
+ [More Information Needed]
129
+
130
+ ### Annotations
131
+
132
+ [More Information Needed]
133
+
134
+ #### Annotation process
135
+
136
+ [More Information Needed]
137
+
138
+ #### Who are the annotators?
139
+
140
+ [More Information Needed]
141
+
142
+ ### Personal and Sensitive Information
143
+
144
+ [More Information Needed]
145
+
146
+ ## Considerations for Using the Data
147
+
148
+ ### Social Impact of Dataset
149
+
150
+ [More Information Needed]
151
+
152
+ ### Discussion of Biases
153
+
154
+ [More Information Needed]
155
+
156
+ ### Other Known Limitations
157
+
158
+ [More Information Needed]
159
+
160
+ ## Additional Information
161
+
162
+ ### Dataset Curators
163
+
164
+ [More Information Needed]
165
+
166
+ ### Licensing Information
167
+
168
+ [More Information Needed]
169
+
170
+ ### Citation Information
171
+
172
+ ```bibtex
173
+ @inproceedings{hsu2023posterlayout,
174
+ title={PosterLayout: A New Benchmark and Approach for Content-aware Visual-Textual Presentation Layout},
175
+ author={Hsu, Hsiao Yuan and He, Xiangteng and Peng, Yuxin and Kong, Hao and Zhang, Qing},
176
+ booktitle={Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition},
177
+ pages={6018--6026},
178
+ year={2023}
179
+ }
180
+ ```
181
+
182
+ ### Contributions
183
+
184
+ Thanks to [@PKU-ICST-MIPL](https://github.com/PKU-ICST-MIPL) for creating this dataset.
poetry.lock ADDED
The diff for this file is too large to render. See raw diff
 
pyproject.toml ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [tool.poetry]
2
+ name = "huggingface-datasets-posterlayout"
3
+ version = "0.1.0"
4
+ description = ""
5
+ authors = ["Shunsuke KITADA <[email protected]>"]
6
+ readme = "README.md"
7
+
8
+ [tool.poetry.dependencies]
9
+ python = "^3.9"
10
+ datasets = {extras = ["vision"], version = "^2.14.6"}
11
+
12
+
13
+ [tool.poetry.group.dev.dependencies]
14
+ ruff = "^0.1.3"
15
+ black = "^23.10.1"
16
+ mypy = "^1.6.1"
17
+ pytest = "^7.4.3"
18
+
19
+ [build-system]
20
+ requires = ["poetry-core"]
21
+ build-backend = "poetry.core.masonry.api"
tests/PKU-PosterLayout_test.py ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+
3
+ import datasets as ds
4
+ import pytest
5
+
6
+
7
@pytest.fixture
def dataset_path() -> str:
    """Path to the loading script under test, relative to the repo root."""
    return "PKU-PosterLayout.py"
10
+
11
+
12
@pytest.mark.skipif(
    # NOTE(review): any non-empty CI value (even the string "false") is
    # truthy here, since bool() is applied to the raw env string — confirm
    # this is the intended behavior.
    condition=bool(os.environ.get("CI", False)),
    reason=(
        "Because this loading script downloads a large dataset, "
        "we will skip running it on CI."
    ),
)
@pytest.mark.parametrize(
    argnames=(
        "expected_num_train",
        "expected_num_test",
    ),
    argvalues=((9974, 905),),
)
def test_load_dataset(dataset_path: str, expected_num_train: int, expected_num_test: int):
    """Smoke test: the script loads and each split has the expected row count."""
    # token=True uses the locally configured HF credential to reach the
    # private data repository.
    dataset = ds.load_dataset(path=dataset_path, token=True)

    assert dataset["train"].num_rows == expected_num_train
    assert dataset["test"].num_rows == expected_num_test
tests/__init__.py ADDED
File without changes