gdamms committed on
Commit
14aae6f
1 Parent(s): 9830498

More datasets

Browse files
README.md CHANGED
@@ -1,3 +1,10 @@
1
  ---
2
  license: cc-by-2.0
3
  ---
 
 
 
 
 
 
 
 
1
  ---
2
  license: cc-by-2.0
3
  ---
4
+
5
+ DS3 :
6
+ `PhotoFeederv1` + `iNatv1`
7
+
8
+ DS4 :
9
+ `PhotoFeederv1` + `PhotoFeederv2`
10
+ Seulement les 15
bench.sh ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Benchmark loop: for each DS9.i subset size, regenerate the subset and
# train nb_reps[i] times, keeping only the "scores" lines of each run in
# out_$i.tmp.
i=0
nb_reps=(10 10 10 5 3 3)
for nb_rep in "${nb_reps[@]}"; do
    for j in $(seq 1 "$nb_rep"); do
        python3 mergesets.py
        cd ../ornithoscope-appel-offre
        python3 src/dataset2ultra.py --dataset_name "DS9.$i"
        python3 src/train.py >> "out_$i.tmp"
        # Filter the accumulated log down to score lines only
        # (grep reads the file directly; no useless use of cat).
        grep "scores" "out_$i.tmp" >> "out.tmp"
        mv "out.tmp" "out_$i.tmp"
        cd ../ornithoscope
    done
    i=$((i + 1))
done
info.py ADDED
@@ -0,0 +1,116 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import json
3
+
4
+
5
class Table:
    """Render a 2-D grid of arbitrary values as aligned plain text.

    Rows may have different lengths; short rows are padded with empty
    strings.  Cell values are converted with ``str()`` and may contain
    newlines, in which case the whole row grows to the tallest cell.
    Cells themselves may be nested ``Table`` objects (their ``__str__``
    is multi-line).
    """

    def __init__(self, cells: list[list]):
        # Normalise the grid: every row gets padded to the widest row.
        self.width = max(len(row) for row in cells)
        self.height = len(cells)
        self.cells = [row + [''] * (self.width - len(row)) for row in cells]

    def __str__(self):
        # Split every cell into its text lines exactly once.
        grid = [
            [str(cell).split('\n') for cell in row]
            for row in self.cells
        ]
        # Widest line per column, tallest cell per row.
        col_widths = [
            max(
                max(len(text) for text in grid[r][c])
                for r in range(self.height)
            )
            for c in range(self.width)
        ]
        row_heights = [max(len(cell) for cell in row) for row in grid]
        # Emit row by row; each visual line pads every cell to its
        # column width plus a two-space gutter.
        rendered = []
        for r, row in enumerate(grid):
            for line_no in range(row_heights[r]):
                pieces = []
                for c, cell in enumerate(row):
                    text = cell[line_no] if line_no < len(cell) else ''
                    pieces.append(
                        text + ' ' * (col_widths[c] - len(text) + 2))
                rendered.append(''.join(pieces))
        return '\n'.join(rendered)
51
+
52
+
53
def analyse(setname):
    """Count per-label boxes for every split of one dataset.

    Reads ``{dir}/{setname}_{suff}.json`` for each suffix in the
    module-level ``suffs`` and appends to the module-level ``sets_info``
    a dict of the form::

        {'setname': ..., 'train': {'counts': {...}, 'images': N}, ...}

    NOTE: no ``global`` statement is needed — ``suffs``, ``dir`` and
    ``sets_info`` are only read/mutated, never rebound.
    """
    set_info = {
        'setname': setname,
    }
    for suff in suffs:
        with open(f'{dir}/{setname}_{suff}.json', 'r') as f:
            data = json.load(f)
        # counts: label -> number of bounding boxes in this split
        counts = {}
        for value in data.values():
            for box in value['boxes']:
                counts[box['label']] = counts.get(box['label'], 0) + 1
        set_info[suff] = {
            'counts': counts,
            'images': len(data),
        }
    sets_info.append(set_info)
70
+
71
+
72
# Split suffixes and the directory holding the per-split JSON files.
suffs = ['train', 'val', 'test']
dir = 'sets'  # NOTE(review): shadows the builtin dir(); kept because analyse() reads it

# A dataset is identified by the presence of its "<name>_train.json" file.
_train_suffix = f'_{suffs[0]}.json'
datasets = sorted(
    name[:-len(_train_suffix)]
    for name in os.listdir(dir)
    if name.endswith(_train_suffix)
)
sets_info = []

for setname in datasets:
    analyse(setname)
83
+
84
+
85
# Union of every label seen in any split of any dataset, sorted for a
# stable row order in the printed tables.
species = sorted({
    label
    for set_info in sets_info
    for suff_info in set_info.values()
    if isinstance(suff_info, dict)  # skip the 'setname' string entry
    for label in suff_info['counts']
})

# Make every count table total: species missing from a split get an
# explicit 0 so all tables share the same rows.
for set_info in sets_info:
    for suff_info in set_info.values():
        if isinstance(suff_info, dict):
            counts = suff_info['counts']
            for label in species:
                counts.setdefault(label, 0)
99
+
100
+
101
def set_table(set_info):
    """Build a Table of per-species box counts for one dataset.

    Columns follow the module-level split order ``suffs`` (header row
    first); rows follow the module-level sorted ``species`` list.

    NOTE: no ``global`` statement is needed — ``suffs`` and ``species``
    are only read here, never rebound.
    """
    table = [list(suffs)]
    for specie in species:
        table.append([set_info[suff]['counts'][specie] for suff in suffs])
    return Table(table)
108
+
109
+
110
# Two-row summary: dataset names on top, one nested per-dataset count
# table underneath.  The leading '\n' on the species column aligns it
# with the nested tables' header row.
header_row = ['setname']
body_row = ['\n' + '\n'.join(species)]
for set_info in sets_info:
    header_row.append(set_info['setname'])
    body_row.append(set_table(set_info))
table = [header_row, body_row]

print(Table(table))
info.tmp ADDED
@@ -0,0 +1,38 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ setname DS1 DS2 DS3 DS4 DS5 DS6 DS7 DS8 DS9.0 DS9.1 DS9.2 DS9.3 DS9.4 DS9.5
2
+ train val test train val test train val test train val test train val test train val test train val test train val test train val test train val test train val test train val test train val test train val test
3
+ ACCMOU 1005 20 92 294 20 92 1279 20 92 315 20 92 1005 20 92 298 20 92 1344 20 92 1001 20 92 10 20 92 20 20 92 50 20 92 100 20 92 250 20 92 500 20 92
4
+ BERGRI 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 23 0 0 23 0 0 23 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
5
+ CAMPAG 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 16 0 0 16 0 0 16 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
6
+ CHAELE 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 645 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
7
+ CORNOI 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 600 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
8
+ ECUROU 1012 20 31 1344 20 31 2336 20 31 1325 20 31 1012 20 31 1344 20 31 2357 20 31 1344 20 31 10 20 31 20 20 31 50 20 31 100 20 31 250 20 31 500 20 31
9
+ ETOEUR 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 600 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
10
+ GEACHE 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 611 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
11
+ GROBEC 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1458 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
12
+ MESBLE 1007 20 425 627 20 425 1614 20 425 4988 20 425 1007 20 425 630 20 425 6018 20 425 1001 20 425 10 20 425 20 20 425 50 20 425 100 20 425 250 20 425 500 20 425
13
+ MESCHA 1028 20 2274 5103 20 2274 6111 20 2274 12317 20 2274 1028 20 2274 5146 20 2274 13408 20 2274 5146 20 2274 10 20 2274 20 20 2274 50 20 2274 100 20 2274 250 20 2274 500 20 2274
14
+ MESHUP 1396 20 0 115 20 0 1491 20 0 478 20 0 1396 20 0 116 20 0 1895 20 0 1001 20 0 10 20 0 20 20 0 50 20 0 100 20 0 250 20 0 500 20 0
15
+ MESNOI 1001 20 34 180 20 34 1161 20 34 705 20 34 1001 20 34 183 20 34 1730 20 34 1001 20 34 10 20 34 20 20 34 50 20 34 100 20 34 250 20 34 500 20 34
16
+ MESNON 1002 20 198 1033 20 198 2015 20 198 3972 20 198 1002 20 198 1045 20 198 5006 20 198 1045 20 198 10 20 198 20 20 198 50 20 198 100 20 198 250 20 198 500 20 198
17
+ MOIDOM 1033 8 23 8 8 23 1033 8 23 515 8 23 1033 8 23 77 8 23 1625 8 23 1001 8 23 10 8 23 20 8 23 50 8 23 100 8 23 250 8 23 500 8 23
18
+ MOIFRI 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 634 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
19
+ MULGRI 855 20 19 69 20 19 904 20 19 51 20 19 855 20 19 78 20 19 935 20 19 933 20 19 10 20 19 20 20 19 50 20 19 100 20 19 250 20 19 500 20 19
20
+ ORILON 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1247 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
21
+ PERCOL 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1001 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
22
+ PICEIP 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 787 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
23
+ PICMAR 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1060 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
24
+ PIEBAV 1025 20 173 393 20 173 1398 20 173 373 20 173 1025 20 173 393 20 173 1418 20 173 1001 20 173 10 20 173 20 20 173 50 20 173 100 20 173 250 20 173 500 20 173
25
+ PIGBIS 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1000 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
26
+ PINARB 1022 20 26 324 20 26 1326 20 26 783 20 26 1022 20 26 327 20 26 1831 20 26 1001 20 26 10 20 26 20 20 26 50 20 26 100 20 26 250 20 26 500 20 26
27
+ PINNOR 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 15 0 0 2400 0 0 15 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
28
+ ROUGOR 1007 20 110 127 20 110 1114 20 110 588 20 110 1007 20 110 127 20 110 1615 20 110 1001 20 110 10 20 110 20 20 110 50 20 110 100 20 110 250 20 110 500 20 110
29
+ SITTOR 1012 20 154 556 20 154 1548 20 154 2861 20 154 1012 20 154 559 20 154 3896 20 154 1001 20 154 10 20 154 20 20 154 50 20 154 100 20 154 250 20 154 500 20 154
30
+ TARAUL 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 600 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
31
+ TOUTUR 1065 20 39 43 20 39 1088 20 39 24 20 39 1065 20 39 43 20 39 1110 20 39 1001 20 39 10 20 39 20 20 39 51 20 39 100 20 39 250 20 39 500 20 39
32
+ UL 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
33
+ VEREUR 1053 20 161 743 20 161 1776 20 161 775 20 161 1053 20 161 749 20 161 1854 20 161 1001 20 161 10 20 161 20 20 161 50 20 161 100 20 161 250 20 161 500 20 161
34
+ faumel 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
35
+ human 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 26 0 0 28 0 0 26 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
36
+ noBird 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1793 0 0 1793 0 0 1793 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
37
+ unknown 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 244 0 0 589 0 0 244 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
38
+ v 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
mergesets.py CHANGED
@@ -1,9 +1,10 @@
1
  import os
2
  import json
 
 
3
 
4
 
5
  dir = 'annotations'
6
- merged = {}
7
  excluded = [
8
  'task_2021-03-01_09',
9
  'task_2021-03-01_10',
@@ -34,7 +35,28 @@ def merge_files(path):
34
  merged = merged | data
35
 
36
 
37
- iter_files(dir, merge_files)
 
 
 
38
 
39
- with open('sets/DS7_train.json', 'w') as f:
40
- json.dump(merged, f)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import os
2
  import json
3
+ import shutil
4
+ import numpy as np
5
 
6
 
7
  dir = 'annotations'
 
8
  excluded = [
9
  'task_2021-03-01_09',
10
  'task_2021-03-01_10',
 
35
  merged = merged | data
36
 
37
 
38
# Build six progressively larger training subsets (DS9.0 .. DS9.5) from
# the iNatv1 annotations.  For DS9.i, images are drawn in random order
# until adding an image would push any of its labels past n_images[i]
# boxes.
n_images = [10, 20, 50, 100, 250, 500]

# The source annotations are loop-invariant: load them once instead of
# re-reading the file on every iteration.
with open('annotations/iNatv1.json', 'r') as f:
    data = json.load(f)

for i, n in enumerate(n_images):
    merged = {}  # selected image annotations, keyed as in the source file
    counts = {}  # boxes kept so far, per label

    items = list(data.items())
    np.random.shuffle(items)
    for key, value in items:
        # Reject the image if any of its labels already reached the cap.
        to_add = True
        for box in value['boxes']:
            if box['label'] in counts and counts[box['label']] >= n:
                to_add = False
                break
        if to_add:
            merged[key] = value
            for box in value['boxes']:
                counts[box['label']] = counts.get(box['label'], 0) + 1

    with open(f'sets/DS9.{i}_train.json', 'w') as f:
        json.dump(merged, f)

    # Every DS9.x variant shares DS8's validation and test splits.
    shutil.copyfile('sets/DS8_val.json', f'sets/DS9.{i}_val.json')
    shutil.copyfile('sets/DS8_test.json', f'sets/DS9.{i}_test.json')
ornithoscope.py CHANGED
@@ -2,7 +2,6 @@ import json
2
  import os
3
 
4
  import datasets
5
- from datasets.tasks import ImageClassification
6
 
7
 
8
  logger = datasets.logging.get_logger(__name__)
@@ -123,43 +122,32 @@ class OrnithoscopeConfig(datasets.BuilderConfig):
123
 
124
  class Ornithoscope(datasets.GeneratorBasedBuilder):
125
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
126
  VERSION = datasets.Version("1.0.0")
127
  BUILDER_CONFIGS = [
128
  OrnithoscopeConfig(
129
- name="DS3",
130
- description="The main dataset.",
131
- train_json="sets/DS3_train.json",
132
- validation_json="sets/DS3_val.json",
133
- test_json="sets/DS3_test.json",
134
- ),
135
- OrnithoscopeConfig(
136
- name="DS4",
137
- description="The new dataset.",
138
- train_json="sets/DS4_train.json",
139
- validation_json="sets/DS4_val.json",
140
- test_json="sets/DS4_test.json",
141
- ),
142
- OrnithoscopeConfig(
143
- name="DS5",
144
- description="The new dataset.",
145
- train_json="sets/DS5_train.json",
146
- validation_json="sets/DS5_val.json",
147
- test_json="sets/DS5_test.json",
148
- ),
149
- OrnithoscopeConfig(
150
- name="DS6",
151
- description="The new dataset.",
152
- train_json="sets/DS6_train.json",
153
- validation_json="sets/DS6_val.json",
154
- test_json="sets/DS6_test.json",
155
- ),
156
- OrnithoscopeConfig(
157
- name="DS7",
158
- description="The new dataset.",
159
- train_json="sets/DS7_train.json",
160
- validation_json="sets/DS7_val.json",
161
- test_json="sets/DS7_test.json",
162
- ),
163
  ]
164
 
165
  def _info(self) -> datasets.DatasetInfo:
 
2
  import os
3
 
4
  import datasets
 
5
 
6
 
7
  logger = datasets.logging.get_logger(__name__)
 
122
 
123
  class Ornithoscope(datasets.GeneratorBasedBuilder):
124
 
125
+ NAMES = [
126
+ 'DS1',
127
+ 'DS2',
128
+ 'DS3',
129
+ 'DS4',
130
+ 'DS5',
131
+ 'DS6',
132
+ 'DS7',
133
+ 'DS8',
134
+ 'DS9.0',
135
+ 'DS9.1',
136
+ 'DS9.2',
137
+ 'DS9.3',
138
+ 'DS9.4',
139
+ 'DS9.5',
140
+ ]
141
  VERSION = datasets.Version("1.0.0")
142
  BUILDER_CONFIGS = [
143
  OrnithoscopeConfig(
144
+ name=name,
145
+ description=f'{name} ornithoscope dataset.',
146
+ train_json=f'sets/{name}_train.json',
147
+ validation_json=f'sets/{name}_val.json',
148
+ test_json=f'sets/{name}_test.json',
149
+ )
150
+ for name in NAMES
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
151
  ]
152
 
153
  def _info(self) -> datasets.DatasetInfo:
sets/DS8_test.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d50d27218640bffd765c6b5d57182f59c344f954d7afbafc7a970fbe6b79a76a
3
+ size 1367416
sets/DS8_train.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9f1425ba861a363bd0aa6592e475ff20ec5407b89d173fa8644d22a0b80fee75
3
+ size 7938808
sets/DS8_val.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f8df387f34459cb2e6ac2718d502fb752d21e09c2c89720c60f0789225332c71
3
+ size 101069
sets/DS9.0_test.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d50d27218640bffd765c6b5d57182f59c344f954d7afbafc7a970fbe6b79a76a
3
+ size 1367416
sets/DS9.0_train.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9808955fbd7f1101da59e795d4f8c4049125dc894e5b1b3639e2f55d347f1d6e
3
+ size 30616
sets/DS9.0_val.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f8df387f34459cb2e6ac2718d502fb752d21e09c2c89720c60f0789225332c71
3
+ size 101069
sets/DS9.1_test.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d50d27218640bffd765c6b5d57182f59c344f954d7afbafc7a970fbe6b79a76a
3
+ size 1367416
sets/DS9.1_train.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cee6b8b4f11d237b638d768ab3ffb74e158fb7ee49e68d489ab57ba70766cfda
3
+ size 61483
sets/DS9.1_val.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f8df387f34459cb2e6ac2718d502fb752d21e09c2c89720c60f0789225332c71
3
+ size 101069
sets/DS9.2_test.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d50d27218640bffd765c6b5d57182f59c344f954d7afbafc7a970fbe6b79a76a
3
+ size 1367416
sets/DS9.2_train.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:954c7a5eb2d23749f6bbaa1f844aba129f07e87792ffa25c7e94427c1a58d6d2
3
+ size 153548
sets/DS9.2_val.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f8df387f34459cb2e6ac2718d502fb752d21e09c2c89720c60f0789225332c71
3
+ size 101069
sets/DS9.3_test.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d50d27218640bffd765c6b5d57182f59c344f954d7afbafc7a970fbe6b79a76a
3
+ size 1367416
sets/DS9.3_train.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cb903bcc5558520dbb5941e236775db0ab0c9a1d60fe49c23902d2ff56206a10
3
+ size 306050
sets/DS9.3_val.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f8df387f34459cb2e6ac2718d502fb752d21e09c2c89720c60f0789225332c71
3
+ size 101069
sets/DS9.4_test.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d50d27218640bffd765c6b5d57182f59c344f954d7afbafc7a970fbe6b79a76a
3
+ size 1367416
sets/DS9.4_train.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:26b6cd82f67af8a9c428a4f66487b3432041c1954460cc0e91d39db515e55a1c
3
+ size 764721
sets/DS9.4_val.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f8df387f34459cb2e6ac2718d502fb752d21e09c2c89720c60f0789225332c71
3
+ size 101069
sets/DS9.5_test.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d50d27218640bffd765c6b5d57182f59c344f954d7afbafc7a970fbe6b79a76a
3
+ size 1367416
sets/DS9.5_train.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bdf5f27c05915178eb909b009577af6daa3ef0100ee4eaa67a36ecd3de32f1ef
3
+ size 1528270
sets/DS9.5_val.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f8df387f34459cb2e6ac2718d502fb752d21e09c2c89720c60f0789225332c71
3
+ size 101069