tiedeman committed
Commit 3e7352a
1 Parent(s): 10b3da5

Initial commit
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+*.spm filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,1770 @@
---
library_name: transformers
language:
- da
- de
- en
- es
- fo
- fr
- is
- nb
- nn
- no
- non
- pt
- sv

tags:
- translation
- opus-mt-tc-bible

license: apache-2.0
model-index:
- name: opus-mt-tc-bible-big-gmq-deu_eng_fra_por_spa
  results:
  - task:
      name: Translation dan-deu
      type: translation
      args: dan-deu
    dataset:
      name: flores200-devtest
      type: flores200-devtest
      args: dan-deu
    metrics:
    - name: BLEU
      type: bleu
      value: 32.3
    - name: chr-F
      type: chrf
      value: 0.60897
  - task:
      name: Translation dan-eng
      type: translation
      args: dan-eng
    dataset:
      name: flores200-devtest
      type: flores200-devtest
      args: dan-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 48.2
    - name: chr-F
      type: chrf
      value: 0.71641
  - task:
      name: Translation dan-fra
      type: translation
      args: dan-fra
    dataset:
      name: flores200-devtest
      type: flores200-devtest
      args: dan-fra
    metrics:
    - name: BLEU
      type: bleu
      value: 38.9
    - name: chr-F
      type: chrf
      value: 0.63777
  - task:
      name: Translation dan-por
      type: translation
      args: dan-por
    dataset:
      name: flores200-devtest
      type: flores200-devtest
      args: dan-por
    metrics:
    - name: BLEU
      type: bleu
      value: 36.7
    - name: chr-F
      type: chrf
      value: 0.62302
  - task:
      name: Translation dan-spa
      type: translation
      args: dan-spa
    dataset:
      name: flores200-devtest
      type: flores200-devtest
      args: dan-spa
    metrics:
    - name: BLEU
      type: bleu
      value: 24.4
    - name: chr-F
      type: chrf
      value: 0.52803
  - task:
      name: Translation fao-deu
      type: translation
      args: fao-deu
    dataset:
      name: flores200-devtest
      type: flores200-devtest
      args: fao-deu
    metrics:
    - name: BLEU
      type: bleu
      value: 16.0
    - name: chr-F
      type: chrf
      value: 0.41184
  - task:
      name: Translation fao-eng
      type: translation
      args: fao-eng
    dataset:
      name: flores200-devtest
      type: flores200-devtest
      args: fao-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 21.2
    - name: chr-F
      type: chrf
      value: 0.43308
  - task:
      name: Translation fao-fra
      type: translation
      args: fao-fra
    dataset:
      name: flores200-devtest
      type: flores200-devtest
      args: fao-fra
    metrics:
    - name: BLEU
      type: bleu
      value: 16.7
    - name: chr-F
      type: chrf
      value: 0.39253
  - task:
      name: Translation fao-por
      type: translation
      args: fao-por
    dataset:
      name: flores200-devtest
      type: flores200-devtest
      args: fao-por
    metrics:
    - name: BLEU
      type: bleu
      value: 19.0
    - name: chr-F
      type: chrf
      value: 0.42649
  - task:
      name: Translation fao-spa
      type: translation
      args: fao-spa
    dataset:
      name: flores200-devtest
      type: flores200-devtest
      args: fao-spa
    metrics:
    - name: BLEU
      type: bleu
      value: 14.1
    - name: chr-F
      type: chrf
      value: 0.38131
  - task:
      name: Translation isl-deu
      type: translation
      args: isl-deu
    dataset:
      name: flores200-devtest
      type: flores200-devtest
      args: isl-deu
    metrics:
    - name: BLEU
      type: bleu
      value: 22.7
    - name: chr-F
      type: chrf
      value: 0.51165
  - task:
      name: Translation isl-eng
      type: translation
      args: isl-eng
    dataset:
      name: flores200-devtest
      type: flores200-devtest
      args: isl-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 32.2
    - name: chr-F
      type: chrf
      value: 0.57745
  - task:
      name: Translation isl-fra
      type: translation
      args: isl-fra
    dataset:
      name: flores200-devtest
      type: flores200-devtest
      args: isl-fra
    metrics:
    - name: BLEU
      type: bleu
      value: 27.6
    - name: chr-F
      type: chrf
      value: 0.54210
  - task:
      name: Translation isl-por
      type: translation
      args: isl-por
    dataset:
      name: flores200-devtest
      type: flores200-devtest
      args: isl-por
    metrics:
    - name: BLEU
      type: bleu
      value: 26.1
    - name: chr-F
      type: chrf
      value: 0.52479
  - task:
      name: Translation isl-spa
      type: translation
      args: isl-spa
    dataset:
      name: flores200-devtest
      type: flores200-devtest
      args: isl-spa
    metrics:
    - name: BLEU
      type: bleu
      value: 19.2
    - name: chr-F
      type: chrf
      value: 0.46837
  - task:
      name: Translation nno-deu
      type: translation
      args: nno-deu
    dataset:
      name: flores200-devtest
      type: flores200-devtest
      args: nno-deu
    metrics:
    - name: BLEU
      type: bleu
      value: 29.2
    - name: chr-F
      type: chrf
      value: 0.58054
  - task:
      name: Translation nno-eng
      type: translation
      args: nno-eng
    dataset:
      name: flores200-devtest
      type: flores200-devtest
      args: nno-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 45.0
    - name: chr-F
      type: chrf
      value: 0.69114
  - task:
      name: Translation nno-fra
      type: translation
      args: nno-fra
    dataset:
      name: flores200-devtest
      type: flores200-devtest
      args: nno-fra
    metrics:
    - name: BLEU
      type: bleu
      value: 36.0
    - name: chr-F
      type: chrf
      value: 0.61334
  - task:
      name: Translation nno-por
      type: translation
      args: nno-por
    dataset:
      name: flores200-devtest
      type: flores200-devtest
      args: nno-por
    metrics:
    - name: BLEU
      type: bleu
      value: 34.1
    - name: chr-F
      type: chrf
      value: 0.60055
  - task:
      name: Translation nno-spa
      type: translation
      args: nno-spa
    dataset:
      name: flores200-devtest
      type: flores200-devtest
      args: nno-spa
    metrics:
    - name: BLEU
      type: bleu
      value: 22.8
    - name: chr-F
      type: chrf
      value: 0.51190
  - task:
      name: Translation nob-deu
      type: translation
      args: nob-deu
    dataset:
      name: flores200-devtest
      type: flores200-devtest
      args: nob-deu
    metrics:
    - name: BLEU
      type: bleu
      value: 27.6
    - name: chr-F
      type: chrf
      value: 0.57023
  - task:
      name: Translation nob-eng
      type: translation
      args: nob-eng
    dataset:
      name: flores200-devtest
      type: flores200-devtest
      args: nob-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 43.1
    - name: chr-F
      type: chrf
      value: 0.67540
  - task:
      name: Translation nob-fra
      type: translation
      args: nob-fra
    dataset:
      name: flores200-devtest
      type: flores200-devtest
      args: nob-fra
    metrics:
    - name: BLEU
      type: bleu
      value: 34.2
    - name: chr-F
      type: chrf
      value: 0.60568
  - task:
      name: Translation nob-por
      type: translation
      args: nob-por
    dataset:
      name: flores200-devtest
      type: flores200-devtest
      args: nob-por
    metrics:
    - name: BLEU
      type: bleu
      value: 32.8
    - name: chr-F
      type: chrf
      value: 0.59466
  - task:
      name: Translation nob-spa
      type: translation
      args: nob-spa
    dataset:
      name: flores200-devtest
      type: flores200-devtest
      args: nob-spa
    metrics:
    - name: BLEU
      type: bleu
      value: 22.4
    - name: chr-F
      type: chrf
      value: 0.51138
  - task:
      name: Translation swe-deu
      type: translation
      args: swe-deu
    dataset:
      name: flores200-devtest
      type: flores200-devtest
      args: swe-deu
    metrics:
    - name: BLEU
      type: bleu
      value: 32.6
    - name: chr-F
      type: chrf
      value: 0.60630
  - task:
      name: Translation swe-eng
      type: translation
      args: swe-eng
    dataset:
      name: flores200-devtest
      type: flores200-devtest
      args: swe-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 48.1
    - name: chr-F
      type: chrf
      value: 0.70584
  - task:
      name: Translation swe-fra
      type: translation
      args: swe-fra
    dataset:
      name: flores200-devtest
      type: flores200-devtest
      args: swe-fra
    metrics:
    - name: BLEU
      type: bleu
      value: 39.1
    - name: chr-F
      type: chrf
      value: 0.63608
  - task:
      name: Translation swe-por
      type: translation
      args: swe-por
    dataset:
      name: flores200-devtest
      type: flores200-devtest
      args: swe-por
    metrics:
    - name: BLEU
      type: bleu
      value: 36.4
    - name: chr-F
      type: chrf
      value: 0.62046
  - task:
      name: Translation swe-spa
      type: translation
      args: swe-spa
    dataset:
      name: flores200-devtest
      type: flores200-devtest
      args: swe-spa
    metrics:
    - name: BLEU
      type: bleu
      value: 23.9
    - name: chr-F
      type: chrf
      value: 0.52328
  - task:
      name: Translation dan-eng
      type: translation
      args: dan-eng
    dataset:
      name: flores101-devtest
      type: flores_101
      args: dan eng devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 47.6
    - name: chr-F
      type: chrf
      value: 0.71193
  - task:
      name: Translation dan-fra
      type: translation
      args: dan-fra
    dataset:
      name: flores101-devtest
      type: flores_101
      args: dan fra devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 38.1
    - name: chr-F
      type: chrf
      value: 0.63349
  - task:
      name: Translation dan-por
      type: translation
      args: dan-por
    dataset:
      name: flores101-devtest
      type: flores_101
      args: dan por devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 36.2
    - name: chr-F
      type: chrf
      value: 0.62063
  - task:
      name: Translation dan-spa
      type: translation
      args: dan-spa
    dataset:
      name: flores101-devtest
      type: flores_101
      args: dan spa devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 24.2
    - name: chr-F
      type: chrf
      value: 0.52557
  - task:
      name: Translation isl-deu
      type: translation
      args: isl-deu
    dataset:
      name: flores101-devtest
      type: flores_101
      args: isl deu devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 22.2
    - name: chr-F
      type: chrf
      value: 0.50581
  - task:
      name: Translation isl-eng
      type: translation
      args: isl-eng
    dataset:
      name: flores101-devtest
      type: flores_101
      args: isl eng devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 31.6
    - name: chr-F
      type: chrf
      value: 0.57294
  - task:
      name: Translation isl-por
      type: translation
      args: isl-por
    dataset:
      name: flores101-devtest
      type: flores_101
      args: isl por devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 25.8
    - name: chr-F
      type: chrf
      value: 0.52192
  - task:
      name: Translation isl-spa
      type: translation
      args: isl-spa
    dataset:
      name: flores101-devtest
      type: flores_101
      args: isl spa devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 18.5
    - name: chr-F
      type: chrf
      value: 0.46364
  - task:
      name: Translation nob-eng
      type: translation
      args: nob-eng
    dataset:
      name: flores101-devtest
      type: flores_101
      args: nob eng devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 42.6
    - name: chr-F
      type: chrf
      value: 0.67120
  - task:
      name: Translation nob-fra
      type: translation
      args: nob-fra
    dataset:
      name: flores101-devtest
      type: flores_101
      args: nob fra devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 33.9
    - name: chr-F
      type: chrf
      value: 0.60289
  - task:
      name: Translation nob-spa
      type: translation
      args: nob-spa
    dataset:
      name: flores101-devtest
      type: flores_101
      args: nob spa devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 21.9
    - name: chr-F
      type: chrf
      value: 0.50848
  - task:
      name: Translation swe-deu
      type: translation
      args: swe-deu
    dataset:
      name: flores101-devtest
      type: flores_101
      args: swe deu devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 32.2
    - name: chr-F
      type: chrf
      value: 0.60306
  - task:
      name: Translation swe-eng
      type: translation
      args: swe-eng
    dataset:
      name: flores101-devtest
      type: flores_101
      args: swe eng devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 47.9
    - name: chr-F
      type: chrf
      value: 0.70404
  - task:
      name: Translation swe-por
      type: translation
      args: swe-por
    dataset:
      name: flores101-devtest
      type: flores_101
      args: swe por devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 35.7
    - name: chr-F
      type: chrf
      value: 0.61418
  - task:
      name: Translation dan-deu
      type: translation
      args: dan-deu
    dataset:
      name: ntrex128
      type: ntrex128
      args: dan-deu
    metrics:
    - name: BLEU
      type: bleu
      value: 25.3
    - name: chr-F
      type: chrf
      value: 0.54229
  - task:
      name: Translation dan-eng
      type: translation
      args: dan-eng
    dataset:
      name: ntrex128
      type: ntrex128
      args: dan-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 38.7
    - name: chr-F
      type: chrf
      value: 0.63083
  - task:
      name: Translation dan-fra
      type: translation
      args: dan-fra
    dataset:
      name: ntrex128
      type: ntrex128
      args: dan-fra
    metrics:
    - name: BLEU
      type: bleu
      value: 26.2
    - name: chr-F
      type: chrf
      value: 0.54088
  - task:
      name: Translation dan-por
      type: translation
      args: dan-por
    dataset:
      name: ntrex128
      type: ntrex128
      args: dan-por
    metrics:
    - name: BLEU
      type: bleu
      value: 27.0
    - name: chr-F
      type: chrf
      value: 0.53626
  - task:
      name: Translation dan-spa
      type: translation
      args: dan-spa
    dataset:
      name: ntrex128
      type: ntrex128
      args: dan-spa
    metrics:
    - name: BLEU
      type: bleu
      value: 30.8
    - name: chr-F
      type: chrf
      value: 0.56217
  - task:
      name: Translation fao-deu
      type: translation
      args: fao-deu
    dataset:
      name: ntrex128
      type: ntrex128
      args: fao-deu
    metrics:
    - name: BLEU
      type: bleu
      value: 16.4
    - name: chr-F
      type: chrf
      value: 0.41701
  - task:
      name: Translation fao-eng
      type: translation
      args: fao-eng
    dataset:
      name: ntrex128
      type: ntrex128
      args: fao-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 25.3
    - name: chr-F
      type: chrf
      value: 0.47105
  - task:
      name: Translation fao-fra
      type: translation
      args: fao-fra
    dataset:
      name: ntrex128
      type: ntrex128
      args: fao-fra
    metrics:
    - name: BLEU
      type: bleu
      value: 16.3
    - name: chr-F
      type: chrf
      value: 0.40070
  - task:
      name: Translation fao-por
      type: translation
      args: fao-por
    dataset:
      name: ntrex128
      type: ntrex128
      args: fao-por
    metrics:
    - name: BLEU
      type: bleu
      value: 18.0
    - name: chr-F
      type: chrf
      value: 0.42005
  - task:
      name: Translation fao-spa
      type: translation
      args: fao-spa
    dataset:
      name: ntrex128
      type: ntrex128
      args: fao-spa
    metrics:
    - name: BLEU
      type: bleu
      value: 20.5
    - name: chr-F
      type: chrf
      value: 0.44085
  - task:
      name: Translation isl-deu
      type: translation
      args: isl-deu
    dataset:
      name: ntrex128
      type: ntrex128
      args: isl-deu
    metrics:
    - name: BLEU
      type: bleu
      value: 20.5
    - name: chr-F
      type: chrf
      value: 0.49932
  - task:
      name: Translation isl-eng
      type: translation
      args: isl-eng
    dataset:
      name: ntrex128
      type: ntrex128
      args: isl-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 29.7
    - name: chr-F
      type: chrf
      value: 0.56856
  - task:
      name: Translation isl-fra
      type: translation
      args: isl-fra
    dataset:
      name: ntrex128
      type: ntrex128
      args: isl-fra
    metrics:
    - name: BLEU
      type: bleu
      value: 24.6
    - name: chr-F
      type: chrf
      value: 0.51998
  - task:
      name: Translation isl-por
      type: translation
      args: isl-por
    dataset:
      name: ntrex128
      type: ntrex128
      args: isl-por
    metrics:
    - name: BLEU
      type: bleu
      value: 21.7
    - name: chr-F
      type: chrf
      value: 0.49903
  - task:
      name: Translation isl-spa
      type: translation
      args: isl-spa
    dataset:
      name: ntrex128
      type: ntrex128
      args: isl-spa
    metrics:
    - name: BLEU
      type: bleu
      value: 27.1
    - name: chr-F
      type: chrf
      value: 0.53171
  - task:
      name: Translation nno-deu
      type: translation
      args: nno-deu
    dataset:
      name: ntrex128
      type: ntrex128
      args: nno-deu
    metrics:
    - name: BLEU
      type: bleu
      value: 24.4
    - name: chr-F
      type: chrf
      value: 0.53000
  - task:
      name: Translation nno-eng
      type: translation
      args: nno-eng
    dataset:
      name: ntrex128
      type: ntrex128
      args: nno-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 42.9
    - name: chr-F
      type: chrf
      value: 0.65866
  - task:
      name: Translation nno-fra
      type: translation
      args: nno-fra
    dataset:
      name: ntrex128
      type: ntrex128
      args: nno-fra
    metrics:
    - name: BLEU
      type: bleu
      value: 27.5
    - name: chr-F
      type: chrf
      value: 0.54339
  - task:
      name: Translation nno-por
      type: translation
      args: nno-por
    dataset:
      name: ntrex128
      type: ntrex128
      args: nno-por
    metrics:
    - name: BLEU
      type: bleu
      value: 26.3
    - name: chr-F
      type: chrf
      value: 0.53242
  - task:
      name: Translation nno-spa
      type: translation
      args: nno-spa
    dataset:
      name: ntrex128
      type: ntrex128
      args: nno-spa
    metrics:
    - name: BLEU
      type: bleu
      value: 30.4
    - name: chr-F
      type: chrf
      value: 0.55889
  - task:
      name: Translation nob-deu
      type: translation
      args: nob-deu
    dataset:
      name: ntrex128
      type: ntrex128
      args: nob-deu
    metrics:
    - name: BLEU
      type: bleu
      value: 26.8
    - name: chr-F
      type: chrf
      value: 0.55549
  - task:
      name: Translation nob-eng
      type: translation
      args: nob-eng
    dataset:
      name: ntrex128
      type: ntrex128
      args: nob-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 40.9
    - name: chr-F
      type: chrf
      value: 0.65580
  - task:
      name: Translation nob-fra
      type: translation
      args: nob-fra
    dataset:
      name: ntrex128
      type: ntrex128
      args: nob-fra
    metrics:
    - name: BLEU
      type: bleu
      value: 29.2
    - name: chr-F
      type: chrf
      value: 0.56187
  - task:
      name: Translation nob-por
      type: translation
      args: nob-por
    dataset:
      name: ntrex128
      type: ntrex128
      args: nob-por
    metrics:
    - name: BLEU
      type: bleu
      value: 26.6
    - name: chr-F
      type: chrf
      value: 0.54392
  - task:
      name: Translation nob-spa
      type: translation
      args: nob-spa
    dataset:
      name: ntrex128
      type: ntrex128
      args: nob-spa
    metrics:
    - name: BLEU
      type: bleu
      value: 32.6
    - name: chr-F
      type: chrf
      value: 0.57998
  - task:
      name: Translation swe-deu
      type: translation
      args: swe-deu
    dataset:
      name: ntrex128
      type: ntrex128
      args: swe-deu
    metrics:
    - name: BLEU
      type: bleu
      value: 26.7
    - name: chr-F
      type: chrf
      value: 0.55549
  - task:
      name: Translation swe-eng
      type: translation
      args: swe-eng
    dataset:
      name: ntrex128
      type: ntrex128
      args: swe-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 42.2
    - name: chr-F
      type: chrf
      value: 0.66348
  - task:
      name: Translation swe-fra
      type: translation
      args: swe-fra
    dataset:
      name: ntrex128
      type: ntrex128
      args: swe-fra
    metrics:
    - name: BLEU
      type: bleu
      value: 29.0
    - name: chr-F
      type: chrf
      value: 0.56310
  - task:
      name: Translation swe-por
      type: translation
      args: swe-por
    dataset:
      name: ntrex128
      type: ntrex128
      args: swe-por
    metrics:
    - name: BLEU
      type: bleu
      value: 27.8
    - name: chr-F
      type: chrf
      value: 0.54965
  - task:
      name: Translation swe-spa
      type: translation
      args: swe-spa
    dataset:
      name: ntrex128
      type: ntrex128
      args: swe-spa
    metrics:
    - name: BLEU
      type: bleu
      value: 32.8
    - name: chr-F
      type: chrf
      value: 0.58035
  - task:
      name: Translation dan-deu
      type: translation
      args: dan-deu
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: dan-deu
    metrics:
    - name: BLEU
      type: bleu
      value: 56.7
    - name: chr-F
      type: chrf
      value: 0.74460
  - task:
      name: Translation dan-eng
      type: translation
      args: dan-eng
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: dan-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 64.3
    - name: chr-F
      type: chrf
      value: 0.77233
  - task:
      name: Translation dan-fra
      type: translation
      args: dan-fra
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: dan-fra
    metrics:
    - name: BLEU
      type: bleu
      value: 60.8
    - name: chr-F
      type: chrf
      value: 0.76425
  - task:
      name: Translation dan-por
      type: translation
      args: dan-por
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: dan-por
    metrics:
    - name: BLEU
      type: bleu
      value: 60.0
    - name: chr-F
      type: chrf
      value: 0.77248
  - task:
      name: Translation dan-spa
      type: translation
      args: dan-spa
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: dan-spa
    metrics:
    - name: BLEU
      type: bleu
      value: 54.9
    - name: chr-F
      type: chrf
      value: 0.72567
  - task:
      name: Translation fao-eng
      type: translation
      args: fao-eng
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: fao-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 39.6
    - name: chr-F
      type: chrf
      value: 0.54571
  - task:
      name: Translation isl-deu
      type: translation
      args: isl-deu
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: isl-deu
    metrics:
    - name: BLEU
      type: bleu
      value: 51.4
    - name: chr-F
      type: chrf
      value: 0.68535
  - task:
      name: Translation isl-eng
      type: translation
      args: isl-eng
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: isl-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 51.7
    - name: chr-F
      type: chrf
      value: 0.67066
  - task:
      name: Translation isl-spa
      type: translation
      args: isl-spa
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: isl-spa
    metrics:
    - name: BLEU
      type: bleu
      value: 48.5
    - name: chr-F
      type: chrf
      value: 0.65659
  - task:
      name: Translation multi-multi
      type: translation
      args: multi-multi
    dataset:
      name: tatoeba-test-v2020-07-28-v2023-09-26
      type: tatoeba_mt
      args: multi-multi
    metrics:
    - name: BLEU
      type: bleu
      value: 58.2
    - name: chr-F
      type: chrf
      value: 0.73325
  - task:
      name: Translation nno-eng
      type: translation
      args: nno-eng
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: nno-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 55.5
    - name: chr-F
      type: chrf
      value: 0.69415
  - task:
      name: Translation nob-deu
      type: translation
      args: nob-deu
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: nob-deu
    metrics:
    - name: BLEU
      type: bleu
      value: 50.5
    - name: chr-F
      type: chrf
      value: 0.69862
  - task:
      name: Translation nob-eng
      type: translation
      args: nob-eng
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: nob-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 59.2
    - name: chr-F
      type: chrf
      value: 0.72912
  - task:
      name: Translation nob-fra
      type: translation
      args: nob-fra
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: nob-fra
    metrics:
    - name: BLEU
      type: bleu
      value: 52.5
    - name: chr-F
      type: chrf
      value: 0.71392
  - task:
      name: Translation nob-spa
      type: translation
      args: nob-spa
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: nob-spa
    metrics:
    - name: BLEU
      type: bleu
      value: 55.1
    - name: chr-F
      type: chrf
      value: 0.73300
  - task:
      name: Translation nor-deu
      type: translation
      args: nor-deu
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: nor-deu
    metrics:
    - name: BLEU
      type: bleu
      value: 50.7
    - name: chr-F
      type: chrf
      value: 0.69923
  - task:
      name: Translation nor-eng
      type: translation
      args: nor-eng
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: nor-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 58.8
    - name: chr-F
      type: chrf
      value: 0.72587
  - task:
      name: Translation nor-fra
      type: translation
      args: nor-fra
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: nor-fra
    metrics:
    - name: BLEU
      type: bleu
      value: 55.1
    - name: chr-F
      type: chrf
      value: 0.73052
  - task:
      name: Translation nor-por
      type: translation
      args: nor-por
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: nor-por
    metrics:
    - name: BLEU
      type: bleu
      value: 45.4
    - name: chr-F
      type: chrf
      value: 0.67948
  - task:
      name: Translation nor-spa
      type: translation
      args: nor-spa
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: nor-spa
    metrics:
    - name: BLEU
      type: bleu
      value: 55.3
    - name: chr-F
      type: chrf
      value: 0.73320
  - task:
      name: Translation swe-deu
      type: translation
      args: swe-deu
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: swe-deu
    metrics:
    - name: BLEU
      type: bleu
      value: 55.4
    - name: chr-F
      type: chrf
      value: 0.71816
  - task:
      name: Translation swe-eng
      type: translation
      args: swe-eng
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: swe-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 64.8
    - name: chr-F
      type: chrf
      value: 0.76648
  - task:
      name: Translation swe-fra
      type: translation
      args: swe-fra
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: swe-fra
    metrics:
    - name: BLEU
      type: bleu
      value: 57.4
    - name: chr-F
      type: chrf
      value: 0.72847
  - task:
      name: Translation swe-por
      type: translation
      args: swe-por
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: swe-por
    metrics:
    - name: BLEU
      type: bleu
      value: 50.3
    - name: chr-F
      type: chrf
      value: 0.70554
  - task:
      name: Translation swe-spa
      type: translation
      args: swe-spa
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: swe-spa
    metrics:
    - name: BLEU
      type: bleu
      value: 54.3
    - name: chr-F
      type: chrf
      value: 0.70926
  - task:
      name: Translation isl-eng
      type: translation
      args: isl-eng
    dataset:
      name: newstest2021
      type: wmt-2021-news
      args: isl-eng
    metrics:
    - name: BLEU
      type: bleu
      value: 32.4
    - name: chr-F
      type: chrf
      value: 0.56364
---
# opus-mt-tc-bible-big-gmq-deu_eng_fra_por_spa

## Table of Contents
- [Model Details](#model-details)
- [Uses](#uses)
- [Risks, Limitations and Biases](#risks-limitations-and-biases)
- [How to Get Started With the Model](#how-to-get-started-with-the-model)
- [Training](#training)
- [Evaluation](#evaluation)
- [Citation Information](#citation-information)
- [Acknowledgements](#acknowledgements)

## Model Details

Neural machine translation model for translating from North Germanic languages (gmq) to German, English, French, Portuguese and Spanish (deu+eng+fra+por+spa).

This model is part of the [OPUS-MT project](https://github.com/Helsinki-NLP/Opus-MT), an effort to make neural machine translation models widely available and accessible for many languages in the world. All models are originally trained with [Marian NMT](https://marian-nmt.github.io/), an efficient NMT implementation written in pure C++. The models have been converted to PyTorch using Hugging Face's transformers library. Training data is taken from [OPUS](https://opus.nlpl.eu/) and training pipelines use the procedures of [OPUS-MT-train](https://github.com/Helsinki-NLP/Opus-MT-train).

**Model Description:**
- **Developed by:** Language Technology Research Group at the University of Helsinki
- **Model Type:** Translation (transformer-big)
- **Release:** 2024-05-30
- **License:** Apache-2.0
- **Language(s):**
  - Source Language(s): dan fao isl nno nob non nor swe
  - Target Language(s): deu eng fra por spa
  - Valid Target Language Labels: >>deu<< >>eng<< >>fra<< >>por<< >>spa<< >>xxx<<
- **Original Model:** [opusTCv20230926max50+bt+jhubc_transformer-big_2024-05-30.zip](https://object.pouta.csc.fi/Tatoeba-MT-models/gmq-deu+eng+fra+por+spa/opusTCv20230926max50+bt+jhubc_transformer-big_2024-05-30.zip)
- **Resources for more information:**
  - [OPUS-MT dashboard](https://opus.nlpl.eu/dashboard/index.php?pkg=opusmt&test=all&scoreslang=all&chart=standard&model=Tatoeba-MT-models/gmq-deu%2Beng%2Bfra%2Bpor%2Bspa/opusTCv20230926max50%2Bbt%2Bjhubc_transformer-big_2024-05-30)
  - [OPUS-MT-train GitHub Repo](https://github.com/Helsinki-NLP/OPUS-MT-train)
  - [More information about MarianNMT models in the transformers library](https://huggingface.co/docs/transformers/model_doc/marian)
  - [Tatoeba Translation Challenge](https://github.com/Helsinki-NLP/Tatoeba-Challenge/)
  - [HPLT bilingual data v1 (as part of the Tatoeba Translation Challenge dataset)](https://hplt-project.org/datasets/v1)
  - [A massively parallel Bible corpus](https://aclanthology.org/L14-1215/)

This is a multilingual translation model with multiple target languages. A sentence-initial language token of the form `>>id<<` (where `id` is a valid target language ID) is required, e.g. `>>deu<<`.

## Uses

This model can be used for translation and text-to-text generation.

## Risks, Limitations and Biases

**CONTENT WARNING: Readers should be aware that the model is trained on various public data sets that may contain content that is disturbing, offensive, and can propagate historical and current stereotypes.**

Significant research has explored bias and fairness issues with language models (see, e.g., [Sheng et al. (2021)](https://aclanthology.org/2021.acl-long.330.pdf) and [Bender et al. (2021)](https://dl.acm.org/doi/pdf/10.1145/3442188.3445922)).

## How to Get Started With the Model

A short example:

```python
from transformers import MarianMTModel, MarianTokenizer

src_text = [
    ">>deu<< Replace this with text in an accepted source language.",
    ">>spa<< This is the second sentence."
]

model_name = "pytorch-models/opus-mt-tc-bible-big-gmq-deu_eng_fra_por_spa"
tokenizer = MarianTokenizer.from_pretrained(model_name)
model = MarianMTModel.from_pretrained(model_name)
translated = model.generate(**tokenizer(src_text, return_tensors="pt", padding=True))

for t in translated:
    print(tokenizer.decode(t, skip_special_tokens=True))
```

You can also use OPUS-MT models with the transformers pipelines, for example:

```python
from transformers import pipeline

pipe = pipeline("translation", model="Helsinki-NLP/opus-mt-tc-bible-big-gmq-deu_eng_fra_por_spa")
print(pipe(">>deu<< Replace this with text in an accepted source language."))
```
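
Since the target language is selected purely by the sentence-initial token, one loaded model can serve all five target languages. Below is a minimal sketch of a small helper for that pattern; `translate_into` is a name made up for this example, not part of the transformers API.

```python
from transformers import MarianMTModel, MarianTokenizer

model_name = "Helsinki-NLP/opus-mt-tc-bible-big-gmq-deu_eng_fra_por_spa"
tokenizer = MarianTokenizer.from_pretrained(model_name)
model = MarianMTModel.from_pretrained(model_name)

def translate_into(sentences, target_lang):
    """Prepend the >>id<< target-language token and translate a batch."""
    tagged = [f">>{target_lang}<< {s}" for s in sentences]
    batch = tokenizer(tagged, return_tensors="pt", padding=True)
    output_ids = model.generate(**batch)
    return [tokenizer.decode(ids, skip_special_tokens=True) for ids in output_ids]

# The same Danish source rendered into two different target languages.
print(translate_into(["Hvor er biblioteket?"], "eng"))
print(translate_into(["Hvor er biblioteket?"], "fra"))
```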

## Training

- **Data**: opusTCv20230926max50+bt+jhubc ([source](https://github.com/Helsinki-NLP/Tatoeba-Challenge))
- **Pre-processing**: SentencePiece (spm32k,spm32k); see the sketch below
- **Model Type:** transformer-big
- **Original MarianNMT Model**: [opusTCv20230926max50+bt+jhubc_transformer-big_2024-05-30.zip](https://object.pouta.csc.fi/Tatoeba-MT-models/gmq-deu+eng+fra+por+spa/opusTCv20230926max50+bt+jhubc_transformer-big_2024-05-30.zip)
- **Training Scripts**: [GitHub Repo](https://github.com/Helsinki-NLP/OPUS-MT-train)
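
As a rough illustration of the SentencePiece pre-processing, the released `source.spm` model can be loaded directly with the `sentencepiece` package; this sketch assumes `source.spm` from this repository has been downloaded to the working directory.

```python
import sentencepiece as spm

# Load the 32k source-side SentencePiece model shipped with this repository.
sp = spm.SentencePieceProcessor(model_file="source.spm")

# Segment a Danish sentence into the subword pieces the model was trained on.
print(sp.encode("Hvor er biblioteket?", out_type=str))
```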

## Evaluation

* [Model scores at the OPUS-MT dashboard](https://opus.nlpl.eu/dashboard/index.php?pkg=opusmt&test=all&scoreslang=all&chart=standard&model=Tatoeba-MT-models/gmq-deu%2Beng%2Bfra%2Bpor%2Bspa/opusTCv20230926max50%2Bbt%2Bjhubc_transformer-big_2024-05-30)
* test set translations: [opusTCv20230926max50+bt+jhubc_transformer-big_2024-05-29.test.txt](https://object.pouta.csc.fi/Tatoeba-MT-models/gmq-deu+eng+fra+por+spa/opusTCv20230926max50+bt+jhubc_transformer-big_2024-05-29.test.txt)
* test set scores: [opusTCv20230926max50+bt+jhubc_transformer-big_2024-05-29.eval.txt](https://object.pouta.csc.fi/Tatoeba-MT-models/gmq-deu+eng+fra+por+spa/opusTCv20230926max50+bt+jhubc_transformer-big_2024-05-29.eval.txt) (a re-scoring sketch follows below)
* benchmark results: [benchmark_results.txt](benchmark_results.txt)
* benchmark output: [benchmark_translations.zip](benchmark_translations.zip)
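
The chr-F and BLEU figures below follow the usual sacrebleu conventions (note that sacrebleu reports chrF on a 0-100 scale, while the tables here use 0-1). A minimal re-scoring sketch, assuming hypothetical line-aligned files `hyp.txt` and `ref.txt`:

```python
import sacrebleu

# One sentence per line; hypothesis and reference files are line-aligned.
with open("hyp.txt", encoding="utf-8") as f:
    hyps = f.read().splitlines()
with open("ref.txt", encoding="utf-8") as f:
    refs = [f.read().splitlines()]  # list of reference streams

print(sacrebleu.corpus_bleu(hyps, refs).score)  # BLEU
print(sacrebleu.corpus_chrf(hyps, refs).score)  # chrF (0-100)
```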

| langpair | testset | chr-F | BLEU | #sent | #words |
|----------|---------|-------|------|-------|--------|
| dan-deu | tatoeba-test-v2021-08-07 | 0.74460 | 56.7 | 9998 | 76055 |
| dan-eng | tatoeba-test-v2021-08-07 | 0.77233 | 64.3 | 10795 | 79684 |
| dan-fra | tatoeba-test-v2021-08-07 | 0.76425 | 60.8 | 1731 | 11882 |
| dan-por | tatoeba-test-v2021-08-07 | 0.77248 | 60.0 | 873 | 5360 |
| dan-spa | tatoeba-test-v2021-08-07 | 0.72567 | 54.9 | 5000 | 35528 |
| fao-eng | tatoeba-test-v2021-08-07 | 0.54571 | 39.6 | 294 | 1984 |
| isl-deu | tatoeba-test-v2021-08-07 | 0.68535 | 51.4 | 969 | 6279 |
| isl-eng | tatoeba-test-v2021-08-07 | 0.67066 | 51.7 | 2503 | 19788 |
| isl-spa | tatoeba-test-v2021-08-07 | 0.65659 | 48.5 | 238 | 1229 |
| nno-eng | tatoeba-test-v2021-08-07 | 0.69415 | 55.5 | 460 | 3524 |
| nob-deu | tatoeba-test-v2021-08-07 | 0.69862 | 50.5 | 3525 | 33592 |
| nob-eng | tatoeba-test-v2021-08-07 | 0.72912 | 59.2 | 4539 | 36823 |
| nob-fra | tatoeba-test-v2021-08-07 | 0.71392 | 52.5 | 323 | 2269 |
| nob-spa | tatoeba-test-v2021-08-07 | 0.73300 | 55.1 | 885 | 6866 |
| nor-deu | tatoeba-test-v2021-08-07 | 0.69923 | 50.7 | 3651 | 34575 |
| nor-eng | tatoeba-test-v2021-08-07 | 0.72587 | 58.8 | 5000 | 40355 |
| nor-fra | tatoeba-test-v2021-08-07 | 0.73052 | 55.1 | 477 | 3213 |
| nor-por | tatoeba-test-v2021-08-07 | 0.67948 | 45.4 | 481 | 4182 |
| nor-spa | tatoeba-test-v2021-08-07 | 0.73320 | 55.3 | 960 | 7311 |
| swe-deu | tatoeba-test-v2021-08-07 | 0.71816 | 55.4 | 3410 | 23494 |
| swe-eng | tatoeba-test-v2021-08-07 | 0.76648 | 64.8 | 10362 | 68513 |
| swe-fra | tatoeba-test-v2021-08-07 | 0.72847 | 57.4 | 1407 | 9580 |
| swe-por | tatoeba-test-v2021-08-07 | 0.70554 | 50.3 | 320 | 2032 |
| swe-spa | tatoeba-test-v2021-08-07 | 0.70926 | 54.3 | 1351 | 8235 |
| dan-eng | flores101-devtest | 0.71193 | 47.6 | 1012 | 24721 |
| dan-fra | flores101-devtest | 0.63349 | 38.1 | 1012 | 28343 |
| dan-por | flores101-devtest | 0.62063 | 36.2 | 1012 | 26519 |
| dan-spa | flores101-devtest | 0.52557 | 24.2 | 1012 | 29199 |
| isl-deu | flores101-devtest | 0.50581 | 22.2 | 1012 | 25094 |
| isl-eng | flores101-devtest | 0.57294 | 31.6 | 1012 | 24721 |
| isl-por | flores101-devtest | 0.52192 | 25.8 | 1012 | 26519 |
| isl-spa | flores101-devtest | 0.46364 | 18.5 | 1012 | 29199 |
| nob-eng | flores101-devtest | 0.67120 | 42.6 | 1012 | 24721 |
| nob-fra | flores101-devtest | 0.60289 | 33.9 | 1012 | 28343 |
| nob-spa | flores101-devtest | 0.50848 | 21.9 | 1012 | 29199 |
| swe-deu | flores101-devtest | 0.60306 | 32.2 | 1012 | 25094 |
| swe-eng | flores101-devtest | 0.70404 | 47.9 | 1012 | 24721 |
| swe-por | flores101-devtest | 0.61418 | 35.7 | 1012 | 26519 |
| dan-deu | flores200-devtest | 0.60897 | 32.3 | 1012 | 25094 |
| dan-eng | flores200-devtest | 0.71641 | 48.2 | 1012 | 24721 |
| dan-fra | flores200-devtest | 0.63777 | 38.9 | 1012 | 28343 |
| dan-por | flores200-devtest | 0.62302 | 36.7 | 1012 | 26519 |
| dan-spa | flores200-devtest | 0.52803 | 24.4 | 1012 | 29199 |
| fao-deu | flores200-devtest | 0.41184 | 16.0 | 1012 | 25094 |
| fao-eng | flores200-devtest | 0.43308 | 21.2 | 1012 | 24721 |
| fao-por | flores200-devtest | 0.42649 | 19.0 | 1012 | 26519 |
| isl-deu | flores200-devtest | 0.51165 | 22.7 | 1012 | 25094 |
| isl-eng | flores200-devtest | 0.57745 | 32.2 | 1012 | 24721 |
| isl-fra | flores200-devtest | 0.54210 | 27.6 | 1012 | 28343 |
| isl-por | flores200-devtest | 0.52479 | 26.1 | 1012 | 26519 |
| isl-spa | flores200-devtest | 0.46837 | 19.2 | 1012 | 29199 |
| nno-deu | flores200-devtest | 0.58054 | 29.2 | 1012 | 25094 |
| nno-eng | flores200-devtest | 0.69114 | 45.0 | 1012 | 24721 |
| nno-fra | flores200-devtest | 0.61334 | 36.0 | 1012 | 28343 |
| nno-por | flores200-devtest | 0.60055 | 34.1 | 1012 | 26519 |
| nno-spa | flores200-devtest | 0.51190 | 22.8 | 1012 | 29199 |
| nob-deu | flores200-devtest | 0.57023 | 27.6 | 1012 | 25094 |
| nob-eng | flores200-devtest | 0.67540 | 43.1 | 1012 | 24721 |
| nob-fra | flores200-devtest | 0.60568 | 34.2 | 1012 | 28343 |
| nob-por | flores200-devtest | 0.59466 | 32.8 | 1012 | 26519 |
| nob-spa | flores200-devtest | 0.51138 | 22.4 | 1012 | 29199 |
| swe-deu | flores200-devtest | 0.60630 | 32.6 | 1012 | 25094 |
| swe-eng | flores200-devtest | 0.70584 | 48.1 | 1012 | 24721 |
| swe-fra | flores200-devtest | 0.63608 | 39.1 | 1012 | 28343 |
| swe-por | flores200-devtest | 0.62046 | 36.4 | 1012 | 26519 |
| swe-spa | flores200-devtest | 0.52328 | 23.9 | 1012 | 29199 |
| isl-eng | newstest2021 | 0.56364 | 32.4 | 1000 | 22529 |
| dan-deu | ntrex128 | 0.54229 | 25.3 | 1997 | 48761 |
| dan-eng | ntrex128 | 0.63083 | 38.7 | 1997 | 47673 |
| dan-fra | ntrex128 | 0.54088 | 26.2 | 1997 | 53481 |
| dan-por | ntrex128 | 0.53626 | 27.0 | 1997 | 51631 |
| dan-spa | ntrex128 | 0.56217 | 30.8 | 1997 | 54107 |
| fao-deu | ntrex128 | 0.41701 | 16.4 | 1997 | 48761 |
| fao-eng | ntrex128 | 0.47105 | 25.3 | 1997 | 47673 |
| fao-fra | ntrex128 | 0.40070 | 16.3 | 1997 | 53481 |
| fao-por | ntrex128 | 0.42005 | 18.0 | 1997 | 51631 |
| fao-spa | ntrex128 | 0.44085 | 20.5 | 1997 | 54107 |
| isl-deu | ntrex128 | 0.49932 | 20.5 | 1997 | 48761 |
| isl-eng | ntrex128 | 0.56856 | 29.7 | 1997 | 47673 |
| isl-fra | ntrex128 | 0.51998 | 24.6 | 1997 | 53481 |
| isl-por | ntrex128 | 0.49903 | 21.7 | 1997 | 51631 |
| isl-spa | ntrex128 | 0.53171 | 27.1 | 1997 | 54107 |
| nno-deu | ntrex128 | 0.53000 | 24.4 | 1997 | 48761 |
| nno-eng | ntrex128 | 0.65866 | 42.9 | 1997 | 47673 |
| nno-fra | ntrex128 | 0.54339 | 27.5 | 1997 | 53481 |
| nno-por | ntrex128 | 0.53242 | 26.3 | 1997 | 51631 |
| nno-spa | ntrex128 | 0.55889 | 30.4 | 1997 | 54107 |
| nob-deu | ntrex128 | 0.55549 | 26.8 | 1997 | 48761 |
| nob-eng | ntrex128 | 0.65580 | 40.9 | 1997 | 47673 |
| nob-fra | ntrex128 | 0.56187 | 29.2 | 1997 | 53481 |
| nob-por | ntrex128 | 0.54392 | 26.6 | 1997 | 51631 |
| nob-spa | ntrex128 | 0.57998 | 32.6 | 1997 | 54107 |
| swe-deu | ntrex128 | 0.55549 | 26.7 | 1997 | 48761 |
| swe-eng | ntrex128 | 0.66348 | 42.2 | 1997 | 47673 |
| swe-fra | ntrex128 | 0.56310 | 29.0 | 1997 | 53481 |
| swe-por | ntrex128 | 0.54965 | 27.8 | 1997 | 51631 |
| swe-spa | ntrex128 | 0.58035 | 32.8 | 1997 | 54107 |

## Citation Information

* Publications: [Democratizing neural machine translation with OPUS-MT](https://doi.org/10.1007/s10579-023-09704-w), [OPUS-MT – Building open translation services for the World](https://aclanthology.org/2020.eamt-1.61/), and [The Tatoeba Translation Challenge – Realistic Data Sets for Low Resource and Multilingual MT](https://aclanthology.org/2020.wmt-1.139/) (please cite these if you use this model)

```bibtex
@article{tiedemann2023democratizing,
  title={Democratizing neural machine translation with {OPUS-MT}},
  author={Tiedemann, J{\"o}rg and Aulamo, Mikko and Bakshandaeva, Daria and Boggia, Michele and Gr{\"o}nroos, Stig-Arne and Nieminen, Tommi and Raganato, Alessandro and Scherrer, Yves and Vazquez, Raul and Virpioja, Sami},
  journal={Language Resources and Evaluation},
  number={58},
  pages={713--755},
  year={2023},
  publisher={Springer Nature},
  issn={1574-0218},
  doi={10.1007/s10579-023-09704-w}
}

@inproceedings{tiedemann-thottingal-2020-opus,
  title = "{OPUS}-{MT} {--} Building open translation services for the World",
  author = {Tiedemann, J{\"o}rg and Thottingal, Santhosh},
  booktitle = "Proceedings of the 22nd Annual Conference of the European Association for Machine Translation",
  month = nov,
  year = "2020",
  address = "Lisboa, Portugal",
  publisher = "European Association for Machine Translation",
  url = "https://aclanthology.org/2020.eamt-1.61",
  pages = "479--480",
}

@inproceedings{tiedemann-2020-tatoeba,
  title = "The Tatoeba Translation Challenge {--} Realistic Data Sets for Low Resource and Multilingual {MT}",
  author = {Tiedemann, J{\"o}rg},
  booktitle = "Proceedings of the Fifth Conference on Machine Translation",
  month = nov,
  year = "2020",
  address = "Online",
  publisher = "Association for Computational Linguistics",
  url = "https://aclanthology.org/2020.wmt-1.139",
  pages = "1174--1182",
}
```

## Acknowledgements

The work is supported by the [HPLT project](https://hplt-project.org/), funded by the European Union’s Horizon Europe research and innovation programme under grant agreement No 101070350. We are also grateful for the generous computational resources and IT infrastructure provided by [CSC -- IT Center for Science](https://www.csc.fi/), Finland, and the [EuroHPC supercomputer LUMI](https://www.lumi-supercomputer.eu/).

## Model conversion info

* transformers version: 4.45.1
* OPUS-MT git hash: 0882077
* port time: Tue Oct 8 11:11:37 EEST 2024
* port machine: LM0-400-22516.local
benchmark_results.txt ADDED
@@ -0,0 +1,114 @@
multi-multi tatoeba-test-v2020-07-28-v2023-09-26 0.73325 58.2 10000 75607
dan-eng flores101-devtest 0.71193 47.6 1012 24721
dan-fra flores101-devtest 0.63349 38.1 1012 28343
dan-por flores101-devtest 0.62063 36.2 1012 26519
dan-spa flores101-devtest 0.52557 24.2 1012 29199
isl-deu flores101-devtest 0.50581 22.2 1012 25094
isl-eng flores101-devtest 0.57294 31.6 1012 24721
isl-por flores101-devtest 0.52192 25.8 1012 26519
isl-spa flores101-devtest 0.46364 18.5 1012 29199
nob-eng flores101-devtest 0.67120 42.6 1012 24721
nob-fra flores101-devtest 0.60289 33.9 1012 28343
nob-spa flores101-devtest 0.50848 21.9 1012 29199
swe-deu flores101-devtest 0.60306 32.2 1012 25094
swe-eng flores101-devtest 0.70404 47.9 1012 24721
swe-por flores101-devtest 0.61418 35.7 1012 26519
dan-deu flores200-devtest 0.60897 32.3 1012 25094
dan-eng flores200-devtest 0.71641 48.2 1012 24721
dan-fra flores200-devtest 0.63777 38.9 1012 28343
dan-por flores200-devtest 0.62302 36.7 1012 26519
dan-spa flores200-devtest 0.52803 24.4 1012 29199
fao-deu flores200-devtest 0.41184 16.0 1012 25094
fao-eng flores200-devtest 0.43308 21.2 1012 24721
fao-fra flores200-devtest 0.39253 16.7 1012 28343
fao-por flores200-devtest 0.42649 19.0 1012 26519
fao-spa flores200-devtest 0.38131 14.1 1012 29199
isl-deu flores200-devtest 0.51165 22.7 1012 25094
isl-eng flores200-devtest 0.57745 32.2 1012 24721
isl-fra flores200-devtest 0.54210 27.6 1012 28343
isl-por flores200-devtest 0.52479 26.1 1012 26519
isl-spa flores200-devtest 0.46837 19.2 1012 29199
nno-deu flores200-devtest 0.58054 29.2 1012 25094
nno-eng flores200-devtest 0.69114 45.0 1012 24721
nno-fra flores200-devtest 0.61334 36.0 1012 28343
nno-por flores200-devtest 0.60055 34.1 1012 26519
nno-spa flores200-devtest 0.51190 22.8 1012 29199
nob-deu flores200-devtest 0.57023 27.6 1012 25094
nob-eng flores200-devtest 0.67540 43.1 1012 24721
nob-fra flores200-devtest 0.60568 34.2 1012 28343
nob-por flores200-devtest 0.59466 32.8 1012 26519
nob-spa flores200-devtest 0.51138 22.4 1012 29199
swe-deu flores200-devtest 0.60630 32.6 1012 25094
swe-eng flores200-devtest 0.70584 48.1 1012 24721
swe-fra flores200-devtest 0.63608 39.1 1012 28343
swe-por flores200-devtest 0.62046 36.4 1012 26519
swe-spa flores200-devtest 0.52328 23.9 1012 29199
isl-eng newstest2021 0.56364 32.4 1000 22529
dan-deu ntrex128 0.54229 25.3 1997 48761
dan-eng ntrex128 0.63083 38.7 1997 47673
dan-fra ntrex128 0.54088 26.2 1997 53481
dan-por ntrex128 0.53626 27.0 1997 51631
dan-spa ntrex128 0.56217 30.8 1997 54107
fao-deu ntrex128 0.41701 16.4 1997 48761
fao-eng ntrex128 0.47105 25.3 1997 47673
fao-fra ntrex128 0.40070 16.3 1997 53481
fao-por ntrex128 0.42005 18.0 1997 51631
fao-spa ntrex128 0.44085 20.5 1997 54107
isl-deu ntrex128 0.49932 20.5 1997 48761
isl-eng ntrex128 0.56856 29.7 1997 47673
isl-fra ntrex128 0.51998 24.6 1997 53481
isl-por ntrex128 0.49903 21.7 1997 51631
isl-spa ntrex128 0.53171 27.1 1997 54107
nno-deu ntrex128 0.53000 24.4 1997 48761
nno-eng ntrex128 0.65866 42.9 1997 47673
nno-fra ntrex128 0.54339 27.5 1997 53481
nno-por ntrex128 0.53242 26.3 1997 51631
nno-spa ntrex128 0.55889 30.4 1997 54107
nob-deu ntrex128 0.55549 26.8 1997 48761
nob-eng ntrex128 0.65580 40.9 1997 47673
nob-fra ntrex128 0.56187 29.2 1997 53481
nob-por ntrex128 0.54392 26.6 1997 51631
nob-spa ntrex128 0.57998 32.6 1997 54107
swe-deu ntrex128 0.55549 26.7 1997 48761
swe-eng ntrex128 0.66348 42.2 1997 47673
swe-fra ntrex128 0.56310 29.0 1997 53481
swe-por ntrex128 0.54965 27.8 1997 51631
swe-spa ntrex128 0.58035 32.8 1997 54107
dan-por tatoeba-test-v2020-07-28 0.77269 59.5 871 5351
isl-deu tatoeba-test-v2020-07-28 0.68031 50.7 968 6265
isl-eng tatoeba-test-v2020-07-28 0.66455 50.9 2500 19763
nor-fra tatoeba-test-v2020-07-28 0.72725 54.7 476 3205
swe-eng tatoeba-test-v2020-07-28 0.76394 64.6 10000 66002
swe-fra tatoeba-test-v2020-07-28 0.72733 57.4 1409 9585
swe-por tatoeba-test-v2020-07-28 0.70316 50.5 299 1878
dan-eng tatoeba-test-v2021-03-30 0.76859 63.9 10437 76848
isl-spa tatoeba-test-v2021-03-30 0.66227 48.1 239 1233
nob-spa tatoeba-test-v2021-03-30 0.73521 55.5 894 6934
nor-por tatoeba-test-v2021-03-30 0.67766 44.9 487 4251
nor-spa tatoeba-test-v2021-03-30 0.73479 55.7 970 7387
swe-eng tatoeba-test-v2021-03-30 0.76410 64.6 10151 67008
swe-por tatoeba-test-v2021-03-30 0.70525 50.6 319 1996
dan-deu tatoeba-test-v2021-08-07 0.74460 56.7 9998 76055
dan-eng tatoeba-test-v2021-08-07 0.77233 64.3 10795 79684
dan-fra tatoeba-test-v2021-08-07 0.76425 60.8 1731 11882
dan-por tatoeba-test-v2021-08-07 0.77248 60.0 873 5360
dan-spa tatoeba-test-v2021-08-07 0.72567 54.9 5000 35528
fao-eng tatoeba-test-v2021-08-07 0.54571 39.6 294 1984
isl-deu tatoeba-test-v2021-08-07 0.68535 51.4 969 6279
isl-eng tatoeba-test-v2021-08-07 0.67066 51.7 2503 19788
isl-spa tatoeba-test-v2021-08-07 0.65659 48.5 238 1229
nno-eng tatoeba-test-v2021-08-07 0.69415 55.5 460 3524
nob-deu tatoeba-test-v2021-08-07 0.69862 50.5 3525 33592
nob-eng tatoeba-test-v2021-08-07 0.72912 59.2 4539 36823
nob-fra tatoeba-test-v2021-08-07 0.71392 52.5 323 2269
nob-spa tatoeba-test-v2021-08-07 0.73300 55.1 885 6866
nor-deu tatoeba-test-v2021-08-07 0.69923 50.7 3651 34575
nor-eng tatoeba-test-v2021-08-07 0.72587 58.8 5000 40355
nor-fra tatoeba-test-v2021-08-07 0.73052 55.1 477 3213
nor-por tatoeba-test-v2021-08-07 0.67948 45.4 481 4182
nor-spa tatoeba-test-v2021-08-07 0.73320 55.3 960 7311
swe-deu tatoeba-test-v2021-08-07 0.71816 55.4 3410 23494
swe-eng tatoeba-test-v2021-08-07 0.76648 64.8 10362 68513
swe-fra tatoeba-test-v2021-08-07 0.72847 57.4 1407 9580
swe-por tatoeba-test-v2021-08-07 0.70554 50.3 320 2032
swe-spa tatoeba-test-v2021-08-07 0.70926 54.3 1351 8235
benchmark_translations.zip ADDED
File without changes
config.json ADDED
@@ -0,0 +1,41 @@
{
  "_name_or_path": "pytorch-models/opus-mt-tc-bible-big-gmq-deu_eng_fra_por_spa",
  "activation_dropout": 0.0,
  "activation_function": "relu",
  "architectures": [
    "MarianMTModel"
  ],
  "attention_dropout": 0.0,
  "bos_token_id": 0,
  "classifier_dropout": 0.0,
  "d_model": 1024,
  "decoder_attention_heads": 16,
  "decoder_ffn_dim": 4096,
  "decoder_layerdrop": 0.0,
  "decoder_layers": 6,
  "decoder_start_token_id": 56159,
  "decoder_vocab_size": 56160,
  "dropout": 0.1,
  "encoder_attention_heads": 16,
  "encoder_ffn_dim": 4096,
  "encoder_layerdrop": 0.0,
  "encoder_layers": 6,
  "eos_token_id": 588,
  "forced_eos_token_id": null,
  "init_std": 0.02,
  "is_encoder_decoder": true,
  "max_length": null,
  "max_position_embeddings": 1024,
  "model_type": "marian",
  "normalize_embedding": false,
  "num_beams": null,
  "pad_token_id": 56159,
  "scale_embedding": true,
  "share_encoder_decoder_embeddings": true,
  "static_position_embeddings": true,
  "torch_dtype": "float32",
  "transformers_version": "4.45.1",
  "use_cache": true,
  "vocab_size": 56160
}
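
For orientation, the architecture encoded above (a transformer-big with 6 encoder and 6 decoder layers, d_model 1024, and a shared 56160-entry vocabulary) can be inspected programmatically; a small sketch:

```python
from transformers import AutoConfig

config = AutoConfig.from_pretrained("Helsinki-NLP/opus-mt-tc-bible-big-gmq-deu_eng_fra_por_spa")
print(config.model_type)                             # marian
print(config.d_model)                                # 1024
print(config.encoder_layers, config.decoder_layers)  # 6 6
print(config.vocab_size)                             # 56160 (shared source/target vocabulary)
```
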
generation_config.json ADDED
@@ -0,0 +1,16 @@
{
  "_from_model_config": true,
  "bad_words_ids": [
    [
      56159
    ]
  ],
  "bos_token_id": 0,
  "decoder_start_token_id": 56159,
  "eos_token_id": 588,
  "forced_eos_token_id": 588,
  "max_length": 512,
  "num_beams": 4,
  "pad_token_id": 56159,
  "transformers_version": "4.45.1"
}
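
These defaults (beam search with 4 beams, maximum length 512, and the pad token banned via `bad_words_ids`) are picked up automatically by `generate()`. They can be overridden per call; a sketch, reusing the loading code from the README:

```python
from transformers import MarianMTModel, MarianTokenizer

model_name = "Helsinki-NLP/opus-mt-tc-bible-big-gmq-deu_eng_fra_por_spa"
tokenizer = MarianTokenizer.from_pretrained(model_name)
model = MarianMTModel.from_pretrained(model_name)

batch = tokenizer([">>eng<< Hvor er biblioteket?"], return_tensors="pt")
# Override the shipped defaults (num_beams=4, max_length=512) for this call only.
output_ids = model.generate(**batch, num_beams=8, max_length=128)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```
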
model.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f74df8fa1f9e87b4c27d965d293d6caaffa2e289907b27efeb352ced17675993
size 935715120
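
The entry above is a Git LFS pointer, not the weights themselves. After downloading the actual `model.safetensors`, its checksum can be compared against the recorded `oid`; a small verification sketch:

```python
import hashlib

# Assumes the real model.safetensors (not the LFS pointer) is in the working directory.
h = hashlib.sha256()
with open("model.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
print(h.hexdigest())  # should equal the sha256 oid recorded in the pointer
```
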
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5540661c6aff351a853f4a69a36d9ecdfad65ae7106bd87bac041d0c13a7c265
size 935766341
source.spm ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b22d1f3eb9581c61d8d1e4386d9f73a6871fabd2ed6d6034f592da7a0edf04ed
size 803074
special_tokens_map.json ADDED
@@ -0,0 +1 @@
{"eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>"}
target.spm ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:627d519683b86431e7fc4b74085fa920aa034a0a77b7a7075fc9ada4c327b236
size 809730
tokenizer_config.json ADDED
@@ -0,0 +1 @@
{"source_lang": "gmq", "target_lang": "deu+eng+fra+por+spa", "unk_token": "<unk>", "eos_token": "</s>", "pad_token": "<pad>", "model_max_length": 512, "sp_model_kwargs": {}, "separate_vocabs": false, "special_tokens_map_file": null, "name_or_path": "marian-models/opusTCv20230926max50+bt+jhubc_transformer-big_2024-05-30/gmq-deu+eng+fra+por+spa", "tokenizer_class": "MarianTokenizer"}
vocab.json ADDED
The diff for this file is too large to render. See raw diff