tiedeman committed
Commit 2475cf8
1 Parent(s): 07437be

Initial commit

Files changed (5):
  1. README.md +2 -2
  2. config.json +4 -9
  3. generation_config.json +16 -0
  4. model.safetensors +3 -0
  5. pytorch_model.bin +2 -2
README.md CHANGED
@@ -70,7 +70,7 @@ This model is part of the [OPUS-MT project](https://github.com/Helsinki-NLP/Opus
  - **Release**: 2024-08-17
  - **License:** Apache-2.0
  - **Language(s):**
- - Source Language(s): bel bos bul ces chu cnr csb dsb hbs hrv hsb mkd multi orv pol rue rus slk slv srp szl ukr
+ - Source Language(s): bel bos bul ces chu cnr csb dsb hbs hrv hsb mkd orv pol rue rus slk slv srp szl ukr
  - Target Language(s): eng
  - **Original Model**: [opusTCv20230926max50+bt+jhubc_transformer-big_2024-08-17.zip](https://object.pouta.csc.fi/Tatoeba-MT-models/sla-eng/opusTCv20230926max50+bt+jhubc_transformer-big_2024-08-17.zip)
  - **Resources for more information:**
@@ -196,5 +196,5 @@ The work is supported by the [HPLT project](https://hplt-project.org/), funded b

  * transformers version: 4.45.1
  * OPUS-MT git hash: a44ab31
- * port time: Sun Oct 6 22:22:36 EEST 2024
+ * port time: Sun Oct 6 23:22:34 EEST 2024
  * port machine: LM0-400-22516.local
config.json CHANGED
@@ -6,11 +6,6 @@
    "MarianMTModel"
  ],
  "attention_dropout": 0.0,
- "bad_words_ids": [
-   [
-     59890
-   ]
- ],
  "bos_token_id": 0,
  "classifier_dropout": 0.0,
  "d_model": 1024,
@@ -26,21 +21,21 @@
  "encoder_layerdrop": 0.0,
  "encoder_layers": 6,
  "eos_token_id": 940,
- "forced_eos_token_id": 940,
+ "forced_eos_token_id": null,
  "init_std": 0.02,
  "is_encoder_decoder": true,
- "max_length": 512,
+ "max_length": null,
  "max_position_embeddings": 1024,
  "model_type": "marian",
  "normalize_embedding": false,
- "num_beams": 4,
+ "num_beams": null,
  "num_hidden_layers": 6,
  "pad_token_id": 59890,
  "scale_embedding": true,
  "share_encoder_decoder_embeddings": true,
  "static_position_embeddings": true,
  "torch_dtype": "float32",
- "transformers_version": "4.18.0.dev0",
+ "transformers_version": "4.45.1",
  "use_cache": true,
  "vocab_size": 59891
  }
generation_config.json ADDED
@@ -0,0 +1,16 @@
+ {
+   "_from_model_config": true,
+   "bad_words_ids": [
+     [
+       59890
+     ]
+   ],
+   "bos_token_id": 0,
+   "decoder_start_token_id": 59890,
+   "eos_token_id": 940,
+   "forced_eos_token_id": 940,
+   "max_length": 512,
+   "num_beams": 4,
+   "pad_token_id": 59890,
+   "transformers_version": "4.45.1"
+ }
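This commit follows the transformers convention of splitting decoding defaults out of config.json into a separate generation_config.json, which is why forced_eos_token_id, max_length and num_beams are nulled in config.json above and reappear here. A minimal sketch of how these defaults surface at inference time, assuming a local checkout of this repository (the "." path is a placeholder) and the standard MarianMT classes:

```python
from transformers import MarianMTModel, MarianTokenizer

# Placeholder: point this at a local checkout of the repository.
model_dir = "."
tokenizer = MarianTokenizer.from_pretrained(model_dir)
model = MarianMTModel.from_pretrained(model_dir)

# Decoding defaults now live on model.generation_config rather than model.config.
print(model.generation_config.num_beams)            # 4
print(model.generation_config.max_length)           # 512
print(model.generation_config.forced_eos_token_id)  # 940

# generate() applies these defaults automatically; keyword arguments still override them.
batch = tokenizer(["Це тестове речення."], return_tensors="pt")
translated = model.generate(**batch)
print(tokenizer.decode(translated[0], skip_special_tokens=True))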
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:17b2841248a4778c0ffee5fc1b31d41fe306f962b72d4a4bb706a9a83cb7b50a
+ size 951012220
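The weight files are stored via Git LFS, so the repository itself only tracks pointer files carrying the object's sha256 and size. A minimal sketch (not part of this commit) for checking a downloaded model.safetensors against the digest recorded above:

```python
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    # Stream the file so large weight files do not have to fit in memory.
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

# Expected digest copied from the git-lfs pointer in this commit.
expected = "17b2841248a4778c0ffee5fc1b31d41fe306f962b72d4a4bb706a9a83cb7b50a"
print(sha256_of("model.safetensors") == expected)
```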
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:015361595f7c4affcfbb5494a5cc5f1118010e812975f36b952b17001d710e5b
- size 951074117
+ oid sha256:1346fa7296d5b244f27cda924d6b5cc74abf5df4bac69dddb012c66f92301e47
+ size 951063493