Yova committed on
Commit ed9225c
1 Parent(s): 9ec3adc

Training in progress, epoch 1

config.json CHANGED
@@ -3,12 +3,12 @@
     "T5ForConditionalGeneration"
   ],
   "classifier_dropout": 0.0,
-  "d_ff": 1024,
-  "d_kv": 64,
-  "d_model": 256,
+  "d_ff": 512,
+  "d_kv": 16,
+  "d_model": 128,
   "decoder_start_token_id": 259,
   "dense_act_fn": "relu",
-  "dropout_rate": 0.3,
+  "dropout_rate": 0.1,
   "eos_token_id": 1,
   "feed_forward_proj": "relu",
   "initializer_factor": 1.0,
@@ -16,9 +16,9 @@
   "is_gated_act": false,
   "layer_norm_epsilon": 1e-06,
   "model_type": "t5",
-  "num_decoder_layers": 4,
-  "num_heads": 4,
-  "num_layers": 4,
+  "num_decoder_layers": 6,
+  "num_heads": 6,
+  "num_layers": 6,
   "pad_token_id": 0,
   "relative_attention_max_distance": 64,
   "relative_attention_num_buckets": 16,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:aa0e7beeb9b3d714cfd4a63e0ef123d5075f8c5ae1d171c5ca98e30ffbb16713
-size 29787736
+oid sha256:d3a8e65e43b0054194dc59b9f3b489e9542fee2b3040ba2d36f8a39690590ec9
+size 10059592
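The lines above are a Git LFS pointer: only the hash and byte size of the real weight file are versioned here. A minimal sketch for checking a locally downloaded copy against the new pointer, assuming it sits at the hypothetical local path "model.safetensors":

import hashlib
import os

# Hypothetical local path of the downloaded weights; not part of the commit.
path = "model.safetensors"

sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

print(sha.hexdigest())        # should equal the "oid sha256:" value above
print(os.path.getsize(path))  # should equal the "size" value (10059592 bytes)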
runs/Nov23_07-11-01_3a71d3232b61/events.out.tfevents.1700723461.3a71d3232b61.1811.3 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e6e824d4802720ca08453c5c852969ed386472b695e49a461f6ae2f27957a37b
+size 4454
runs/Nov23_07-11-26_3a71d3232b61/events.out.tfevents.1700723487.3a71d3232b61.1811.4 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:630cb60088d474293d43dacb5956308b19d050d6eeb9a889ea94ebe9170b407c
+size 4779
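The two added files are TensorBoard event logs for this training run, also stored through LFS. A minimal sketch for inspecting one of them locally, assuming the tensorboard package is installed; which scalar tags it prints depends on what the Trainer actually logged and is not part of this commit:

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Path copied from the commit; the file must first be pulled from LFS.
ea = EventAccumulator(
    "runs/Nov23_07-11-26_3a71d3232b61/events.out.tfevents.1700723487.3a71d3232b61.1811.4"
)
ea.Reload()
print(ea.Tags()["scalars"])  # e.g. loss / learning-rate tags, if logged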
tokenizer_config.json CHANGED
@@ -1,5 +1,4 @@
 {
-  "add_bos_token": true,
   "added_tokens_decoder": {
     "0": {
       "content": "<pad>",
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:3b7fc95cde14b9eab8ea13f34449571c7b2f5cc1647d9677028afc6ec895ad75
+oid sha256:cf936f7f2828f9e1d0bd97fe2f711ae48980e968211c90abc22f63e8764da5fd
 size 4728