yasmineelabbar committed
Commit 081dc03
1 Parent(s): 23ccac8

Training in progress, step 500

pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:9d5930aef0944df309aa5b430269084fa239e73618762ce0cb39a64951ab0d71
-size 242069785
+oid sha256:50079a354f6c861cdade7fce2f96f99ab37a59e5f03ae12bd68a2c2678824f69
+size 242071641
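The old and new Git LFS pointers above differ only in the oid (the SHA-256 of the file contents) and the size field. As an illustration, a downloaded copy of the weights can be checked against the new pointer; this is a minimal sketch, assuming the file has been fetched locally as pytorch_model.bin, with the expected values copied from the pointer above:

import hashlib
import os

# Expected values taken from the new LFS pointer for pytorch_model.bin.
EXPECTED_OID = "50079a354f6c861cdade7fce2f96f99ab37a59e5f03ae12bd68a2c2678824f69"
EXPECTED_SIZE = 242071641

def verify_lfs_file(path: str) -> bool:
    """Return True if the local file matches the LFS pointer's size and SHA-256 digest."""
    if os.path.getsize(path) != EXPECTED_SIZE:
        return False
    sha = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            sha.update(chunk)
    return sha.hexdigest() == EXPECTED_OID

print(verify_lfs_file("pytorch_model.bin"))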
runs/Aug14_16-47-25_ab12a772dbfe/events.out.tfevents.1692031713.ab12a772dbfe.621.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2f852307fffa5bbad8dab77860b08d502acd893ac3e27baa20c501e29b2d50e9
+size 5189
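The added file under runs/ is a TensorBoard event log written during training. Below is a minimal sketch of inspecting it with the tensorboard package, assuming the runs/ directory has been downloaded locally; the scalar tag name is an assumption, not taken from this commit:

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Point the accumulator at the downloaded run directory.
acc = EventAccumulator("runs/Aug14_16-47-25_ab12a772dbfe")
acc.Reload()

# List the scalar tags actually present in the log.
print(acc.Tags()["scalars"])

# "train/loss" is a hypothetical tag; substitute one of the tags printed above.
for event in acc.Scalars("train/loss"):
    print(event.step, event.value)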
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -104,10 +104,8 @@
   "clean_up_tokenization_spaces": true,
   "eos_token": "</s>",
   "extra_ids": 100,
-  "legacy": true,
   "model_max_length": 512,
   "pad_token": "<pad>",
-  "sp_model_kwargs": {},
   "tokenizer_class": "T5Tokenizer",
   "unk_token": "<unk>"
 }
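The two keys dropped from tokenizer_config.json are "legacy" and "sp_model_kwargs"; the fields that remain ("tokenizer_class": "T5Tokenizer", "model_max_length": 512, and the pad/eos/unk special tokens) are what transformers reads when the tokenizer is loaded. A minimal sketch, assuming the repository has been downloaded to a local directory (the "./checkpoint" path is a placeholder, not taken from this commit):

from transformers import AutoTokenizer

# "./checkpoint" is a placeholder for a local copy of this repository.
tokenizer = AutoTokenizer.from_pretrained("./checkpoint")

print(type(tokenizer).__name__)    # T5Tokenizer or its fast variant
print(tokenizer.model_max_length)  # 512, per tokenizer_config.json above
print(tokenizer.pad_token, tokenizer.eos_token, tokenizer.unk_token)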
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f75472ddef5f4148d0f0489f921e2290921bb856ac3c5cdb9c08e9e01086dab6
+oid sha256:ceac52694f7da7df81ee224217a4b740f3447a76c210939ebb5250043d2e447d
 size 4155
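training_args.bin is the pickled TrainingArguments object that the transformers Trainer saves next to a checkpoint; only its SHA-256 changes in this commit, while the size stays at 4155 bytes. A minimal sketch of inspecting it, assuming a local copy of the file (weights_only=False is required on recent PyTorch versions because the file is a pickled Python object, so only load files you trust):

import torch

# training_args.bin holds a pickled transformers.TrainingArguments instance.
args = torch.load("training_args.bin", weights_only=False)

print(args.learning_rate)
print(args.per_device_train_batch_size)
print(args.num_train_epochs)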