yushu-li committed on
Commit acd09cc · verified · 1 Parent(s): 42509c3

Upload folder using huggingface_hub

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,61 @@
+---
+library_name: transformers
+license: other
+base_model: Qwen/Qwen2.5-32B-Instruct
+tags:
+- llama-factory
+- full
+- generated_from_trainer
+model-index:
+- name: original
+  results: []
+---
+
+<!-- This model card has been generated automatically according to the information the Trainer had access to. You
+should probably proofread and complete it, then remove this comment. -->
+
+# original
+
+This model is a fine-tuned version of [Qwen/Qwen2.5-32B-Instruct](https://huggingface.co/Qwen/Qwen2.5-32B-Instruct) on the Sky-T1 dataset.
+
+## Model description
+
+More information needed
+
+## Intended uses & limitations
+
+More information needed
+
+## Training and evaluation data
+
+More information needed
+
+## Training procedure
+
+### Training hyperparameters
+
+The following hyperparameters were used during training:
+- learning_rate: 1e-05
+- train_batch_size: 1
+- eval_batch_size: 8
+- seed: 42
+- distributed_type: multi-GPU
+- num_devices: 8
+- gradient_accumulation_steps: 12
+- total_train_batch_size: 96
+- total_eval_batch_size: 64
+- optimizer: Use adamw_torch with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
+- lr_scheduler_type: cosine
+- lr_scheduler_warmup_ratio: 0.1
+- num_epochs: 3.0
+
+### Training results
+
+
+
+### Framework versions
+
+- Transformers 4.46.1
+- Pytorch 2.5.1+cu124
+- Datasets 3.1.0
+- Tokenizers 0.20.3
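
The card above documents the training recipe but no usage; a minimal inference sketch with the `transformers` API follows. The repo id `your-org/original` is a hypothetical placeholder for wherever this checkpoint is hosted.

```python
# Minimal inference sketch for this checkpoint; "your-org/original" is a
# hypothetical placeholder repo id, not the actual repository path.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "your-org/original"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype="bfloat16",  # matches "torch_dtype": "bfloat16" in config.json
    device_map="auto",       # a 32B bf16 checkpoint needs ~66 GB of device memory
)

# Qwen2.5 checkpoints ship a chat template built on <|im_start|>/<|im_end|>.
messages = [{"role": "user", "content": "Explain gradient accumulation briefly."}]
inputs = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

out = model.generate(inputs, max_new_tokens=256)
print(tokenizer.decode(out[0][inputs.shape[-1]:], skip_special_tokens=True))
```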
added_tokens.json ADDED
@@ -0,0 +1,24 @@
+{
+  "</tool_call>": 151658,
+  "<tool_call>": 151657,
+  "<|box_end|>": 151649,
+  "<|box_start|>": 151648,
+  "<|endoftext|>": 151643,
+  "<|file_sep|>": 151664,
+  "<|fim_middle|>": 151660,
+  "<|fim_pad|>": 151662,
+  "<|fim_prefix|>": 151659,
+  "<|fim_suffix|>": 151661,
+  "<|im_end|>": 151645,
+  "<|im_start|>": 151644,
+  "<|image_pad|>": 151655,
+  "<|object_ref_end|>": 151647,
+  "<|object_ref_start|>": 151646,
+  "<|quad_end|>": 151651,
+  "<|quad_start|>": 151650,
+  "<|repo_name|>": 151663,
+  "<|video_pad|>": 151656,
+  "<|vision_end|>": 151653,
+  "<|vision_pad|>": 151654,
+  "<|vision_start|>": 151652
+}
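
These are the standard Qwen2.5 control tokens appended past the base BPE vocabulary. A quick sanity check, assuming the same hypothetical repo id as above, that the tokenizer resolves them to these ids:

```python
# Verify a few special-token ids against added_tokens.json
# ("your-org/original" is the same hypothetical placeholder as above).
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("your-org/original")
for token, expected in [("<|endoftext|>", 151643),
                        ("<|im_start|>", 151644),
                        ("<|im_end|>", 151645)]:
    assert tok.convert_tokens_to_ids(token) == expected, token
print("special-token ids match added_tokens.json")
```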
all_results.json ADDED
@@ -0,0 +1,8 @@
+{
+  "epoch": 2.9892735251097027,
+  "total_flos": 1494941780148224.0,
+  "train_loss": 0.029626486676872944,
+  "train_runtime": 67167.8945,
+  "train_samples_per_second": 0.733,
+  "train_steps_per_second": 0.008
+}
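
These stats are internally consistent with the README hyperparameters; a small back-of-the-envelope sketch recovering the implied dataset size (all inputs come from the files in this commit):

```python
# Derive implied counts from all_results.json and the README batch settings.
runtime_s = 67167.8945        # train_runtime
samples_per_s = 0.733         # train_samples_per_second
epochs = 2.9892735251097027   # epoch
total_batch = 96              # 1 per device * 8 GPUs * 12 accumulation steps

total_samples = runtime_s * samples_per_s   # ~49,234 samples processed
per_epoch = total_samples / epochs          # ~16,471 examples per epoch
steps = total_samples / total_batch         # ~513 optimizer steps (~0.008/s)
print(f"~{per_epoch:,.0f} examples/epoch over ~{steps:,.0f} steps")
```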
config.json ADDED
@@ -0,0 +1,29 @@
+{
+  "_name_or_path": "Qwen/Qwen2.5-32B-Instruct",
+  "architectures": [
+    "Qwen2ForCausalLM"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 151643,
+  "eos_token_id": 151645,
+  "hidden_act": "silu",
+  "hidden_size": 5120,
+  "initializer_range": 0.02,
+  "intermediate_size": 27648,
+  "max_position_embeddings": 32768,
+  "max_window_layers": 70,
+  "model_type": "qwen2",
+  "num_attention_heads": 40,
+  "num_hidden_layers": 64,
+  "num_key_value_heads": 8,
+  "rms_norm_eps": 1e-06,
+  "rope_scaling": null,
+  "rope_theta": 1000000.0,
+  "sliding_window": null,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.46.1",
+  "use_cache": false,
+  "use_sliding_window": false,
+  "vocab_size": 152064
+}
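
The shapes declared here account exactly for the checkpoint's size: summed over 64 layers plus the untied embedding and lm_head, the parameter count times 2 bytes (bf16) reproduces the 65,527,752,704-byte total_size reported in the shard index below. A sketch of that arithmetic:

```python
# Parameter count implied by config.json (Qwen2-style blocks with biased
# q/k/v projections), checked against the shard index's total_size.
hidden, layers, inter, vocab = 5120, 64, 27648, 152064
heads, kv_heads = 40, 8
head_dim = hidden // heads  # 128

attn = (hidden * hidden + hidden                                    # q_proj (+bias)
        + 2 * (hidden * kv_heads * head_dim + kv_heads * head_dim)  # k/v (+bias)
        + hidden * hidden)                                          # o_proj (no bias)
mlp = 3 * hidden * inter                                            # gate, up, down
per_layer = attn + mlp + 2 * hidden                                 # + two RMSNorm weights

total = layers * per_layer + 2 * vocab * hidden + hidden  # embeddings, lm_head, final norm
assert total * 2 == 65527752704                           # bf16: 2 bytes per parameter
print(f"{total:,} parameters")                            # 32,763,876,352
```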
generation_config.json ADDED
@@ -0,0 +1,14 @@
+{
+  "bos_token_id": 151643,
+  "do_sample": true,
+  "eos_token_id": [
+    151645,
+    151643
+  ],
+  "pad_token_id": 151643,
+  "repetition_penalty": 1.05,
+  "temperature": 0.7,
+  "top_k": 20,
+  "top_p": 0.8,
+  "transformers_version": "4.46.1"
+}
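
`model.generate` picks these defaults up automatically; they can be inspected without downloading the weights, and per-call keyword arguments override them. A sketch (same hypothetical repo id):

```python
# Inspect the shipped decoding defaults without loading ~65 GB of weights
# ("your-org/original" remains a hypothetical placeholder).
from transformers import GenerationConfig

gen = GenerationConfig.from_pretrained("your-org/original")
print(gen.do_sample, gen.temperature, gen.top_p, gen.top_k, gen.repetition_penalty)
# -> True 0.7 0.8 20 1.05; pass e.g. temperature=0.2 to generate() to override.
```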
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model-00001-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9a508646f2416c107affa2565ae4338b65c06346fd0ca77fd2f549692e107817
+size 4891730992
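
Each shard is committed as a Git LFS pointer: a `version` line, a SHA-256 `oid`, and the payload `size` in bytes. A minimal parser sketch for the three-line format above:

```python
# Parse a git-lfs pointer file (the three lines shown above) into its fields.
def parse_lfs_pointer(text: str) -> dict:
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {"version": fields["version"],
            "oid": fields["oid"].removeprefix("sha256:"),
            "size": int(fields["size"])}

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:9a508646f2416c107affa2565ae4338b65c06346fd0ca77fd2f549692e107817
size 4891730992"""
print(parse_lfs_pointer(pointer))  # ~4.9 GB shard addressed by its sha256
```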
model-00002-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1cee9ca0f2064367e1372c344c0bd3f65bfbf5840065aeedcff7f8ec03ec92ca
+size 4876059352
model-00003-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:91532a53694ba0a04178e012b67696aaaeabfbd98ee9399ddb4ae74fcfa1c1a6
+size 4876059384
model-00004-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8742c1f09702e874aaeea52f35b91d59ef3955f61ea1b415070d58fb09162184
+size 4876059416
model-00005-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:30bc9b8c48c81931480cfd3d797e779e75f727720bf129a26bd22375241cbb29
+size 4876059416
model-00006-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7de1ab4637f40fe23ec5195f12ae9719b237f1dee5070af2558e9198517598f6
+size 4876059416
model-00007-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f893d42883bcb94b8630ac1952a49d3de259408924b9b90aee2624651e0896b7
+size 4876059416
model-00008-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b92ec03df7b2404286f788a713a218307d1b1498cef0f2482bb4c038a83ca6ba
+size 4876059416
model-00009-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b28e4d633d376b719693a03c64a0420ff85d6c3651eab680f878620cd509de31
+size 4876059416
model-00010-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:291635ac7fd91e2f30fa9cb3cd4f7378a6e610cb242b0dae26f4581133eb67ed
+size 4876059416
model-00011-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d3006b91e470bb470ea41c1df1acdc4095e8d4e586bb8fcb9952b89a3f9ecc21
+size 4876059416
model-00012-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bb2a8432f9c756dca12471f1b2cfc38b31f38fbb20661ba6f176c442f9b33af1
+size 4876059416
model-00013-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ddc629b8fdd8770d9faa65cadbf0881cf2d3480fdf66f97c74e90d2d9e8979c2
+size 4876059416
model-00014-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:15ffe89489ca17850d82562550cd5faf1a9045d9f02c1f5b15c6163f65482aee
+size 2123397800
model.safetensors.index.json ADDED
@@ -0,0 +1,778 @@
+{
+  "metadata": {
+    "total_size": 65527752704
+  },
+  "weight_map": {
+    "lm_head.weight": "model-00014-of-00014.safetensors",
+    "model.embed_tokens.weight": "model-00001-of-00014.safetensors",
+    "model.layers.0.input_layernorm.weight": "model-00001-of-00014.safetensors",
+    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00014.safetensors",
+    "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00014.safetensors",
+    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00014.safetensors",
+    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00014.safetensors",
+    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.1.input_layernorm.weight": "model-00001-of-00014.safetensors",
+    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00014.safetensors",
+    "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00014.safetensors",
+    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00014.safetensors",
+    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00014.safetensors",
+    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.10.input_layernorm.weight": "model-00003-of-00014.safetensors",
+    "model.layers.10.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.10.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.10.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.10.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
+    "model.layers.10.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
+    "model.layers.10.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.10.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.10.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
+    "model.layers.10.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.10.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
+    "model.layers.10.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.11.input_layernorm.weight": "model-00003-of-00014.safetensors",
+    "model.layers.11.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.11.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.11.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.11.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
+    "model.layers.11.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
+    "model.layers.11.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.11.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.11.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
+    "model.layers.11.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.11.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
+    "model.layers.11.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.12.input_layernorm.weight": "model-00003-of-00014.safetensors",
+    "model.layers.12.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.12.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.12.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.12.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
+    "model.layers.12.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
+    "model.layers.12.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.12.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.12.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
+    "model.layers.12.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.12.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
+    "model.layers.12.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.13.input_layernorm.weight": "model-00004-of-00014.safetensors",
+    "model.layers.13.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.13.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.13.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.13.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
+    "model.layers.13.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
+    "model.layers.13.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.13.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.13.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
+    "model.layers.13.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.13.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
+    "model.layers.13.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.14.input_layernorm.weight": "model-00004-of-00014.safetensors",
+    "model.layers.14.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.14.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.14.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.14.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
+    "model.layers.14.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
+    "model.layers.14.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.14.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.14.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
+    "model.layers.14.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.14.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
+    "model.layers.14.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.15.input_layernorm.weight": "model-00004-of-00014.safetensors",
+    "model.layers.15.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.15.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.15.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.15.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
+    "model.layers.15.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
+    "model.layers.15.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.15.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.15.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
+    "model.layers.15.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.15.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
+    "model.layers.15.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.16.input_layernorm.weight": "model-00004-of-00014.safetensors",
+    "model.layers.16.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.16.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.16.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.16.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
+    "model.layers.16.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
+    "model.layers.16.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.16.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.16.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
+    "model.layers.16.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.16.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
+    "model.layers.16.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.17.input_layernorm.weight": "model-00004-of-00014.safetensors",
+    "model.layers.17.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.17.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.17.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.17.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
+    "model.layers.17.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
+    "model.layers.17.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.17.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.17.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
+    "model.layers.17.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.17.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
+    "model.layers.17.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.18.input_layernorm.weight": "model-00005-of-00014.safetensors",
+    "model.layers.18.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.18.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.18.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.18.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
+    "model.layers.18.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
+    "model.layers.18.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.18.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.18.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
+    "model.layers.18.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.18.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
+    "model.layers.18.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.19.input_layernorm.weight": "model-00005-of-00014.safetensors",
+    "model.layers.19.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.19.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.19.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.19.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
+    "model.layers.19.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
+    "model.layers.19.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.19.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.19.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
+    "model.layers.19.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.19.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
+    "model.layers.19.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.2.input_layernorm.weight": "model-00001-of-00014.safetensors",
+    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.2.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00014.safetensors",
+    "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00014.safetensors",
+    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00014.safetensors",
+    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00014.safetensors",
+    "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.20.input_layernorm.weight": "model-00005-of-00014.safetensors",
+    "model.layers.20.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.20.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.20.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.20.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
+    "model.layers.20.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
+    "model.layers.20.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.20.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.20.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
+    "model.layers.20.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.20.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
+    "model.layers.20.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.21.input_layernorm.weight": "model-00005-of-00014.safetensors",
+    "model.layers.21.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.21.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.21.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.21.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
+    "model.layers.21.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
+    "model.layers.21.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.21.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.21.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
+    "model.layers.21.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.21.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
+    "model.layers.21.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.22.input_layernorm.weight": "model-00005-of-00014.safetensors",
+    "model.layers.22.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.22.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.22.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.22.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
+    "model.layers.22.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
+    "model.layers.22.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.22.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.22.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
+    "model.layers.22.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.22.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
+    "model.layers.22.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.23.input_layernorm.weight": "model-00006-of-00014.safetensors",
+    "model.layers.23.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.23.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.23.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.23.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
+    "model.layers.23.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
+    "model.layers.23.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.23.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.23.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
+    "model.layers.23.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.23.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
+    "model.layers.23.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.24.input_layernorm.weight": "model-00006-of-00014.safetensors",
+    "model.layers.24.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.24.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.24.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.24.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
+    "model.layers.24.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
+    "model.layers.24.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.24.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.24.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
+    "model.layers.24.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.24.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
+    "model.layers.24.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.25.input_layernorm.weight": "model-00006-of-00014.safetensors",
+    "model.layers.25.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.25.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.25.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.25.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
+    "model.layers.25.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
+    "model.layers.25.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.25.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.25.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
+    "model.layers.25.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.25.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
+    "model.layers.25.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.26.input_layernorm.weight": "model-00006-of-00014.safetensors",
+    "model.layers.26.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.26.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.26.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.26.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
+    "model.layers.26.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
+    "model.layers.26.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.26.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.26.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
+    "model.layers.26.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.26.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
+    "model.layers.26.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.27.input_layernorm.weight": "model-00006-of-00014.safetensors",
+    "model.layers.27.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.27.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.27.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.27.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
+    "model.layers.27.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
+    "model.layers.27.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.27.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.27.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
+    "model.layers.27.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.27.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
+    "model.layers.27.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.28.input_layernorm.weight": "model-00007-of-00014.safetensors",
+    "model.layers.28.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.28.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.28.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.28.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
+    "model.layers.28.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
+    "model.layers.28.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.28.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.28.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
+    "model.layers.28.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.28.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
+    "model.layers.28.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.29.input_layernorm.weight": "model-00007-of-00014.safetensors",
+    "model.layers.29.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.29.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.29.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.29.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
+    "model.layers.29.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
+    "model.layers.29.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.29.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.29.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
+    "model.layers.29.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.29.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
+    "model.layers.29.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.3.input_layernorm.weight": "model-00002-of-00014.safetensors",
+    "model.layers.3.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
+    "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.3.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
+    "model.layers.3.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
+    "model.layers.3.self_attn.k_proj.bias": "model-00001-of-00014.safetensors",
+    "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.3.self_attn.q_proj.bias": "model-00001-of-00014.safetensors",
+    "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.3.self_attn.v_proj.bias": "model-00001-of-00014.safetensors",
+    "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.30.input_layernorm.weight": "model-00007-of-00014.safetensors",
+    "model.layers.30.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.30.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.30.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.30.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
+    "model.layers.30.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
+    "model.layers.30.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.30.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.30.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
+    "model.layers.30.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.30.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
+    "model.layers.30.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.31.input_layernorm.weight": "model-00007-of-00014.safetensors",
+    "model.layers.31.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.31.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.31.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.31.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
+    "model.layers.31.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
+    "model.layers.31.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.31.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.31.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
+    "model.layers.31.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.31.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
+    "model.layers.31.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.32.input_layernorm.weight": "model-00007-of-00014.safetensors",
+    "model.layers.32.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.32.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.32.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.32.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
+    "model.layers.32.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
+    "model.layers.32.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.32.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.32.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
+    "model.layers.32.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.32.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
+    "model.layers.32.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.33.input_layernorm.weight": "model-00008-of-00014.safetensors",
+    "model.layers.33.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.33.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.33.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.33.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
+    "model.layers.33.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
+    "model.layers.33.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.33.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.33.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
+    "model.layers.33.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.33.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
+    "model.layers.33.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.34.input_layernorm.weight": "model-00008-of-00014.safetensors",
+    "model.layers.34.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.34.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.34.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.34.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
+    "model.layers.34.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
+    "model.layers.34.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.34.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.34.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
+    "model.layers.34.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.34.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
+    "model.layers.34.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.35.input_layernorm.weight": "model-00008-of-00014.safetensors",
+    "model.layers.35.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.35.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.35.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.35.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
+    "model.layers.35.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
+    "model.layers.35.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.35.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.35.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
+    "model.layers.35.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.35.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
+    "model.layers.35.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.36.input_layernorm.weight": "model-00008-of-00014.safetensors",
+    "model.layers.36.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.36.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.36.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.36.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
+    "model.layers.36.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
+    "model.layers.36.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.36.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.36.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
+    "model.layers.36.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.36.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
+    "model.layers.36.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.37.input_layernorm.weight": "model-00008-of-00014.safetensors",
+    "model.layers.37.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.37.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.37.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.37.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
+    "model.layers.37.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
+    "model.layers.37.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.37.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.37.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
+    "model.layers.37.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.37.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
+    "model.layers.37.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.38.input_layernorm.weight": "model-00009-of-00014.safetensors",
+    "model.layers.38.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.38.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.38.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.38.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
+    "model.layers.38.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
+    "model.layers.38.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.38.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.38.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
+    "model.layers.38.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.38.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
+    "model.layers.38.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.39.input_layernorm.weight": "model-00009-of-00014.safetensors",
+    "model.layers.39.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.39.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.39.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.39.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
+    "model.layers.39.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
+    "model.layers.39.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.39.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.39.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
+    "model.layers.39.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.39.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
+    "model.layers.39.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.4.input_layernorm.weight": "model-00002-of-00014.safetensors",
+    "model.layers.4.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
+    "model.layers.4.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
+    "model.layers.4.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
+    "model.layers.4.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
+    "model.layers.4.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
+    "model.layers.4.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
+    "model.layers.4.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
+    "model.layers.4.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
+    "model.layers.4.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
+    "model.layers.4.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
+    "model.layers.4.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
+    "model.layers.40.input_layernorm.weight": "model-00009-of-00014.safetensors",
+    "model.layers.40.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.40.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.40.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.40.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
+    "model.layers.40.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
+    "model.layers.40.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.40.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.40.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
+    "model.layers.40.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.40.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
+    "model.layers.40.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.41.input_layernorm.weight": "model-00009-of-00014.safetensors",
+    "model.layers.41.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.41.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.41.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.41.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
+    "model.layers.41.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
+    "model.layers.41.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.41.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.41.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
+    "model.layers.41.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.41.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
+    "model.layers.41.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.42.input_layernorm.weight": "model-00009-of-00014.safetensors",
+    "model.layers.42.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.42.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.42.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.42.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
+    "model.layers.42.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
+    "model.layers.42.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.42.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.42.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
+    "model.layers.42.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.42.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
+    "model.layers.42.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.43.input_layernorm.weight": "model-00010-of-00014.safetensors",
+    "model.layers.43.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.43.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.43.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.43.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
+    "model.layers.43.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
+    "model.layers.43.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.43.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.43.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
+    "model.layers.43.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.43.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
+    "model.layers.43.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.44.input_layernorm.weight": "model-00010-of-00014.safetensors",
+    "model.layers.44.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.44.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.44.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.44.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
+    "model.layers.44.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
+    "model.layers.44.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.44.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.44.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
+    "model.layers.44.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.44.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
+    "model.layers.44.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.45.input_layernorm.weight": "model-00010-of-00014.safetensors",
+    "model.layers.45.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.45.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.45.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.45.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
+    "model.layers.45.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
+    "model.layers.45.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.45.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.45.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
+    "model.layers.45.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.45.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
+    "model.layers.45.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.46.input_layernorm.weight": "model-00010-of-00014.safetensors",
+    "model.layers.46.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.46.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.46.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.46.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
+    "model.layers.46.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
+    "model.layers.46.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.46.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.46.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
+    "model.layers.46.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.46.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
+    "model.layers.46.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.47.input_layernorm.weight": "model-00010-of-00014.safetensors",
+    "model.layers.47.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.47.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.47.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.47.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
+    "model.layers.47.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
+    "model.layers.47.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.47.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.47.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
+    "model.layers.47.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.47.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
+    "model.layers.47.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.48.input_layernorm.weight": "model-00011-of-00014.safetensors",
+    "model.layers.48.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.48.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.48.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.48.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
+    "model.layers.48.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
+    "model.layers.48.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.48.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.48.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
+    "model.layers.48.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.48.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
+    "model.layers.48.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.49.input_layernorm.weight": "model-00011-of-00014.safetensors",
+    "model.layers.49.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.49.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.49.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.49.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
+    "model.layers.49.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
+    "model.layers.49.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.49.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.49.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
+    "model.layers.49.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.49.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
+    "model.layers.49.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.5.input_layernorm.weight": "model-00002-of-00014.safetensors",
+    "model.layers.5.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
+    "model.layers.5.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
+    "model.layers.5.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
+    "model.layers.5.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
+    "model.layers.5.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
+    "model.layers.5.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
+    "model.layers.5.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
+    "model.layers.5.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
+    "model.layers.5.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
+    "model.layers.5.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
+    "model.layers.5.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
+    "model.layers.50.input_layernorm.weight": "model-00011-of-00014.safetensors",
+    "model.layers.50.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.50.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.50.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.50.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
+    "model.layers.50.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
+    "model.layers.50.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.50.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.50.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
+    "model.layers.50.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.50.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
+    "model.layers.50.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.51.input_layernorm.weight": "model-00011-of-00014.safetensors",
+    "model.layers.51.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.51.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.51.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.51.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
+    "model.layers.51.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
+    "model.layers.51.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.51.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.51.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
+    "model.layers.51.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.51.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
+    "model.layers.51.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.52.input_layernorm.weight": "model-00011-of-00014.safetensors",
+    "model.layers.52.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.52.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.52.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.52.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
+    "model.layers.52.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
+    "model.layers.52.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.52.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.52.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
+    "model.layers.52.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.52.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
+    "model.layers.52.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.53.input_layernorm.weight": "model-00012-of-00014.safetensors",
+    "model.layers.53.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
+    "model.layers.53.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.53.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
+    "model.layers.53.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
+    "model.layers.53.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
+    "model.layers.53.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.53.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.53.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
+    "model.layers.53.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.53.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
+    "model.layers.53.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.54.input_layernorm.weight": "model-00012-of-00014.safetensors",
+    "model.layers.54.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
+    "model.layers.54.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
+    "model.layers.54.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
+    "model.layers.54.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
+    "model.layers.54.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
+    "model.layers.54.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
+    "model.layers.54.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
616
+ "model.layers.54.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
617
+ "model.layers.54.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
618
+ "model.layers.54.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
619
+ "model.layers.54.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
620
+ "model.layers.55.input_layernorm.weight": "model-00012-of-00014.safetensors",
621
+ "model.layers.55.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
622
+ "model.layers.55.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
623
+ "model.layers.55.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
624
+ "model.layers.55.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
625
+ "model.layers.55.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
626
+ "model.layers.55.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
627
+ "model.layers.55.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
628
+ "model.layers.55.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
629
+ "model.layers.55.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
630
+ "model.layers.55.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
631
+ "model.layers.55.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
632
+ "model.layers.56.input_layernorm.weight": "model-00012-of-00014.safetensors",
633
+ "model.layers.56.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
634
+ "model.layers.56.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
635
+ "model.layers.56.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
636
+ "model.layers.56.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
637
+ "model.layers.56.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
638
+ "model.layers.56.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
639
+ "model.layers.56.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
640
+ "model.layers.56.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
641
+ "model.layers.56.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
642
+ "model.layers.56.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
643
+ "model.layers.56.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
644
+ "model.layers.57.input_layernorm.weight": "model-00012-of-00014.safetensors",
645
+ "model.layers.57.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
646
+ "model.layers.57.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
647
+ "model.layers.57.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
648
+ "model.layers.57.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
649
+ "model.layers.57.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
650
+ "model.layers.57.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
651
+ "model.layers.57.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
652
+ "model.layers.57.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
653
+ "model.layers.57.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
654
+ "model.layers.57.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
655
+ "model.layers.57.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
656
+ "model.layers.58.input_layernorm.weight": "model-00013-of-00014.safetensors",
657
+ "model.layers.58.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
658
+ "model.layers.58.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
659
+ "model.layers.58.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
660
+ "model.layers.58.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
661
+ "model.layers.58.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
662
+ "model.layers.58.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
663
+ "model.layers.58.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
664
+ "model.layers.58.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
665
+ "model.layers.58.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
666
+ "model.layers.58.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
667
+ "model.layers.58.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
668
+ "model.layers.59.input_layernorm.weight": "model-00013-of-00014.safetensors",
669
+ "model.layers.59.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
670
+ "model.layers.59.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
671
+ "model.layers.59.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
672
+ "model.layers.59.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
673
+ "model.layers.59.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
674
+ "model.layers.59.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
675
+ "model.layers.59.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
676
+ "model.layers.59.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
677
+ "model.layers.59.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
678
+ "model.layers.59.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
679
+ "model.layers.59.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
680
+ "model.layers.6.input_layernorm.weight": "model-00002-of-00014.safetensors",
681
+ "model.layers.6.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
682
+ "model.layers.6.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
683
+ "model.layers.6.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
684
+ "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
685
+ "model.layers.6.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
686
+ "model.layers.6.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
687
+ "model.layers.6.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
688
+ "model.layers.6.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
689
+ "model.layers.6.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
690
+ "model.layers.6.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
691
+ "model.layers.6.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
692
+ "model.layers.60.input_layernorm.weight": "model-00013-of-00014.safetensors",
693
+ "model.layers.60.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
694
+ "model.layers.60.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
695
+ "model.layers.60.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
696
+ "model.layers.60.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
697
+ "model.layers.60.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
698
+ "model.layers.60.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
699
+ "model.layers.60.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
700
+ "model.layers.60.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
701
+ "model.layers.60.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
702
+ "model.layers.60.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
703
+ "model.layers.60.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
704
+ "model.layers.61.input_layernorm.weight": "model-00013-of-00014.safetensors",
705
+ "model.layers.61.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
706
+ "model.layers.61.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
707
+ "model.layers.61.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
708
+ "model.layers.61.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
709
+ "model.layers.61.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
710
+ "model.layers.61.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
711
+ "model.layers.61.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
712
+ "model.layers.61.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
713
+ "model.layers.61.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
714
+ "model.layers.61.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
715
+ "model.layers.61.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
716
+ "model.layers.62.input_layernorm.weight": "model-00013-of-00014.safetensors",
717
+ "model.layers.62.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
718
+ "model.layers.62.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
719
+ "model.layers.62.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
720
+ "model.layers.62.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
721
+ "model.layers.62.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
722
+ "model.layers.62.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
723
+ "model.layers.62.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
724
+ "model.layers.62.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
725
+ "model.layers.62.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
726
+ "model.layers.62.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
727
+ "model.layers.62.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
728
+ "model.layers.63.input_layernorm.weight": "model-00014-of-00014.safetensors",
729
+ "model.layers.63.mlp.down_proj.weight": "model-00014-of-00014.safetensors",
730
+ "model.layers.63.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
731
+ "model.layers.63.mlp.up_proj.weight": "model-00014-of-00014.safetensors",
732
+ "model.layers.63.post_attention_layernorm.weight": "model-00014-of-00014.safetensors",
733
+ "model.layers.63.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
734
+ "model.layers.63.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
735
+ "model.layers.63.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
736
+ "model.layers.63.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
737
+ "model.layers.63.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
738
+ "model.layers.63.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
739
+ "model.layers.63.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
740
+ "model.layers.7.input_layernorm.weight": "model-00002-of-00014.safetensors",
741
+ "model.layers.7.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
742
+ "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
743
+ "model.layers.7.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
744
+ "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
745
+ "model.layers.7.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
746
+ "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
747
+ "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
748
+ "model.layers.7.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
749
+ "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
750
+ "model.layers.7.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
751
+ "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
752
+ "model.layers.8.input_layernorm.weight": "model-00003-of-00014.safetensors",
753
+ "model.layers.8.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
754
+ "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
755
+ "model.layers.8.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
756
+ "model.layers.8.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
757
+ "model.layers.8.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
758
+ "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
759
+ "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
760
+ "model.layers.8.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
761
+ "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
762
+ "model.layers.8.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
763
+ "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
764
+ "model.layers.9.input_layernorm.weight": "model-00003-of-00014.safetensors",
765
+ "model.layers.9.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
766
+ "model.layers.9.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
767
+ "model.layers.9.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
768
+ "model.layers.9.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
769
+ "model.layers.9.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
770
+ "model.layers.9.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
771
+ "model.layers.9.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
772
+ "model.layers.9.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
773
+ "model.layers.9.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
774
+ "model.layers.9.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
775
+ "model.layers.9.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
776
+ "model.norm.weight": "model-00014-of-00014.safetensors"
777
+ }
778
+ }
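The entries above close out model.safetensors.index.json: its weight map assigns every parameter to one of the 14 shard files, with model.norm.weight landing in the last shard. Below is a minimal sketch (not part of this commit; it assumes a local copy of the file and the standard transformers index layout with a top-level "weight_map" key) of how the index can be inspected:

```python
# Minimal sketch: inspect which shard stores each tensor in a sharded
# safetensors checkpoint. Assumes model.safetensors.index.json is local
# and uses the standard {"metadata": ..., "weight_map": ...} layout.
import json
from collections import Counter

with open("model.safetensors.index.json") as f:
    index = json.load(f)

weight_map = index["weight_map"]

# Look up the shard holding a single tensor (matches the entry above).
print(weight_map["model.norm.weight"])  # model-00014-of-00014.safetensors

# Count how many tensors each of the 14 shard files stores.
print(Counter(weight_map.values()).most_common())
```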
special_tokens_map.json ADDED
@@ -0,0 +1,31 @@
1
+ {
2
+ "additional_special_tokens": [
3
+ "<|im_start|>",
4
+ "<|im_end|>",
5
+ "<|object_ref_start|>",
6
+ "<|object_ref_end|>",
7
+ "<|box_start|>",
8
+ "<|box_end|>",
9
+ "<|quad_start|>",
10
+ "<|quad_end|>",
11
+ "<|vision_start|>",
12
+ "<|vision_end|>",
13
+ "<|vision_pad|>",
14
+ "<|image_pad|>",
15
+ "<|video_pad|>"
16
+ ],
17
+ "eos_token": {
18
+ "content": "<|endoftext|>",
19
+ "lstrip": false,
20
+ "normalized": false,
21
+ "rstrip": false,
22
+ "single_word": false
23
+ },
24
+ "pad_token": {
25
+ "content": "<|endoftext|>",
26
+ "lstrip": false,
27
+ "normalized": false,
28
+ "rstrip": false,
29
+ "single_word": false
30
+ }
31
+ }
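special_tokens_map.json pins both eos_token and pad_token to <|im_end|>-style ChatML markers' sibling <|endoftext|>, and registers the ChatML and vision markers as additional special tokens. A minimal sketch (the checkpoint path is a placeholder, not a real repo id) of how these settings surface through the tokenizer API:

```python
# Minimal sketch: the special-token settings above are what AutoTokenizer
# exposes after loading this checkpoint. The path below is a placeholder.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("path/to/checkpoint")
print(tok.eos_token, tok.pad_token)  # <|endoftext|> <|endoftext|>

# <|im_end|> is one of the additional special tokens; per the
# added_tokens_decoder in tokenizer_config.json below, its id is 151645.
print(tok.convert_tokens_to_ids("<|im_end|>"))  # 151645
```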
tokenizer.json ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
3
+ size 11421896
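tokenizer.json is stored via Git LFS, so the three lines above are the LFS pointer rather than the tokenizer itself: spec version, sha256 oid, and byte size. A minimal sketch verifying a materialized copy against the pointer:

```python
# Minimal sketch: check that a downloaded tokenizer.json matches the Git LFS
# pointer above (sha256 oid and byte size). Assumes `git lfs pull` already ran.
import hashlib
import os

path = "tokenizer.json"
oid = "9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa"

with open(path, "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()

assert digest == oid, "content does not match the LFS pointer oid"
assert os.path.getsize(path) == 11421896, "size does not match the pointer"
print("tokenizer.json matches its LFS pointer")
```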
tokenizer_config.json ADDED
@@ -0,0 +1,208 @@
1
+ {
2
+ "add_bos_token": false,
3
+ "add_prefix_space": false,
4
+ "added_tokens_decoder": {
5
+ "151643": {
6
+ "content": "<|endoftext|>",
7
+ "lstrip": false,
8
+ "normalized": false,
9
+ "rstrip": false,
10
+ "single_word": false,
11
+ "special": true
12
+ },
13
+ "151644": {
14
+ "content": "<|im_start|>",
15
+ "lstrip": false,
16
+ "normalized": false,
17
+ "rstrip": false,
18
+ "single_word": false,
19
+ "special": true
20
+ },
21
+ "151645": {
22
+ "content": "<|im_end|>",
23
+ "lstrip": false,
24
+ "normalized": false,
25
+ "rstrip": false,
26
+ "single_word": false,
27
+ "special": true
28
+ },
29
+ "151646": {
30
+ "content": "<|object_ref_start|>",
31
+ "lstrip": false,
32
+ "normalized": false,
33
+ "rstrip": false,
34
+ "single_word": false,
35
+ "special": true
36
+ },
37
+ "151647": {
38
+ "content": "<|object_ref_end|>",
39
+ "lstrip": false,
40
+ "normalized": false,
41
+ "rstrip": false,
42
+ "single_word": false,
43
+ "special": true
44
+ },
45
+ "151648": {
46
+ "content": "<|box_start|>",
47
+ "lstrip": false,
48
+ "normalized": false,
49
+ "rstrip": false,
50
+ "single_word": false,
51
+ "special": true
52
+ },
53
+ "151649": {
54
+ "content": "<|box_end|>",
55
+ "lstrip": false,
56
+ "normalized": false,
57
+ "rstrip": false,
58
+ "single_word": false,
59
+ "special": true
60
+ },
61
+ "151650": {
62
+ "content": "<|quad_start|>",
63
+ "lstrip": false,
64
+ "normalized": false,
65
+ "rstrip": false,
66
+ "single_word": false,
67
+ "special": true
68
+ },
69
+ "151651": {
70
+ "content": "<|quad_end|>",
71
+ "lstrip": false,
72
+ "normalized": false,
73
+ "rstrip": false,
74
+ "single_word": false,
75
+ "special": true
76
+ },
77
+ "151652": {
78
+ "content": "<|vision_start|>",
79
+ "lstrip": false,
80
+ "normalized": false,
81
+ "rstrip": false,
82
+ "single_word": false,
83
+ "special": true
84
+ },
85
+ "151653": {
86
+ "content": "<|vision_end|>",
87
+ "lstrip": false,
88
+ "normalized": false,
89
+ "rstrip": false,
90
+ "single_word": false,
91
+ "special": true
92
+ },
93
+ "151654": {
94
+ "content": "<|vision_pad|>",
95
+ "lstrip": false,
96
+ "normalized": false,
97
+ "rstrip": false,
98
+ "single_word": false,
99
+ "special": true
100
+ },
101
+ "151655": {
102
+ "content": "<|image_pad|>",
103
+ "lstrip": false,
104
+ "normalized": false,
105
+ "rstrip": false,
106
+ "single_word": false,
107
+ "special": true
108
+ },
109
+ "151656": {
110
+ "content": "<|video_pad|>",
111
+ "lstrip": false,
112
+ "normalized": false,
113
+ "rstrip": false,
114
+ "single_word": false,
115
+ "special": true
116
+ },
117
+ "151657": {
118
+ "content": "<tool_call>",
119
+ "lstrip": false,
120
+ "normalized": false,
121
+ "rstrip": false,
122
+ "single_word": false,
123
+ "special": false
124
+ },
125
+ "151658": {
126
+ "content": "</tool_call>",
127
+ "lstrip": false,
128
+ "normalized": false,
129
+ "rstrip": false,
130
+ "single_word": false,
131
+ "special": false
132
+ },
133
+ "151659": {
134
+ "content": "<|fim_prefix|>",
135
+ "lstrip": false,
136
+ "normalized": false,
137
+ "rstrip": false,
138
+ "single_word": false,
139
+ "special": false
140
+ },
141
+ "151660": {
142
+ "content": "<|fim_middle|>",
143
+ "lstrip": false,
144
+ "normalized": false,
145
+ "rstrip": false,
146
+ "single_word": false,
147
+ "special": false
148
+ },
149
+ "151661": {
150
+ "content": "<|fim_suffix|>",
151
+ "lstrip": false,
152
+ "normalized": false,
153
+ "rstrip": false,
154
+ "single_word": false,
155
+ "special": false
156
+ },
157
+ "151662": {
158
+ "content": "<|fim_pad|>",
159
+ "lstrip": false,
160
+ "normalized": false,
161
+ "rstrip": false,
162
+ "single_word": false,
163
+ "special": false
164
+ },
165
+ "151663": {
166
+ "content": "<|repo_name|>",
167
+ "lstrip": false,
168
+ "normalized": false,
169
+ "rstrip": false,
170
+ "single_word": false,
171
+ "special": false
172
+ },
173
+ "151664": {
174
+ "content": "<|file_sep|>",
175
+ "lstrip": false,
176
+ "normalized": false,
177
+ "rstrip": false,
178
+ "single_word": false,
179
+ "special": false
180
+ }
181
+ },
182
+ "additional_special_tokens": [
183
+ "<|im_start|>",
184
+ "<|im_end|>",
185
+ "<|object_ref_start|>",
186
+ "<|object_ref_end|>",
187
+ "<|box_start|>",
188
+ "<|box_end|>",
189
+ "<|quad_start|>",
190
+ "<|quad_end|>",
191
+ "<|vision_start|>",
192
+ "<|vision_end|>",
193
+ "<|vision_pad|>",
194
+ "<|image_pad|>",
195
+ "<|video_pad|>"
196
+ ],
197
+ "bos_token": null,
198
+ "chat_template": "{%- if tools %}\n {{- '<|im_start|>system\\n' }}\n {%- if messages[0]['role'] == 'system' %}\n {{- messages[0]['content'] }}\n {%- else %}\n {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}\n {%- endif %}\n {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n {%- for tool in tools %}\n {{- \"\\n\" }}\n {{- tool | tojson }}\n {%- endfor %}\n {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n {%- if messages[0]['role'] == 'system' %}\n {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n {%- else %}\n {{- '<|im_start|>system\\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\\n' }}\n {%- endif %}\n{%- endif %}\n{%- for message in messages %}\n {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n {%- elif message.role == \"assistant\" %}\n {{- '<|im_start|>' + message.role }}\n {%- if message.content %}\n {{- '\\n' + message.content }}\n {%- endif %}\n {%- for tool_call in message.tool_calls %}\n {%- if tool_call.function is defined %}\n {%- set tool_call = tool_call.function %}\n {%- endif %}\n {{- '\\n<tool_call>\\n{\"name\": \"' }}\n {{- tool_call.name }}\n {{- '\", \"arguments\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- '}\\n</tool_call>' }}\n {%- endfor %}\n {{- '<|im_end|>\\n' }}\n {%- elif message.role == \"tool\" %}\n {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n {{- '<|im_start|>user' }}\n {%- endif %}\n {{- '\\n<tool_response>\\n' }}\n {{- message.content }}\n {{- '\\n</tool_response>' }}\n {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n {{- '<|im_end|>\\n' }}\n {%- endif %}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|im_start|>assistant\\n' }}\n{%- endif %}\n",
199
+ "clean_up_tokenization_spaces": false,
200
+ "eos_token": "<|endoftext|>",
201
+ "errors": "replace",
202
+ "model_max_length": 131072,
203
+ "pad_token": "<|endoftext|>",
204
+ "padding_side": "right",
205
+ "split_special_tokens": false,
206
+ "tokenizer_class": "Qwen2Tokenizer",
207
+ "unk_token": null
208
+ }
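tokenizer_config.json defines a ChatML-style chat_template with a default Qwen system prompt and tool-calling support. A minimal sketch (placeholder path again) of rendering it:

```python
# Minimal sketch: render the chat_template defined above. With no system
# message, the template injects the default Qwen system prompt; the path
# below is a placeholder for a local copy of this checkpoint.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("path/to/checkpoint")
messages = [{"role": "user", "content": "Hello"}]
print(tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True))
# <|im_start|>system
# You are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>
# <|im_start|>user
# Hello<|im_end|>
# <|im_start|>assistant
```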
train_results.json ADDED
@@ -0,0 +1,8 @@
1
+ {
2
+ "epoch": 2.9892735251097027,
3
+ "total_flos": 1494941780148224.0,
4
+ "train_loss": 0.029626486676872944,
5
+ "train_runtime": 67167.8945,
6
+ "train_samples_per_second": 0.733,
7
+ "train_steps_per_second": 0.008
8
+ }
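A quick sanity check on these numbers: runtime times throughput gives the total samples processed, and dividing by the epoch count recovers the per-epoch dataset size (approximate, since the logged figures are rounded):

```python
# Minimal arithmetic check on train_results.json (values as logged above).
runtime_s = 67167.8945
samples_per_s = 0.733
epochs = 2.9892735251097027

total_samples = runtime_s * samples_per_s  # ~49,234 samples seen in training
print(total_samples / epochs)              # ~16,470 samples per epoch
```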
trainer_log.jsonl ADDED
The diff for this file is too large to render. See raw diff
 
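trainer_state.json below logs the full optimization trace. The learning rates it records are consistent with linear warmup over 10% of the 510 total steps followed by cosine decay toward zero; the closed form below is a sketch that matches the log, not code from the repo:

```python
# Minimal sketch: reproduce the learning rates in log_history below,
# assuming linear warmup for 51 of 510 steps, then cosine decay to 0.
import math

peak, total_steps = 1e-5, 510
warmup = int(0.1 * total_steps)  # 51 steps

def lr_at(step: int) -> float:
    if step <= warmup:
        return peak * step / warmup
    progress = (step - warmup) / (total_steps - warmup)
    return peak * 0.5 * (1.0 + math.cos(math.pi * progress))

print(lr_at(1))   # ~1.9608e-07, as logged at step 1
print(lr_at(51))  # 1e-05, the peak reached at the end of warmup
print(lr_at(52))  # ~9.999883e-06, matching step 52 in the log
```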
trainer_state.json ADDED
@@ -0,0 +1,3612 @@
1
+ {
2
+ "best_metric": null,
3
+ "best_model_checkpoint": null,
4
+ "epoch": 2.9892735251097027,
5
+ "eval_steps": 500,
6
+ "global_step": 510,
7
+ "is_hyper_param_search": false,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 0.005850804485616773,
13
+ "grad_norm": 2.4042019844055176,
14
+ "learning_rate": 1.9607843137254904e-07,
15
+ "loss": 0.4984,
16
+ "step": 1
17
+ },
18
+ {
19
+ "epoch": 0.011701608971233545,
20
+ "grad_norm": 2.46307110786438,
21
+ "learning_rate": 3.921568627450981e-07,
22
+ "loss": 0.5004,
23
+ "step": 2
24
+ },
25
+ {
26
+ "epoch": 0.017552413456850317,
27
+ "grad_norm": 2.4179155826568604,
28
+ "learning_rate": 5.882352941176471e-07,
29
+ "loss": 0.5124,
30
+ "step": 3
31
+ },
32
+ {
33
+ "epoch": 0.02340321794246709,
34
+ "grad_norm": 2.3512749671936035,
35
+ "learning_rate": 7.843137254901962e-07,
36
+ "loss": 0.4679,
37
+ "step": 4
38
+ },
39
+ {
40
+ "epoch": 0.02925402242808386,
41
+ "grad_norm": 2.3498940467834473,
42
+ "learning_rate": 9.80392156862745e-07,
43
+ "loss": 0.513,
44
+ "step": 5
45
+ },
46
+ {
47
+ "epoch": 0.035104826913700635,
48
+ "grad_norm": 2.35388445854187,
49
+ "learning_rate": 1.1764705882352942e-06,
50
+ "loss": 0.506,
51
+ "step": 6
52
+ },
53
+ {
54
+ "epoch": 0.040955631399317405,
55
+ "grad_norm": 2.130160093307495,
56
+ "learning_rate": 1.3725490196078434e-06,
57
+ "loss": 0.481,
58
+ "step": 7
59
+ },
60
+ {
61
+ "epoch": 0.04680643588493418,
62
+ "grad_norm": 1.7983100414276123,
63
+ "learning_rate": 1.5686274509803923e-06,
64
+ "loss": 0.4636,
65
+ "step": 8
66
+ },
67
+ {
68
+ "epoch": 0.05265724037055095,
69
+ "grad_norm": 1.7637169361114502,
70
+ "learning_rate": 1.7647058823529414e-06,
71
+ "loss": 0.4812,
72
+ "step": 9
73
+ },
74
+ {
75
+ "epoch": 0.05850804485616772,
76
+ "grad_norm": 1.7297167778015137,
77
+ "learning_rate": 1.96078431372549e-06,
78
+ "loss": 0.4754,
79
+ "step": 10
80
+ },
81
+ {
82
+ "epoch": 0.0643588493417845,
83
+ "grad_norm": 1.4557020664215088,
84
+ "learning_rate": 2.1568627450980393e-06,
85
+ "loss": 0.4483,
86
+ "step": 11
87
+ },
88
+ {
89
+ "epoch": 0.07020965382740127,
90
+ "grad_norm": 1.4000929594039917,
91
+ "learning_rate": 2.3529411764705885e-06,
92
+ "loss": 0.4637,
93
+ "step": 12
94
+ },
95
+ {
96
+ "epoch": 0.07606045831301804,
97
+ "grad_norm": 1.2742167711257935,
98
+ "learning_rate": 2.549019607843137e-06,
99
+ "loss": 0.4512,
100
+ "step": 13
101
+ },
102
+ {
103
+ "epoch": 0.08191126279863481,
104
+ "grad_norm": 0.9762641787528992,
105
+ "learning_rate": 2.7450980392156867e-06,
106
+ "loss": 0.4386,
107
+ "step": 14
108
+ },
109
+ {
110
+ "epoch": 0.08776206728425158,
111
+ "grad_norm": 0.8408783078193665,
112
+ "learning_rate": 2.9411764705882355e-06,
113
+ "loss": 0.389,
114
+ "step": 15
115
+ },
116
+ {
117
+ "epoch": 0.09361287176986836,
118
+ "grad_norm": 0.8847479224205017,
119
+ "learning_rate": 3.1372549019607846e-06,
120
+ "loss": 0.4037,
121
+ "step": 16
122
+ },
123
+ {
124
+ "epoch": 0.09946367625548513,
125
+ "grad_norm": 0.8736488223075867,
126
+ "learning_rate": 3.3333333333333333e-06,
127
+ "loss": 0.4273,
128
+ "step": 17
129
+ },
130
+ {
131
+ "epoch": 0.1053144807411019,
132
+ "grad_norm": 0.7419383525848389,
133
+ "learning_rate": 3.529411764705883e-06,
134
+ "loss": 0.3898,
135
+ "step": 18
136
+ },
137
+ {
138
+ "epoch": 0.11116528522671867,
139
+ "grad_norm": 0.7293541431427002,
140
+ "learning_rate": 3.7254901960784316e-06,
141
+ "loss": 0.4233,
142
+ "step": 19
143
+ },
144
+ {
145
+ "epoch": 0.11701608971233544,
146
+ "grad_norm": 1.9727623462677002,
147
+ "learning_rate": 3.92156862745098e-06,
148
+ "loss": 0.3701,
149
+ "step": 20
150
+ },
151
+ {
152
+ "epoch": 0.12286689419795221,
153
+ "grad_norm": 0.915031373500824,
154
+ "learning_rate": 4.11764705882353e-06,
155
+ "loss": 0.3727,
156
+ "step": 21
157
+ },
158
+ {
159
+ "epoch": 0.128717698683569,
160
+ "grad_norm": 0.9051816463470459,
161
+ "learning_rate": 4.313725490196079e-06,
162
+ "loss": 0.3683,
163
+ "step": 22
164
+ },
165
+ {
166
+ "epoch": 0.13456850316918575,
167
+ "grad_norm": 0.7431566715240479,
168
+ "learning_rate": 4.509803921568628e-06,
169
+ "loss": 0.3567,
170
+ "step": 23
171
+ },
172
+ {
173
+ "epoch": 0.14041930765480254,
174
+ "grad_norm": 0.5959689021110535,
175
+ "learning_rate": 4.705882352941177e-06,
176
+ "loss": 0.342,
177
+ "step": 24
178
+ },
179
+ {
180
+ "epoch": 0.1462701121404193,
181
+ "grad_norm": 0.5637457966804504,
182
+ "learning_rate": 4.901960784313726e-06,
183
+ "loss": 0.3199,
184
+ "step": 25
185
+ },
186
+ {
187
+ "epoch": 0.15212091662603608,
188
+ "grad_norm": 0.5946214199066162,
189
+ "learning_rate": 5.098039215686274e-06,
190
+ "loss": 0.3299,
191
+ "step": 26
192
+ },
193
+ {
194
+ "epoch": 0.15797172111165286,
195
+ "grad_norm": 0.4843839108943939,
196
+ "learning_rate": 5.294117647058824e-06,
197
+ "loss": 0.3049,
198
+ "step": 27
199
+ },
200
+ {
201
+ "epoch": 0.16382252559726962,
202
+ "grad_norm": 0.49762770533561707,
203
+ "learning_rate": 5.4901960784313735e-06,
204
+ "loss": 0.2883,
205
+ "step": 28
206
+ },
207
+ {
208
+ "epoch": 0.1696733300828864,
209
+ "grad_norm": 0.5575108528137207,
210
+ "learning_rate": 5.686274509803922e-06,
211
+ "loss": 0.302,
212
+ "step": 29
213
+ },
214
+ {
215
+ "epoch": 0.17552413456850316,
216
+ "grad_norm": 0.5406191945075989,
217
+ "learning_rate": 5.882352941176471e-06,
218
+ "loss": 0.2851,
219
+ "step": 30
220
+ },
221
+ {
222
+ "epoch": 0.18137493905411994,
223
+ "grad_norm": 0.4934878945350647,
224
+ "learning_rate": 6.07843137254902e-06,
225
+ "loss": 0.2748,
226
+ "step": 31
227
+ },
228
+ {
229
+ "epoch": 0.18722574353973673,
230
+ "grad_norm": 0.5784087777137756,
231
+ "learning_rate": 6.274509803921569e-06,
232
+ "loss": 0.2993,
233
+ "step": 32
234
+ },
235
+ {
236
+ "epoch": 0.19307654802535348,
237
+ "grad_norm": 0.5021258592605591,
238
+ "learning_rate": 6.470588235294119e-06,
239
+ "loss": 0.276,
240
+ "step": 33
241
+ },
242
+ {
243
+ "epoch": 0.19892735251097027,
244
+ "grad_norm": 0.44409283995628357,
245
+ "learning_rate": 6.666666666666667e-06,
246
+ "loss": 0.2574,
247
+ "step": 34
248
+ },
249
+ {
250
+ "epoch": 0.20477815699658702,
251
+ "grad_norm": 0.4324759840965271,
252
+ "learning_rate": 6.862745098039216e-06,
253
+ "loss": 0.2427,
254
+ "step": 35
255
+ },
256
+ {
257
+ "epoch": 0.2106289614822038,
258
+ "grad_norm": 0.4134741425514221,
259
+ "learning_rate": 7.058823529411766e-06,
260
+ "loss": 0.2693,
261
+ "step": 36
262
+ },
263
+ {
264
+ "epoch": 0.21647976596782056,
265
+ "grad_norm": 0.3712354600429535,
266
+ "learning_rate": 7.2549019607843145e-06,
267
+ "loss": 0.2547,
268
+ "step": 37
269
+ },
270
+ {
271
+ "epoch": 0.22233057045343735,
272
+ "grad_norm": 0.3564675748348236,
273
+ "learning_rate": 7.450980392156863e-06,
274
+ "loss": 0.2189,
275
+ "step": 38
276
+ },
277
+ {
278
+ "epoch": 0.22818137493905413,
279
+ "grad_norm": 0.3792021572589874,
280
+ "learning_rate": 7.647058823529411e-06,
281
+ "loss": 0.2457,
282
+ "step": 39
283
+ },
284
+ {
285
+ "epoch": 0.2340321794246709,
286
+ "grad_norm": 0.39038991928100586,
287
+ "learning_rate": 7.84313725490196e-06,
288
+ "loss": 0.2488,
289
+ "step": 40
290
+ },
291
+ {
292
+ "epoch": 0.23988298391028767,
293
+ "grad_norm": 0.3869354724884033,
294
+ "learning_rate": 8.03921568627451e-06,
295
+ "loss": 0.2234,
296
+ "step": 41
297
+ },
298
+ {
299
+ "epoch": 0.24573378839590443,
300
+ "grad_norm": 0.44067057967185974,
301
+ "learning_rate": 8.23529411764706e-06,
302
+ "loss": 0.25,
303
+ "step": 42
304
+ },
305
+ {
306
+ "epoch": 0.2515845928815212,
307
+ "grad_norm": 0.34073254466056824,
308
+ "learning_rate": 8.43137254901961e-06,
309
+ "loss": 0.2214,
310
+ "step": 43
311
+ },
312
+ {
313
+ "epoch": 0.257435397367138,
314
+ "grad_norm": 0.3576607406139374,
315
+ "learning_rate": 8.627450980392157e-06,
316
+ "loss": 0.222,
317
+ "step": 44
318
+ },
319
+ {
320
+ "epoch": 0.26328620185275475,
321
+ "grad_norm": 0.3421995937824249,
322
+ "learning_rate": 8.823529411764707e-06,
323
+ "loss": 0.2023,
324
+ "step": 45
325
+ },
326
+ {
327
+ "epoch": 0.2691370063383715,
328
+ "grad_norm": 0.36221471428871155,
329
+ "learning_rate": 9.019607843137256e-06,
330
+ "loss": 0.2078,
331
+ "step": 46
332
+ },
333
+ {
334
+ "epoch": 0.2749878108239883,
335
+ "grad_norm": 0.3617911636829376,
336
+ "learning_rate": 9.215686274509804e-06,
337
+ "loss": 0.2067,
338
+ "step": 47
339
+ },
340
+ {
341
+ "epoch": 0.2808386153096051,
342
+ "grad_norm": 0.37211400270462036,
343
+ "learning_rate": 9.411764705882354e-06,
344
+ "loss": 0.1999,
345
+ "step": 48
346
+ },
347
+ {
348
+ "epoch": 0.28668941979522183,
349
+ "grad_norm": 0.3575108051300049,
350
+ "learning_rate": 9.607843137254903e-06,
351
+ "loss": 0.2098,
352
+ "step": 49
353
+ },
354
+ {
355
+ "epoch": 0.2925402242808386,
356
+ "grad_norm": 0.3736143112182617,
357
+ "learning_rate": 9.803921568627451e-06,
358
+ "loss": 0.2121,
359
+ "step": 50
360
+ },
361
+ {
362
+ "epoch": 0.2983910287664554,
363
+ "grad_norm": 0.3584408164024353,
364
+ "learning_rate": 1e-05,
365
+ "loss": 0.1986,
366
+ "step": 51
367
+ },
368
+ {
369
+ "epoch": 0.30424183325207216,
370
+ "grad_norm": 0.32740485668182373,
371
+ "learning_rate": 9.999882884955554e-06,
372
+ "loss": 0.1744,
373
+ "step": 52
374
+ },
375
+ {
376
+ "epoch": 0.3100926377376889,
377
+ "grad_norm": 0.3438873887062073,
378
+ "learning_rate": 9.999531545308584e-06,
379
+ "loss": 0.1955,
380
+ "step": 53
381
+ },
382
+ {
383
+ "epoch": 0.3159434422233057,
384
+ "grad_norm": 0.3557385206222534,
385
+ "learning_rate": 9.998945997517957e-06,
386
+ "loss": 0.1908,
387
+ "step": 54
388
+ },
389
+ {
390
+ "epoch": 0.3217942467089225,
391
+ "grad_norm": 0.3771747052669525,
392
+ "learning_rate": 9.998126269014255e-06,
393
+ "loss": 0.1933,
394
+ "step": 55
395
+ },
396
+ {
397
+ "epoch": 0.32764505119453924,
398
+ "grad_norm": 0.3500851094722748,
399
+ "learning_rate": 9.997072398198492e-06,
400
+ "loss": 0.1892,
401
+ "step": 56
402
+ },
403
+ {
404
+ "epoch": 0.333495855680156,
405
+ "grad_norm": 0.33903267979621887,
406
+ "learning_rate": 9.99578443444032e-06,
407
+ "loss": 0.1747,
408
+ "step": 57
409
+ },
410
+ {
411
+ "epoch": 0.3393466601657728,
412
+ "grad_norm": 0.3747689425945282,
413
+ "learning_rate": 9.994262438075713e-06,
414
+ "loss": 0.2007,
415
+ "step": 58
416
+ },
417
+ {
418
+ "epoch": 0.34519746465138956,
419
+ "grad_norm": 0.33843183517456055,
420
+ "learning_rate": 9.992506480404137e-06,
421
+ "loss": 0.1789,
422
+ "step": 59
423
+ },
424
+ {
425
+ "epoch": 0.3510482691370063,
426
+ "grad_norm": 0.49277859926223755,
427
+ "learning_rate": 9.990516643685222e-06,
428
+ "loss": 0.1799,
429
+ "step": 60
430
+ },
431
+ {
432
+ "epoch": 0.35689907362262313,
433
+ "grad_norm": 0.32874795794487,
434
+ "learning_rate": 9.988293021134888e-06,
435
+ "loss": 0.167,
436
+ "step": 61
437
+ },
438
+ {
439
+ "epoch": 0.3627498781082399,
440
+ "grad_norm": 0.3928215503692627,
441
+ "learning_rate": 9.985835716921e-06,
442
+ "loss": 0.1715,
443
+ "step": 62
444
+ },
445
+ {
446
+ "epoch": 0.36860068259385664,
447
+ "grad_norm": 0.36052584648132324,
448
+ "learning_rate": 9.983144846158472e-06,
449
+ "loss": 0.1785,
450
+ "step": 63
451
+ },
452
+ {
453
+ "epoch": 0.37445148707947346,
454
+ "grad_norm": 0.33686086535453796,
455
+ "learning_rate": 9.980220534903889e-06,
456
+ "loss": 0.1747,
457
+ "step": 64
458
+ },
459
+ {
460
+ "epoch": 0.3803022915650902,
461
+ "grad_norm": 0.36128610372543335,
462
+ "learning_rate": 9.977062920149583e-06,
463
+ "loss": 0.1773,
464
+ "step": 65
465
+ },
466
+ {
467
+ "epoch": 0.38615309605070697,
468
+ "grad_norm": 0.3438204824924469,
469
+ "learning_rate": 9.973672149817232e-06,
470
+ "loss": 0.1595,
471
+ "step": 66
472
+ },
473
+ {
474
+ "epoch": 0.3920039005363237,
475
+ "grad_norm": 0.3195885419845581,
476
+ "learning_rate": 9.970048382750925e-06,
477
+ "loss": 0.1698,
478
+ "step": 67
479
+ },
480
+ {
481
+ "epoch": 0.39785470502194054,
482
+ "grad_norm": 0.3541395664215088,
483
+ "learning_rate": 9.966191788709716e-06,
484
+ "loss": 0.1732,
485
+ "step": 68
486
+ },
487
+ {
488
+ "epoch": 0.4037055095075573,
489
+ "grad_norm": 0.3797125816345215,
490
+ "learning_rate": 9.96210254835968e-06,
491
+ "loss": 0.1855,
492
+ "step": 69
493
+ },
494
+ {
495
+ "epoch": 0.40955631399317405,
496
+ "grad_norm": 0.4068518280982971,
497
+ "learning_rate": 9.957780853265441e-06,
498
+ "loss": 0.1803,
499
+ "step": 70
500
+ },
501
+ {
502
+ "epoch": 0.41540711847879086,
503
+ "grad_norm": 0.3230303227901459,
504
+ "learning_rate": 9.953226905881208e-06,
505
+ "loss": 0.1551,
506
+ "step": 71
507
+ },
508
+ {
509
+ "epoch": 0.4212579229644076,
510
+ "grad_norm": 0.3526642918586731,
511
+ "learning_rate": 9.948440919541277e-06,
512
+ "loss": 0.1659,
513
+ "step": 72
514
+ },
515
+ {
516
+ "epoch": 0.4271087274500244,
517
+ "grad_norm": 0.36039891839027405,
518
+ "learning_rate": 9.943423118450051e-06,
519
+ "loss": 0.1741,
520
+ "step": 73
521
+ },
522
+ {
523
+ "epoch": 0.43295953193564113,
524
+ "grad_norm": 0.4426978826522827,
525
+ "learning_rate": 9.938173737671531e-06,
526
+ "loss": 0.1747,
527
+ "step": 74
528
+ },
529
+ {
530
+ "epoch": 0.43881033642125794,
531
+ "grad_norm": 0.37956029176712036,
532
+ "learning_rate": 9.932693023118299e-06,
533
+ "loss": 0.1766,
534
+ "step": 75
535
+ },
536
+ {
537
+ "epoch": 0.4446611409068747,
538
+ "grad_norm": 0.3615328371524811,
539
+ "learning_rate": 9.926981231540007e-06,
540
+ "loss": 0.1775,
541
+ "step": 76
542
+ },
543
+ {
544
+ "epoch": 0.45051194539249145,
545
+ "grad_norm": 0.37767109274864197,
546
+ "learning_rate": 9.921038630511345e-06,
547
+ "loss": 0.1829,
548
+ "step": 77
549
+ },
550
+ {
551
+ "epoch": 0.45636274987810826,
552
+ "grad_norm": 0.35032397508621216,
553
+ "learning_rate": 9.91486549841951e-06,
554
+ "loss": 0.1714,
555
+ "step": 78
556
+ },
557
+ {
558
+ "epoch": 0.462213554363725,
559
+ "grad_norm": 0.41910672187805176,
560
+ "learning_rate": 9.908462124451152e-06,
561
+ "loss": 0.1716,
562
+ "step": 79
563
+ },
564
+ {
565
+ "epoch": 0.4680643588493418,
566
+ "grad_norm": 0.3652605414390564,
567
+ "learning_rate": 9.901828808578846e-06,
568
+ "loss": 0.1496,
569
+ "step": 80
570
+ },
571
+ {
572
+ "epoch": 0.47391516333495853,
573
+ "grad_norm": 0.40993812680244446,
574
+ "learning_rate": 9.894965861547023e-06,
575
+ "loss": 0.1633,
576
+ "step": 81
577
+ },
578
+ {
579
+ "epoch": 0.47976596782057535,
580
+ "grad_norm": 0.3730096220970154,
581
+ "learning_rate": 9.887873604857424e-06,
582
+ "loss": 0.1661,
583
+ "step": 82
584
+ },
585
+ {
586
+ "epoch": 0.4856167723061921,
587
+ "grad_norm": 0.36139336228370667,
588
+ "learning_rate": 9.88055237075403e-06,
589
+ "loss": 0.1677,
590
+ "step": 83
591
+ },
592
+ {
593
+ "epoch": 0.49146757679180886,
594
+ "grad_norm": 0.38538867235183716,
595
+ "learning_rate": 9.873002502207502e-06,
596
+ "loss": 0.1708,
597
+ "step": 84
598
+ },
599
+ {
600
+ "epoch": 0.49731838127742567,
601
+ "grad_norm": 0.38366183638572693,
602
+ "learning_rate": 9.86522435289912e-06,
603
+ "loss": 0.1742,
604
+ "step": 85
605
+ },
606
+ {
607
+ "epoch": 0.5031691857630424,
608
+ "grad_norm": 0.3716081380844116,
609
+ "learning_rate": 9.857218287204204e-06,
610
+ "loss": 0.1539,
611
+ "step": 86
612
+ },
613
+ {
614
+ "epoch": 0.5090199902486592,
615
+ "grad_norm": 0.40919578075408936,
616
+ "learning_rate": 9.848984680175049e-06,
617
+ "loss": 0.1774,
618
+ "step": 87
619
+ },
620
+ {
621
+ "epoch": 0.514870794734276,
622
+ "grad_norm": 0.3685464560985565,
623
+ "learning_rate": 9.840523917523354e-06,
624
+ "loss": 0.1686,
625
+ "step": 88
626
+ },
627
+ {
628
+ "epoch": 0.5207215992198927,
629
+ "grad_norm": 0.3441756069660187,
630
+ "learning_rate": 9.831836395602164e-06,
631
+ "loss": 0.1497,
632
+ "step": 89
633
+ },
634
+ {
635
+ "epoch": 0.5265724037055095,
636
+ "grad_norm": 0.3777903914451599,
637
+ "learning_rate": 9.822922521387277e-06,
638
+ "loss": 0.1615,
639
+ "step": 90
640
+ },
641
+ {
642
+ "epoch": 0.5324232081911263,
643
+ "grad_norm": 0.3399229347705841,
644
+ "learning_rate": 9.813782712458206e-06,
645
+ "loss": 0.1558,
646
+ "step": 91
647
+ },
648
+ {
649
+ "epoch": 0.538274012676743,
650
+ "grad_norm": 0.41103455424308777,
651
+ "learning_rate": 9.804417396978605e-06,
652
+ "loss": 0.1591,
653
+ "step": 92
654
+ },
655
+ {
656
+ "epoch": 0.5441248171623598,
657
+ "grad_norm": 0.4678979218006134,
658
+ "learning_rate": 9.794827013676206e-06,
659
+ "loss": 0.1793,
660
+ "step": 93
661
+ },
662
+ {
663
+ "epoch": 0.5499756216479766,
664
+ "grad_norm": 0.3981055021286011,
665
+ "learning_rate": 9.78501201182228e-06,
666
+ "loss": 0.1731,
667
+ "step": 94
668
+ },
669
+ {
670
+ "epoch": 0.5558264261335933,
671
+ "grad_norm": 0.3952052593231201,
672
+ "learning_rate": 9.774972851210572e-06,
673
+ "loss": 0.1687,
674
+ "step": 95
675
+ },
676
+ {
677
+ "epoch": 0.5616772306192102,
678
+ "grad_norm": 0.5214592218399048,
679
+ "learning_rate": 9.764710002135784e-06,
680
+ "loss": 0.1497,
681
+ "step": 96
682
+ },
683
+ {
684
+ "epoch": 0.567528035104827,
685
+ "grad_norm": 0.3616654872894287,
686
+ "learning_rate": 9.754223945371524e-06,
687
+ "loss": 0.1617,
688
+ "step": 97
689
+ },
690
+ {
691
+ "epoch": 0.5733788395904437,
692
+ "grad_norm": 0.37656962871551514,
693
+ "learning_rate": 9.743515172147793e-06,
694
+ "loss": 0.1533,
695
+ "step": 98
696
+ },
697
+ {
698
+ "epoch": 0.5792296440760605,
699
+ "grad_norm": 0.4490816593170166,
700
+ "learning_rate": 9.732584184127973e-06,
701
+ "loss": 0.1629,
702
+ "step": 99
703
+ },
704
+ {
705
+ "epoch": 0.5850804485616772,
706
+ "grad_norm": 0.3636768162250519,
707
+ "learning_rate": 9.721431493385322e-06,
708
+ "loss": 0.1493,
709
+ "step": 100
710
+ },
711
+ {
712
+ "epoch": 0.590931253047294,
713
+ "grad_norm": 0.3367384076118469,
714
+ "learning_rate": 9.710057622378992e-06,
715
+ "loss": 0.1582,
716
+ "step": 101
717
+ },
718
+ {
719
+ "epoch": 0.5967820575329108,
720
+ "grad_norm": 0.5017166137695312,
721
+ "learning_rate": 9.698463103929542e-06,
722
+ "loss": 0.1699,
723
+ "step": 102
724
+ },
725
+ {
726
+ "epoch": 0.6026328620185275,
727
+ "grad_norm": 0.3878183960914612,
728
+ "learning_rate": 9.686648481193994e-06,
729
+ "loss": 0.1646,
730
+ "step": 103
731
+ },
732
+ {
733
+ "epoch": 0.6084836665041443,
734
+ "grad_norm": 0.35837483406066895,
735
+ "learning_rate": 9.674614307640368e-06,
736
+ "loss": 0.1637,
737
+ "step": 104
738
+ },
739
+ {
740
+ "epoch": 0.6143344709897611,
741
+ "grad_norm": 0.41536927223205566,
742
+ "learning_rate": 9.66236114702178e-06,
743
+ "loss": 0.1634,
744
+ "step": 105
745
+ },
746
+ {
747
+ "epoch": 0.6201852754753778,
748
+ "grad_norm": 0.3885203003883362,
749
+ "learning_rate": 9.649889573350006e-06,
750
+ "loss": 0.1573,
751
+ "step": 106
752
+ },
753
+ {
754
+ "epoch": 0.6260360799609946,
755
+ "grad_norm": 0.3583751618862152,
756
+ "learning_rate": 9.637200170868607e-06,
757
+ "loss": 0.1372,
758
+ "step": 107
759
+ },
760
+ {
761
+ "epoch": 0.6318868844466115,
762
+ "grad_norm": 0.4037657678127289,
763
+ "learning_rate": 9.62429353402556e-06,
764
+ "loss": 0.1538,
765
+ "step": 108
766
+ },
767
+ {
768
+ "epoch": 0.6377376889322282,
769
+ "grad_norm": 0.35686999559402466,
770
+ "learning_rate": 9.611170267445401e-06,
771
+ "loss": 0.1572,
772
+ "step": 109
773
+ },
774
+ {
775
+ "epoch": 0.643588493417845,
776
+ "grad_norm": 0.37882205843925476,
777
+ "learning_rate": 9.597830985900913e-06,
778
+ "loss": 0.1516,
779
+ "step": 110
780
+ },
781
+ {
782
+ "epoch": 0.6494392979034618,
783
+ "grad_norm": 0.4156239330768585,
784
+ "learning_rate": 9.584276314284316e-06,
785
+ "loss": 0.1633,
786
+ "step": 111
787
+ },
788
+ {
789
+ "epoch": 0.6552901023890785,
790
+ "grad_norm": 0.41733554005622864,
791
+ "learning_rate": 9.570506887577994e-06,
792
+ "loss": 0.1555,
793
+ "step": 112
794
+ },
795
+ {
796
+ "epoch": 0.6611409068746953,
797
+ "grad_norm": 0.350704163312912,
798
+ "learning_rate": 9.556523350824759e-06,
799
+ "loss": 0.1417,
800
+ "step": 113
801
+ },
802
+ {
803
+ "epoch": 0.666991711360312,
804
+ "grad_norm": 0.4148392975330353,
805
+ "learning_rate": 9.542326359097619e-06,
806
+ "loss": 0.158,
807
+ "step": 114
808
+ },
809
+ {
810
+ "epoch": 0.6728425158459288,
811
+ "grad_norm": 0.40368494391441345,
812
+ "learning_rate": 9.527916577469104e-06,
813
+ "loss": 0.1599,
814
+ "step": 115
815
+ },
816
+ {
817
+ "epoch": 0.6786933203315456,
818
+ "grad_norm": 0.41338789463043213,
819
+ "learning_rate": 9.5132946809801e-06,
820
+ "loss": 0.1638,
821
+ "step": 116
822
+ },
823
+ {
824
+ "epoch": 0.6845441248171623,
825
+ "grad_norm": 0.3733426332473755,
826
+ "learning_rate": 9.498461354608228e-06,
827
+ "loss": 0.1471,
828
+ "step": 117
829
+ },
830
+ {
831
+ "epoch": 0.6903949293027791,
832
+ "grad_norm": 0.3726441264152527,
833
+ "learning_rate": 9.483417293235759e-06,
834
+ "loss": 0.1683,
835
+ "step": 118
836
+ },
837
+ {
838
+ "epoch": 0.6962457337883959,
839
+ "grad_norm": 0.375068724155426,
840
+ "learning_rate": 9.468163201617063e-06,
841
+ "loss": 0.1523,
842
+ "step": 119
843
+ },
844
+ {
845
+ "epoch": 0.7020965382740126,
846
+ "grad_norm": 0.3736395537853241,
847
+ "learning_rate": 9.452699794345583e-06,
848
+ "loss": 0.15,
849
+ "step": 120
850
+ },
851
+ {
852
+ "epoch": 0.7079473427596294,
853
+ "grad_norm": 0.37468886375427246,
854
+ "learning_rate": 9.437027795820373e-06,
855
+ "loss": 0.1598,
856
+ "step": 121
857
+ },
858
+ {
859
+ "epoch": 0.7137981472452463,
860
+ "grad_norm": 0.36749985814094543,
861
+ "learning_rate": 9.421147940212152e-06,
862
+ "loss": 0.1605,
863
+ "step": 122
864
+ },
865
+ {
866
+ "epoch": 0.719648951730863,
867
+ "grad_norm": 0.3597930669784546,
868
+ "learning_rate": 9.405060971428924e-06,
869
+ "loss": 0.149,
870
+ "step": 123
871
+ },
872
+ {
873
+ "epoch": 0.7254997562164798,
874
+ "grad_norm": 0.39379099011421204,
875
+ "learning_rate": 9.388767643081109e-06,
876
+ "loss": 0.159,
877
+ "step": 124
878
+ },
879
+ {
880
+ "epoch": 0.7313505607020966,
881
+ "grad_norm": 0.37788447737693787,
882
+ "learning_rate": 9.372268718446259e-06,
883
+ "loss": 0.1691,
884
+ "step": 125
885
+ },
886
+ {
887
+ "epoch": 0.7372013651877133,
888
+ "grad_norm": 0.3714412748813629,
889
+ "learning_rate": 9.355564970433288e-06,
890
+ "loss": 0.1571,
891
+ "step": 126
892
+ },
893
+ {
894
+ "epoch": 0.7430521696733301,
895
+ "grad_norm": 0.37507206201553345,
896
+ "learning_rate": 9.338657181546277e-06,
897
+ "loss": 0.1567,
898
+ "step": 127
899
+ },
900
+ {
901
+ "epoch": 0.7489029741589469,
902
+ "grad_norm": 0.34279665350914,
903
+ "learning_rate": 9.321546143847802e-06,
904
+ "loss": 0.1463,
905
+ "step": 128
906
+ },
907
+ {
908
+ "epoch": 0.7547537786445636,
909
+ "grad_norm": 0.4015829265117645,
910
+ "learning_rate": 9.30423265892184e-06,
911
+ "loss": 0.1604,
912
+ "step": 129
913
+ },
914
+ {
915
+ "epoch": 0.7606045831301804,
916
+ "grad_norm": 0.36534038186073303,
917
+ "learning_rate": 9.286717537836211e-06,
918
+ "loss": 0.1595,
919
+ "step": 130
920
+ },
921
+ {
922
+ "epoch": 0.7664553876157971,
923
+ "grad_norm": 0.35713326930999756,
924
+ "learning_rate": 9.269001601104593e-06,
925
+ "loss": 0.1641,
926
+ "step": 131
927
+ },
928
+ {
929
+ "epoch": 0.7723061921014139,
930
+ "grad_norm": 0.393284410238266,
931
+ "learning_rate": 9.251085678648072e-06,
932
+ "loss": 0.1625,
933
+ "step": 132
934
+ },
935
+ {
936
+ "epoch": 0.7781569965870307,
937
+ "grad_norm": 0.3619539141654968,
938
+ "learning_rate": 9.232970609756267e-06,
939
+ "loss": 0.1481,
940
+ "step": 133
941
+ },
942
+ {
943
+ "epoch": 0.7840078010726474,
944
+ "grad_norm": 0.35219714045524597,
945
+ "learning_rate": 9.214657243048021e-06,
946
+ "loss": 0.1494,
947
+ "step": 134
948
+ },
949
+ {
950
+ "epoch": 0.7898586055582643,
951
+ "grad_norm": 0.38188058137893677,
952
+ "learning_rate": 9.196146436431635e-06,
953
+ "loss": 0.161,
954
+ "step": 135
955
+ },
956
+ {
957
+ "epoch": 0.7957094100438811,
958
+ "grad_norm": 0.3557851314544678,
959
+ "learning_rate": 9.177439057064684e-06,
960
+ "loss": 0.1571,
961
+ "step": 136
962
+ },
963
+ {
964
+ "epoch": 0.8015602145294978,
965
+ "grad_norm": 0.3425387442111969,
966
+ "learning_rate": 9.158535981313395e-06,
967
+ "loss": 0.1429,
968
+ "step": 137
969
+ },
970
+ {
971
+ "epoch": 0.8074110190151146,
972
+ "grad_norm": 0.35848724842071533,
973
+ "learning_rate": 9.13943809471159e-06,
974
+ "loss": 0.1592,
975
+ "step": 138
976
+ },
977
+ {
978
+ "epoch": 0.8132618235007314,
979
+ "grad_norm": 0.3686351776123047,
980
+ "learning_rate": 9.120146291919206e-06,
981
+ "loss": 0.149,
982
+ "step": 139
983
+ },
984
+ {
985
+ "epoch": 0.8191126279863481,
986
+ "grad_norm": 0.3813227713108063,
987
+ "learning_rate": 9.100661476680379e-06,
988
+ "loss": 0.1503,
989
+ "step": 140
990
+ },
991
+ {
992
+ "epoch": 0.8249634324719649,
993
+ "grad_norm": 0.3804561495780945,
994
+ "learning_rate": 9.08098456178111e-06,
995
+ "loss": 0.1547,
996
+ "step": 141
997
+ },
998
+ {
999
+ "epoch": 0.8308142369575817,
1000
+ "grad_norm": 0.3664409816265106,
1001
+ "learning_rate": 9.061116469006504e-06,
1002
+ "loss": 0.1555,
1003
+ "step": 142
1004
+ },
1005
+ {
1006
+ "epoch": 0.8366650414431984,
1007
+ "grad_norm": 0.3414985239505768,
1008
+ "learning_rate": 9.041058129097586e-06,
1009
+ "loss": 0.1376,
1010
+ "step": 143
1011
+ },
1012
+ {
1013
+ "epoch": 0.8425158459288152,
1014
+ "grad_norm": 0.3685246407985687,
1015
+ "learning_rate": 9.020810481707709e-06,
1016
+ "loss": 0.148,
1017
+ "step": 144
1018
+ },
1019
+ {
1020
+ "epoch": 0.8483666504144319,
1021
+ "grad_norm": 0.3861325681209564,
1022
+ "learning_rate": 9.00037447535852e-06,
1023
+ "loss": 0.1649,
1024
+ "step": 145
1025
+ },
1026
+ {
1027
+ "epoch": 0.8542174549000487,
1028
+ "grad_norm": 0.38169729709625244,
1029
+ "learning_rate": 8.979751067395534e-06,
1030
+ "loss": 0.1475,
1031
+ "step": 146
1032
+ },
1033
+ {
1034
+ "epoch": 0.8600682593856656,
1035
+ "grad_norm": 0.39888978004455566,
1036
+ "learning_rate": 8.958941223943292e-06,
1037
+ "loss": 0.1673,
1038
+ "step": 147
1039
+ },
1040
+ {
1041
+ "epoch": 0.8659190638712823,
1042
+ "grad_norm": 0.38037005066871643,
1043
+ "learning_rate": 8.937945919860086e-06,
1044
+ "loss": 0.1577,
1045
+ "step": 148
1046
+ },
1047
+ {
1048
+ "epoch": 0.8717698683568991,
1049
+ "grad_norm": 0.37339454889297485,
1050
+ "learning_rate": 8.916766138692303e-06,
1051
+ "loss": 0.1581,
1052
+ "step": 149
1053
+ },
1054
+ {
1055
+ "epoch": 0.8776206728425159,
1056
+ "grad_norm": 0.39251771569252014,
1057
+ "learning_rate": 8.895402872628352e-06,
1058
+ "loss": 0.1482,
1059
+ "step": 150
1060
+ },
1061
+ {
1062
+ "epoch": 0.8834714773281326,
1063
+ "grad_norm": 0.33527621626853943,
1064
+ "learning_rate": 8.873857122452174e-06,
1065
+ "loss": 0.1361,
1066
+ "step": 151
1067
+ },
1068
+ {
1069
+ "epoch": 0.8893222818137494,
1070
+ "grad_norm": 0.380577027797699,
1071
+ "learning_rate": 8.852129897496367e-06,
1072
+ "loss": 0.1495,
1073
+ "step": 152
1074
+ },
1075
+ {
1076
+ "epoch": 0.8951730862993662,
1077
+ "grad_norm": 0.3689667284488678,
1078
+ "learning_rate": 8.83022221559489e-06,
1079
+ "loss": 0.1605,
1080
+ "step": 153
1081
+ },
1082
+ {
1083
+ "epoch": 0.9010238907849829,
1084
+ "grad_norm": 0.39303916692733765,
1085
+ "learning_rate": 8.808135103035407e-06,
1086
+ "loss": 0.1472,
1087
+ "step": 154
1088
+ },
1089
+ {
1090
+ "epoch": 0.9068746952705997,
1091
+ "grad_norm": 0.37502506375312805,
1092
+ "learning_rate": 8.785869594511182e-06,
1093
+ "loss": 0.1621,
1094
+ "step": 155
1095
+ },
1096
+ {
1097
+ "epoch": 0.9127254997562165,
1098
+ "grad_norm": 0.3729185461997986,
1099
+ "learning_rate": 8.763426733072624e-06,
1100
+ "loss": 0.1538,
1101
+ "step": 156
1102
+ },
1103
+ {
1104
+ "epoch": 0.9185763042418332,
1105
+ "grad_norm": 0.38975661993026733,
1106
+ "learning_rate": 8.740807570078419e-06,
1107
+ "loss": 0.165,
1108
+ "step": 157
1109
+ },
1110
+ {
1111
+ "epoch": 0.92442710872745,
1112
+ "grad_norm": 0.3393966555595398,
1113
+ "learning_rate": 8.718013165146275e-06,
1114
+ "loss": 0.1435,
1115
+ "step": 158
1116
+ },
1117
+ {
1118
+ "epoch": 0.9302779132130667,
1119
+ "grad_norm": 0.3675788938999176,
1120
+ "learning_rate": 8.695044586103297e-06,
1121
+ "loss": 0.1488,
1122
+ "step": 159
1123
+ },
1124
+ {
1125
+ "epoch": 0.9361287176986836,
1126
+ "grad_norm": 0.34999918937683105,
1127
+ "learning_rate": 8.671902908935942e-06,
1128
+ "loss": 0.139,
1129
+ "step": 160
1130
+ },
1131
+ {
1132
+ "epoch": 0.9419795221843004,
1133
+ "grad_norm": 0.3778494596481323,
1134
+ "learning_rate": 8.648589217739635e-06,
1135
+ "loss": 0.149,
1136
+ "step": 161
1137
+ },
1138
+ {
1139
+ "epoch": 0.9478303266699171,
1140
+ "grad_norm": 0.34992629289627075,
1141
+ "learning_rate": 8.625104604667965e-06,
1142
+ "loss": 0.1582,
1143
+ "step": 162
1144
+ },
1145
+ {
1146
+ "epoch": 0.9536811311555339,
1147
+ "grad_norm": 0.344200074672699,
1148
+ "learning_rate": 8.601450169881533e-06,
1149
+ "loss": 0.1531,
1150
+ "step": 163
1151
+ },
1152
+ {
1153
+ "epoch": 0.9595319356411507,
1154
+ "grad_norm": 0.3828461170196533,
1155
+ "learning_rate": 8.577627021496413e-06,
1156
+ "loss": 0.1496,
1157
+ "step": 164
1158
+ },
1159
+ {
1160
+ "epoch": 0.9653827401267674,
1161
+ "grad_norm": 0.38192272186279297,
1162
+ "learning_rate": 8.553636275532236e-06,
1163
+ "loss": 0.1595,
1164
+ "step": 165
1165
+ },
1166
+ {
1167
+ "epoch": 0.9712335446123842,
1168
+ "grad_norm": 0.346049964427948,
1169
+ "learning_rate": 8.529479055859918e-06,
1170
+ "loss": 0.1394,
1171
+ "step": 166
1172
+ },
1173
+ {
1174
+ "epoch": 0.977084349098001,
1175
+ "grad_norm": 0.38012537360191345,
1176
+ "learning_rate": 8.505156494148997e-06,
1177
+ "loss": 0.1624,
1178
+ "step": 167
1179
+ },
1180
+ {
1181
+ "epoch": 0.9829351535836177,
1182
+ "grad_norm": 0.3455771505832672,
1183
+ "learning_rate": 8.480669729814635e-06,
1184
+ "loss": 0.1536,
1185
+ "step": 168
1186
+ },
1187
+ {
1188
+ "epoch": 0.9887859580692345,
1189
+ "grad_norm": 0.3532484769821167,
1190
+ "learning_rate": 8.456019909964224e-06,
1191
+ "loss": 0.1457,
1192
+ "step": 169
1193
+ },
1194
+ {
1195
+ "epoch": 0.9946367625548513,
1196
+ "grad_norm": 0.3691612184047699,
1197
+ "learning_rate": 8.43120818934367e-06,
1198
+ "loss": 0.1502,
1199
+ "step": 170
1200
+ },
1201
+ {
1202
+ "epoch": 1.000487567040468,
1203
+ "grad_norm": 0.39640477299690247,
1204
+ "learning_rate": 8.40623573028327e-06,
1205
+ "loss": 0.1672,
1206
+ "step": 171
1207
+ },
1208
+ {
1209
+ "epoch": 1.0063383715260847,
1210
+ "grad_norm": 0.3387565314769745,
1211
+ "learning_rate": 8.381103702643295e-06,
1212
+ "loss": 0.0924,
1213
+ "step": 172
1214
+ },
1215
+ {
1216
+ "epoch": 1.0121891760117017,
1217
+ "grad_norm": 0.3130762577056885,
1218
+ "learning_rate": 8.35581328375915e-06,
1219
+ "loss": 0.0873,
1220
+ "step": 173
1221
+ },
1222
+ {
1223
+ "epoch": 1.0180399804973184,
1224
+ "grad_norm": 0.3515136241912842,
1225
+ "learning_rate": 8.330365658386252e-06,
1226
+ "loss": 0.0971,
1227
+ "step": 174
1228
+ },
1229
+ {
1230
+ "epoch": 1.023890784982935,
1231
+ "grad_norm": 0.32018211483955383,
1232
+ "learning_rate": 8.30476201864451e-06,
1233
+ "loss": 0.0848,
1234
+ "step": 175
1235
+ },
1236
+ {
1237
+ "epoch": 1.029741589468552,
1238
+ "grad_norm": 0.35570186376571655,
1239
+ "learning_rate": 8.27900356396249e-06,
1240
+ "loss": 0.077,
1241
+ "step": 176
1242
+ },
1243
+ {
1244
+ "epoch": 1.0355923939541687,
1245
+ "grad_norm": 0.3432004749774933,
1246
+ "learning_rate": 8.25309150102121e-06,
1247
+ "loss": 0.0922,
1248
+ "step": 177
1249
+ },
1250
+ {
1251
+ "epoch": 1.0414431984397854,
1252
+ "grad_norm": 0.3650023341178894,
1253
+ "learning_rate": 8.227027043697642e-06,
1254
+ "loss": 0.1081,
1255
+ "step": 178
1256
+ },
1257
+ {
1258
+ "epoch": 1.0472940029254023,
1259
+ "grad_norm": 0.3721574544906616,
1260
+ "learning_rate": 8.200811413007808e-06,
1261
+ "loss": 0.0952,
1262
+ "step": 179
1263
+ },
1264
+ {
1265
+ "epoch": 1.053144807411019,
1266
+ "grad_norm": 0.3511905074119568,
1267
+ "learning_rate": 8.174445837049614e-06,
1268
+ "loss": 0.093,
1269
+ "step": 180
1270
+ },
1271
+ {
1272
+ "epoch": 1.0589956118966357,
1273
+ "grad_norm": 0.3371464014053345,
1274
+ "learning_rate": 8.147931550945301e-06,
1275
+ "loss": 0.0873,
1276
+ "step": 181
1277
+ },
1278
+ {
1279
+ "epoch": 1.0648464163822526,
1280
+ "grad_norm": 0.3639083802700043,
1281
+ "learning_rate": 8.121269796783585e-06,
1282
+ "loss": 0.0906,
1283
+ "step": 182
1284
+ },
1285
+ {
1286
+ "epoch": 1.0706972208678693,
1287
+ "grad_norm": 0.3532220125198364,
1288
+ "learning_rate": 8.094461823561473e-06,
1289
+ "loss": 0.0851,
1290
+ "step": 183
1291
+ },
1292
+ {
1293
+ "epoch": 1.076548025353486,
1294
+ "grad_norm": 0.3342878520488739,
1295
+ "learning_rate": 8.06750888712576e-06,
1296
+ "loss": 0.0953,
1297
+ "step": 184
1298
+ },
1299
+ {
1300
+ "epoch": 1.082398829839103,
1301
+ "grad_norm": 0.3148074448108673,
1302
+ "learning_rate": 8.040412250114184e-06,
1303
+ "loss": 0.0837,
1304
+ "step": 185
1305
+ },
1306
+ {
1307
+ "epoch": 1.0882496343247197,
1308
+ "grad_norm": 0.32519224286079407,
1309
+ "learning_rate": 8.013173181896283e-06,
1310
+ "loss": 0.0798,
1311
+ "step": 186
1312
+ },
1313
+ {
1314
+ "epoch": 1.0941004388103364,
1315
+ "grad_norm": 0.3312150239944458,
1316
+ "learning_rate": 7.985792958513932e-06,
1317
+ "loss": 0.0933,
1318
+ "step": 187
1319
+ },
1320
+ {
1321
+ "epoch": 1.0999512432959533,
1322
+ "grad_norm": 0.32089629769325256,
1323
+ "learning_rate": 7.958272862621562e-06,
1324
+ "loss": 0.0922,
1325
+ "step": 188
1326
+ },
1327
+ {
1328
+ "epoch": 1.10580204778157,
1329
+ "grad_norm": 0.35258930921554565,
1330
+ "learning_rate": 7.930614183426074e-06,
1331
+ "loss": 0.0909,
1332
+ "step": 189
1333
+ },
1334
+ {
1335
+ "epoch": 1.1116528522671867,
1336
+ "grad_norm": 0.33893248438835144,
1337
+ "learning_rate": 7.902818216626446e-06,
1338
+ "loss": 0.0929,
1339
+ "step": 190
1340
+ },
1341
+ {
1342
+ "epoch": 1.1175036567528036,
1343
+ "grad_norm": 0.31684961915016174,
1344
+ "learning_rate": 7.874886264353035e-06,
1345
+ "loss": 0.0886,
1346
+ "step": 191
1347
+ },
1348
+ {
1349
+ "epoch": 1.1233544612384203,
1350
+ "grad_norm": 0.3478744328022003,
1351
+ "learning_rate": 7.846819635106569e-06,
1352
+ "loss": 0.0957,
1353
+ "step": 192
1354
+ },
1355
+ {
1356
+ "epoch": 1.129205265724037,
1357
+ "grad_norm": 0.3251475393772125,
1358
+ "learning_rate": 7.818619643696863e-06,
1359
+ "loss": 0.0922,
1360
+ "step": 193
1361
+ },
1362
+ {
1363
+ "epoch": 1.1350560702096537,
1364
+ "grad_norm": 0.29696688055992126,
1365
+ "learning_rate": 7.790287611181217e-06,
1366
+ "loss": 0.0914,
1367
+ "step": 194
1368
+ },
1369
+ {
1370
+ "epoch": 1.1409068746952706,
1371
+ "grad_norm": 0.3044435977935791,
1372
+ "learning_rate": 7.76182486480253e-06,
1373
+ "loss": 0.0892,
1374
+ "step": 195
1375
+ },
1376
+ {
1377
+ "epoch": 1.1467576791808873,
1378
+ "grad_norm": 0.3044622242450714,
1379
+ "learning_rate": 7.733232737927123e-06,
1380
+ "loss": 0.0857,
1381
+ "step": 196
1382
+ },
1383
+ {
1384
+ "epoch": 1.152608483666504,
1385
+ "grad_norm": 0.3341536819934845,
1386
+ "learning_rate": 7.70451256998228e-06,
1387
+ "loss": 0.0906,
1388
+ "step": 197
1389
+ },
1390
+ {
1391
+ "epoch": 1.158459288152121,
1392
+ "grad_norm": 0.2735297679901123,
1393
+ "learning_rate": 7.675665706393502e-06,
1394
+ "loss": 0.0732,
1395
+ "step": 198
1396
+ },
1397
+ {
1398
+ "epoch": 1.1643100926377377,
1399
+ "grad_norm": 0.39707475900650024,
1400
+ "learning_rate": 7.646693498521472e-06,
1401
+ "loss": 0.0917,
1402
+ "step": 199
1403
+ },
1404
+ {
1405
+ "epoch": 1.1701608971233544,
1406
+ "grad_norm": 0.31679078936576843,
1407
+ "learning_rate": 7.617597303598754e-06,
1408
+ "loss": 0.0881,
1409
+ "step": 200
1410
+ },
1411
+ {
1412
+ "epoch": 1.1760117016089713,
1413
+ "grad_norm": 0.360331267118454,
1414
+ "learning_rate": 7.588378484666214e-06,
1415
+ "loss": 0.0954,
1416
+ "step": 201
1417
+ },
1418
+ {
1419
+ "epoch": 1.181862506094588,
1420
+ "grad_norm": 0.38201650977134705,
1421
+ "learning_rate": 7.559038410509161e-06,
1422
+ "loss": 0.0904,
1423
+ "step": 202
1424
+ },
1425
+ {
1426
+ "epoch": 1.1877133105802047,
1427
+ "grad_norm": 0.32335299253463745,
1428
+ "learning_rate": 7.529578455593232e-06,
1429
+ "loss": 0.0945,
1430
+ "step": 203
1431
+ },
1432
+ {
1433
+ "epoch": 1.1935641150658216,
1434
+ "grad_norm": 0.4359491169452667,
1435
+ "learning_rate": 7.500000000000001e-06,
1436
+ "loss": 0.096,
1437
+ "step": 204
1438
+ },
1439
+ {
1440
+ "epoch": 1.1994149195514383,
1441
+ "grad_norm": 0.3149729073047638,
1442
+ "learning_rate": 7.47030442936232e-06,
1443
+ "loss": 0.09,
1444
+ "step": 205
1445
+ },
1446
+ {
1447
+ "epoch": 1.205265724037055,
1448
+ "grad_norm": 0.33973321318626404,
1449
+ "learning_rate": 7.440493134799425e-06,
1450
+ "loss": 0.0913,
1451
+ "step": 206
1452
+ },
1453
+ {
1454
+ "epoch": 1.211116528522672,
1455
+ "grad_norm": 0.30180642008781433,
1456
+ "learning_rate": 7.4105675128517456e-06,
1457
+ "loss": 0.0803,
1458
+ "step": 207
1459
+ },
1460
+ {
1461
+ "epoch": 1.2169673330082886,
1462
+ "grad_norm": 0.345156192779541,
1463
+ "learning_rate": 7.380528965415501e-06,
1464
+ "loss": 0.1082,
1465
+ "step": 208
1466
+ },
1467
+ {
1468
+ "epoch": 1.2228181374939053,
1469
+ "grad_norm": 0.35538923740386963,
1470
+ "learning_rate": 7.35037889967702e-06,
1471
+ "loss": 0.0941,
1472
+ "step": 209
1473
+ },
1474
+ {
1475
+ "epoch": 1.2286689419795223,
1476
+ "grad_norm": 0.3383888006210327,
1477
+ "learning_rate": 7.320118728046818e-06,
1478
+ "loss": 0.0977,
1479
+ "step": 210
1480
+ },
1481
+ {
1482
+ "epoch": 1.234519746465139,
1483
+ "grad_norm": 0.32048848271369934,
1484
+ "learning_rate": 7.289749868093432e-06,
1485
+ "loss": 0.0997,
1486
+ "step": 211
1487
+ },
1488
+ {
1489
+ "epoch": 1.2403705509507557,
1490
+ "grad_norm": 0.30647850036621094,
1491
+ "learning_rate": 7.259273742477017e-06,
1492
+ "loss": 0.0735,
1493
+ "step": 212
1494
+ },
1495
+ {
1496
+ "epoch": 1.2462213554363726,
1497
+ "grad_norm": 0.3137127757072449,
1498
+ "learning_rate": 7.2286917788826926e-06,
1499
+ "loss": 0.086,
1500
+ "step": 213
1501
+ },
1502
+ {
1503
+ "epoch": 1.2520721599219893,
1504
+ "grad_norm": 0.3300592005252838,
1505
+ "learning_rate": 7.19800540995367e-06,
1506
+ "loss": 0.1001,
1507
+ "step": 214
1508
+ },
1509
+ {
1510
+ "epoch": 1.257922964407606,
1511
+ "grad_norm": 0.36561137437820435,
1512
+ "learning_rate": 7.167216073224136e-06,
1513
+ "loss": 0.0939,
1514
+ "step": 215
1515
+ },
1516
+ {
1517
+ "epoch": 1.263773768893223,
1518
+ "grad_norm": 0.32352930307388306,
1519
+ "learning_rate": 7.136325211051905e-06,
1520
+ "loss": 0.0842,
1521
+ "step": 216
1522
+ },
1523
+ {
1524
+ "epoch": 1.2696245733788396,
1525
+ "grad_norm": 0.3289717733860016,
1526
+ "learning_rate": 7.1053342705508564e-06,
1527
+ "loss": 0.0935,
1528
+ "step": 217
1529
+ },
1530
+ {
1531
+ "epoch": 1.2754753778644563,
1532
+ "grad_norm": 0.28205370903015137,
1533
+ "learning_rate": 7.074244703523137e-06,
1534
+ "loss": 0.0778,
1535
+ "step": 218
1536
+ },
1537
+ {
1538
+ "epoch": 1.2813261823500732,
1539
+ "grad_norm": 0.38977792859077454,
1540
+ "learning_rate": 7.043057966391158e-06,
1541
+ "loss": 0.0956,
1542
+ "step": 219
1543
+ },
1544
+ {
1545
+ "epoch": 1.28717698683569,
1546
+ "grad_norm": 0.3073525130748749,
1547
+ "learning_rate": 7.011775520129363e-06,
1548
+ "loss": 0.0863,
1549
+ "step": 220
1550
+ },
1551
+ {
1552
+ "epoch": 1.2930277913213066,
1553
+ "grad_norm": 0.3111942410469055,
1554
+ "learning_rate": 6.980398830195785e-06,
1555
+ "loss": 0.0834,
1556
+ "step": 221
1557
+ },
1558
+ {
1559
+ "epoch": 1.2988785958069236,
1560
+ "grad_norm": 0.3141816556453705,
1561
+ "learning_rate": 6.948929366463397e-06,
1562
+ "loss": 0.0843,
1563
+ "step": 222
1564
+ },
1565
+ {
1566
+ "epoch": 1.3047294002925403,
1567
+ "grad_norm": 0.3614446222782135,
1568
+ "learning_rate": 6.9173686031512595e-06,
1569
+ "loss": 0.1021,
1570
+ "step": 223
1571
+ },
1572
+ {
1573
+ "epoch": 1.310580204778157,
1574
+ "grad_norm": 0.3350818157196045,
1575
+ "learning_rate": 6.885718018755448e-06,
1576
+ "loss": 0.0871,
1577
+ "step": 224
1578
+ },
1579
+ {
1580
+ "epoch": 1.3164310092637739,
1581
+ "grad_norm": 0.32823216915130615,
1582
+ "learning_rate": 6.8539790959798045e-06,
1583
+ "loss": 0.0916,
1584
+ "step": 225
1585
+ },
1586
+ {
1587
+ "epoch": 1.3222818137493906,
1588
+ "grad_norm": 0.382249116897583,
1589
+ "learning_rate": 6.822153321666469e-06,
1590
+ "loss": 0.0904,
1591
+ "step": 226
1592
+ },
1593
+ {
1594
+ "epoch": 1.3281326182350073,
1595
+ "grad_norm": 0.32061290740966797,
1596
+ "learning_rate": 6.790242186726231e-06,
1597
+ "loss": 0.0966,
1598
+ "step": 227
1599
+ },
1600
+ {
1601
+ "epoch": 1.3339834227206242,
1602
+ "grad_norm": 0.32373279333114624,
1603
+ "learning_rate": 6.758247186068684e-06,
1604
+ "loss": 0.0891,
1605
+ "step": 228
1606
+ },
1607
+ {
1608
+ "epoch": 1.339834227206241,
1609
+ "grad_norm": 0.32146140933036804,
1610
+ "learning_rate": 6.7261698185322e-06,
1611
+ "loss": 0.09,
1612
+ "step": 229
1613
+ },
1614
+ {
1615
+ "epoch": 1.3456850316918576,
1616
+ "grad_norm": 0.33953243494033813,
1617
+ "learning_rate": 6.6940115868137065e-06,
1618
+ "loss": 0.0877,
1619
+ "step": 230
1620
+ },
1621
+ {
1622
+ "epoch": 1.3515358361774745,
1623
+ "grad_norm": 0.3054323196411133,
1624
+ "learning_rate": 6.6617739973982985e-06,
1625
+ "loss": 0.088,
1626
+ "step": 231
1627
+ },
1628
+ {
1629
+ "epoch": 1.3573866406630912,
1630
+ "grad_norm": 0.34463798999786377,
1631
+ "learning_rate": 6.629458560488664e-06,
1632
+ "loss": 0.1081,
1633
+ "step": 232
1634
+ },
1635
+ {
1636
+ "epoch": 1.363237445148708,
1637
+ "grad_norm": 0.3094691336154938,
1638
+ "learning_rate": 6.597066789934336e-06,
1639
+ "loss": 0.089,
1640
+ "step": 233
1641
+ },
1642
+ {
1643
+ "epoch": 1.3690882496343248,
1644
+ "grad_norm": 0.3243754506111145,
1645
+ "learning_rate": 6.5646002031607726e-06,
1646
+ "loss": 0.1017,
1647
+ "step": 234
1648
+ },
1649
+ {
1650
+ "epoch": 1.3749390541199416,
1651
+ "grad_norm": 0.3523649573326111,
1652
+ "learning_rate": 6.5320603210982745e-06,
1653
+ "loss": 0.0976,
1654
+ "step": 235
1655
+ },
1656
+ {
1657
+ "epoch": 1.3807898586055583,
1658
+ "grad_norm": 0.32277393341064453,
1659
+ "learning_rate": 6.499448668110735e-06,
1660
+ "loss": 0.0987,
1661
+ "step": 236
1662
+ },
1663
+ {
1664
+ "epoch": 1.3866406630911752,
1665
+ "grad_norm": 0.36676648259162903,
1666
+ "learning_rate": 6.466766771924231e-06,
1667
+ "loss": 0.0962,
1668
+ "step": 237
1669
+ },
1670
+ {
1671
+ "epoch": 1.3924914675767919,
1672
+ "grad_norm": 0.3382190465927124,
1673
+ "learning_rate": 6.434016163555452e-06,
1674
+ "loss": 0.0997,
1675
+ "step": 238
1676
+ },
1677
+ {
1678
+ "epoch": 1.3983422720624086,
1679
+ "grad_norm": 0.3056662082672119,
1680
+ "learning_rate": 6.401198377239979e-06,
1681
+ "loss": 0.0832,
1682
+ "step": 239
1683
+ },
1684
+ {
1685
+ "epoch": 1.4041930765480253,
1686
+ "grad_norm": 0.34704071283340454,
1687
+ "learning_rate": 6.368314950360416e-06,
1688
+ "loss": 0.0989,
1689
+ "step": 240
1690
+ },
1691
+ {
1692
+ "epoch": 1.4100438810336422,
1693
+ "grad_norm": 0.3333982527256012,
1694
+ "learning_rate": 6.3353674233743585e-06,
1695
+ "loss": 0.1005,
1696
+ "step": 241
1697
+ },
1698
+ {
1699
+ "epoch": 1.415894685519259,
1700
+ "grad_norm": 0.3286687135696411,
1701
+ "learning_rate": 6.302357339742245e-06,
1702
+ "loss": 0.089,
1703
+ "step": 242
1704
+ },
1705
+ {
1706
+ "epoch": 1.4217454900048756,
1707
+ "grad_norm": 0.33592113852500916,
1708
+ "learning_rate": 6.269286245855039e-06,
1709
+ "loss": 0.0927,
1710
+ "step": 243
1711
+ },
1712
+ {
1713
+ "epoch": 1.4275962944904925,
1714
+ "grad_norm": 0.3280174434185028,
1715
+ "learning_rate": 6.236155690961795e-06,
1716
+ "loss": 0.085,
1717
+ "step": 244
1718
+ },
1719
+ {
1720
+ "epoch": 1.4334470989761092,
1721
+ "grad_norm": 0.31295254826545715,
1722
+ "learning_rate": 6.202967227097073e-06,
1723
+ "loss": 0.0944,
1724
+ "step": 245
1725
+ },
1726
+ {
1727
+ "epoch": 1.439297903461726,
1728
+ "grad_norm": 0.3453296422958374,
1729
+ "learning_rate": 6.169722409008244e-06,
1730
+ "loss": 0.0878,
1731
+ "step": 246
1732
+ },
1733
+ {
1734
+ "epoch": 1.4451487079473426,
1735
+ "grad_norm": 0.3456035852432251,
1736
+ "learning_rate": 6.136422794082645e-06,
1737
+ "loss": 0.0943,
1738
+ "step": 247
1739
+ },
1740
+ {
1741
+ "epoch": 1.4509995124329595,
1742
+ "grad_norm": 0.31323128938674927,
1743
+ "learning_rate": 6.10306994227463e-06,
1744
+ "loss": 0.0759,
1745
+ "step": 248
1746
+ },
1747
+ {
1748
+ "epoch": 1.4568503169185762,
1749
+ "grad_norm": 0.33041155338287354,
1750
+ "learning_rate": 6.0696654160324875e-06,
1751
+ "loss": 0.098,
1752
+ "step": 249
1753
+ },
1754
+ {
1755
+ "epoch": 1.462701121404193,
1756
+ "grad_norm": 0.3976047933101654,
1757
+ "learning_rate": 6.0362107802252486e-06,
1758
+ "loss": 0.1005,
1759
+ "step": 250
1760
+ },
1761
+ {
1762
+ "epoch": 1.4685519258898099,
1763
+ "grad_norm": 0.3161430358886719,
1764
+ "learning_rate": 6.002707602069377e-06,
1765
+ "loss": 0.0909,
1766
+ "step": 251
1767
+ },
1768
+ {
1769
+ "epoch": 1.4744027303754266,
1770
+ "grad_norm": 0.36212068796157837,
1771
+ "learning_rate": 5.9691574510553505e-06,
1772
+ "loss": 0.1077,
1773
+ "step": 252
1774
+ },
1775
+ {
1776
+ "epoch": 1.4802535348610433,
1777
+ "grad_norm": 0.3128092586994171,
1778
+ "learning_rate": 5.935561898874142e-06,
1779
+ "loss": 0.0965,
1780
+ "step": 253
1781
+ },
1782
+ {
1783
+ "epoch": 1.4861043393466602,
1784
+ "grad_norm": 0.29555660486221313,
1785
+ "learning_rate": 5.901922519343586e-06,
1786
+ "loss": 0.0938,
1787
+ "step": 254
1788
+ },
1789
+ {
1790
+ "epoch": 1.491955143832277,
1791
+ "grad_norm": 0.3516538143157959,
1792
+ "learning_rate": 5.8682408883346535e-06,
1793
+ "loss": 0.0948,
1794
+ "step": 255
1795
+ },
1796
+ {
1797
+ "epoch": 1.4978059483178936,
1798
+ "grad_norm": 0.33568522334098816,
1799
+ "learning_rate": 5.834518583697628e-06,
1800
+ "loss": 0.1097,
1801
+ "step": 256
1802
+ },
1803
+ {
1804
+ "epoch": 1.5036567528035105,
1805
+ "grad_norm": 0.3301846385002136,
1806
+ "learning_rate": 5.800757185188195e-06,
1807
+ "loss": 0.096,
1808
+ "step": 257
1809
+ },
1810
+ {
1811
+ "epoch": 1.5095075572891272,
1812
+ "grad_norm": 0.36975666880607605,
1813
+ "learning_rate": 5.766958274393428e-06,
1814
+ "loss": 0.1021,
1815
+ "step": 258
1816
+ },
1817
+ {
1818
+ "epoch": 1.515358361774744,
1819
+ "grad_norm": 0.36187541484832764,
1820
+ "learning_rate": 5.733123434657704e-06,
1821
+ "loss": 0.0978,
1822
+ "step": 259
1823
+ },
1824
+ {
1825
+ "epoch": 1.5212091662603608,
1826
+ "grad_norm": 0.302735298871994,
1827
+ "learning_rate": 5.699254251008524e-06,
1828
+ "loss": 0.0863,
1829
+ "step": 260
1830
+ },
1831
+ {
1832
+ "epoch": 1.5270599707459775,
1833
+ "grad_norm": 0.3669283986091614,
1834
+ "learning_rate": 5.66535231008227e-06,
1835
+ "loss": 0.0993,
1836
+ "step": 261
1837
+ },
1838
+ {
1839
+ "epoch": 1.5329107752315942,
1840
+ "grad_norm": 0.30971550941467285,
1841
+ "learning_rate": 5.631419200049867e-06,
1842
+ "loss": 0.094,
1843
+ "step": 262
1844
+ },
1845
+ {
1846
+ "epoch": 1.5387615797172112,
1847
+ "grad_norm": 0.31271669268608093,
1848
+ "learning_rate": 5.597456510542395e-06,
1849
+ "loss": 0.0784,
1850
+ "step": 263
1851
+ },
1852
+ {
1853
+ "epoch": 1.5446123842028279,
1854
+ "grad_norm": 0.3437775671482086,
1855
+ "learning_rate": 5.5634658325766066e-06,
1856
+ "loss": 0.094,
1857
+ "step": 264
1858
+ },
1859
+ {
1860
+ "epoch": 1.5504631886884446,
1861
+ "grad_norm": 0.30059608817100525,
1862
+ "learning_rate": 5.529448758480408e-06,
1863
+ "loss": 0.0837,
1864
+ "step": 265
1865
+ },
1866
+ {
1867
+ "epoch": 1.5563139931740615,
1868
+ "grad_norm": 0.3427652418613434,
1869
+ "learning_rate": 5.495406881818256e-06,
1870
+ "loss": 0.0974,
1871
+ "step": 266
1872
+ },
1873
+ {
1874
+ "epoch": 1.5621647976596782,
1875
+ "grad_norm": 0.325862318277359,
1876
+ "learning_rate": 5.46134179731651e-06,
1877
+ "loss": 0.0941,
1878
+ "step": 267
1879
+ },
1880
+ {
1881
+ "epoch": 1.568015602145295,
1882
+ "grad_norm": 0.2897391617298126,
1883
+ "learning_rate": 5.427255100788726e-06,
1884
+ "loss": 0.0892,
1885
+ "step": 268
1886
+ },
1887
+ {
1888
+ "epoch": 1.5738664066309118,
1889
+ "grad_norm": 0.33088555932044983,
1890
+ "learning_rate": 5.393148389060893e-06,
1891
+ "loss": 0.09,
1892
+ "step": 269
1893
+ },
1894
+ {
1895
+ "epoch": 1.5797172111165285,
1896
+ "grad_norm": 0.3650970757007599,
1897
+ "learning_rate": 5.359023259896638e-06,
1898
+ "loss": 0.0931,
1899
+ "step": 270
1900
+ },
1901
+ {
1902
+ "epoch": 1.5855680156021452,
1903
+ "grad_norm": 0.34107041358947754,
1904
+ "learning_rate": 5.3248813119223665e-06,
1905
+ "loss": 0.0937,
1906
+ "step": 271
1907
+ },
1908
+ {
1909
+ "epoch": 1.5914188200877621,
1910
+ "grad_norm": 0.3320484459400177,
1911
+ "learning_rate": 5.290724144552379e-06,
1912
+ "loss": 0.1058,
1913
+ "step": 272
1914
+ },
1915
+ {
1916
+ "epoch": 1.5972696245733788,
1917
+ "grad_norm": 0.31170666217803955,
1918
+ "learning_rate": 5.2565533579139484e-06,
1919
+ "loss": 0.0923,
1920
+ "step": 273
1921
+ },
1922
+ {
1923
+ "epoch": 1.6031204290589955,
1924
+ "grad_norm": 0.3246054947376251,
1925
+ "learning_rate": 5.222370552772353e-06,
1926
+ "loss": 0.0999,
1927
+ "step": 274
1928
+ },
1929
+ {
1930
+ "epoch": 1.6089712335446125,
1931
+ "grad_norm": 0.35101330280303955,
1932
+ "learning_rate": 5.188177330455886e-06,
1933
+ "loss": 0.1082,
1934
+ "step": 275
1935
+ },
1936
+ {
1937
+ "epoch": 1.6148220380302292,
1938
+ "grad_norm": 0.29805803298950195,
1939
+ "learning_rate": 5.153975292780852e-06,
1940
+ "loss": 0.0867,
1941
+ "step": 276
1942
+ },
1943
+ {
1944
+ "epoch": 1.6206728425158459,
1945
+ "grad_norm": 0.32440170645713806,
1946
+ "learning_rate": 5.119766041976516e-06,
1947
+ "loss": 0.0936,
1948
+ "step": 277
1949
+ },
1950
+ {
1951
+ "epoch": 1.6265236470014628,
1952
+ "grad_norm": 0.33313047885894775,
1953
+ "learning_rate": 5.085551180610046e-06,
1954
+ "loss": 0.0929,
1955
+ "step": 278
1956
+ },
1957
+ {
1958
+ "epoch": 1.6323744514870795,
1959
+ "grad_norm": 0.3095255494117737,
1960
+ "learning_rate": 5.05133231151145e-06,
1961
+ "loss": 0.0847,
1962
+ "step": 279
1963
+ },
1964
+ {
1965
+ "epoch": 1.6382252559726962,
1966
+ "grad_norm": 0.3372449278831482,
1967
+ "learning_rate": 5.017111037698477e-06,
1968
+ "loss": 0.104,
1969
+ "step": 280
1970
+ },
1971
+ {
1972
+ "epoch": 1.6494392979034618,
1973
+ "grad_norm": 0.32717257738113403,
1974
+ "learning_rate": 4.9828889623015265e-06,
1975
+ "loss": 0.0863,
1976
+ "step": 281
1977
+ },
1978
+ {
1979
+ "epoch": 1.6552901023890785,
1980
+ "grad_norm": 0.3508462607860565,
1981
+ "learning_rate": 4.948667688488552e-06,
1982
+ "loss": 0.0937,
1983
+ "step": 282
1984
+ },
1985
+ {
1986
+ "epoch": 1.6611409068746954,
1987
+ "grad_norm": 0.3077023923397064,
1988
+ "learning_rate": 4.9144488193899546e-06,
1989
+ "loss": 0.0933,
1990
+ "step": 283
1991
+ },
1992
+ {
1993
+ "epoch": 1.6669917113603119,
1994
+ "grad_norm": 0.3831559419631958,
1995
+ "learning_rate": 4.880233958023486e-06,
1996
+ "loss": 0.1023,
1997
+ "step": 284
1998
+ },
1999
+ {
2000
+ "epoch": 1.6728425158459288,
2001
+ "grad_norm": 0.31356075406074524,
2002
+ "learning_rate": 4.846024707219149e-06,
2003
+ "loss": 0.088,
2004
+ "step": 285
2005
+ },
2006
+ {
2007
+ "epoch": 1.6786933203315457,
2008
+ "grad_norm": 0.35236048698425293,
2009
+ "learning_rate": 4.811822669544115e-06,
2010
+ "loss": 0.0942,
2011
+ "step": 286
2012
+ },
2013
+ {
2014
+ "epoch": 1.6845441248171622,
2015
+ "grad_norm": 0.32862260937690735,
2016
+ "learning_rate": 4.777629447227649e-06,
2017
+ "loss": 0.1004,
2018
+ "step": 287
2019
+ },
2020
+ {
2021
+ "epoch": 1.6903949293027791,
2022
+ "grad_norm": 0.29887956380844116,
2023
+ "learning_rate": 4.7434466420860515e-06,
2024
+ "loss": 0.0838,
2025
+ "step": 288
2026
+ },
2027
+ {
2028
+ "epoch": 1.696245733788396,
2029
+ "grad_norm": 0.38082054257392883,
2030
+ "learning_rate": 4.7092758554476215e-06,
2031
+ "loss": 0.0955,
2032
+ "step": 289
2033
+ },
2034
+ {
2035
+ "epoch": 1.7020965382740125,
2036
+ "grad_norm": 0.3033042848110199,
2037
+ "learning_rate": 4.675118688077634e-06,
2038
+ "loss": 0.0907,
2039
+ "step": 290
2040
+ },
2041
+ {
2042
+ "epoch": 1.7079473427596294,
2043
+ "grad_norm": 0.3312883973121643,
2044
+ "learning_rate": 4.640976740103363e-06,
2045
+ "loss": 0.0922,
2046
+ "step": 291
2047
+ },
2048
+ {
2049
+ "epoch": 1.7137981472452464,
2050
+ "grad_norm": 0.3242630064487457,
2051
+ "learning_rate": 4.606851610939108e-06,
2052
+ "loss": 0.0918,
2053
+ "step": 292
2054
+ },
2055
+ {
2056
+ "epoch": 1.7196489517308629,
2057
+ "grad_norm": 0.3354322612285614,
2058
+ "learning_rate": 4.572744899211275e-06,
2059
+ "loss": 0.0988,
2060
+ "step": 293
2061
+ },
2062
+ {
2063
+ "epoch": 1.7254997562164798,
2064
+ "grad_norm": 0.3046918213367462,
2065
+ "learning_rate": 4.53865820268349e-06,
2066
+ "loss": 0.0829,
2067
+ "step": 294
2068
+ },
2069
+ {
2070
+ "epoch": 1.7313505607020967,
2071
+ "grad_norm": 0.3330130875110626,
2072
+ "learning_rate": 4.504593118181745e-06,
2073
+ "loss": 0.1022,
2074
+ "step": 295
2075
+ },
2076
+ {
2077
+ "epoch": 1.7372013651877132,
2078
+ "grad_norm": 0.31168803572654724,
2079
+ "learning_rate": 4.470551241519594e-06,
2080
+ "loss": 0.0841,
2081
+ "step": 296
2082
+ },
2083
+ {
2084
+ "epoch": 1.74305216967333,
2085
+ "grad_norm": 0.3428613841533661,
2086
+ "learning_rate": 4.436534167423395e-06,
2087
+ "loss": 0.0921,
2088
+ "step": 297
2089
+ },
2090
+ {
2091
+ "epoch": 1.748902974158947,
2092
+ "grad_norm": 0.33911219239234924,
2093
+ "learning_rate": 4.402543489457607e-06,
2094
+ "loss": 0.0969,
2095
+ "step": 298
2096
+ },
2097
+ {
2098
+ "epoch": 1.7547537786445635,
2099
+ "grad_norm": 0.3178160488605499,
2100
+ "learning_rate": 4.368580799950133e-06,
2101
+ "loss": 0.0884,
2102
+ "step": 299
2103
+ },
2104
+ {
2105
+ "epoch": 1.7606045831301804,
2106
+ "grad_norm": 0.3277307152748108,
2107
+ "learning_rate": 4.334647689917734e-06,
2108
+ "loss": 0.0974,
2109
+ "step": 300
2110
+ },
2111
+ {
2112
+ "epoch": 1.7664553876157971,
2113
+ "grad_norm": 0.3535825312137604,
2114
+ "learning_rate": 4.300745748991478e-06,
2115
+ "loss": 0.0896,
2116
+ "step": 301
2117
+ },
2118
+ {
2119
+ "epoch": 1.7723061921014138,
2120
+ "grad_norm": 0.32610517740249634,
2121
+ "learning_rate": 4.266876565342298e-06,
2122
+ "loss": 0.0994,
2123
+ "step": 302
2124
+ },
2125
+ {
2126
+ "epoch": 1.7781569965870307,
2127
+ "grad_norm": 0.3436540961265564,
2128
+ "learning_rate": 4.233041725606573e-06,
2129
+ "loss": 0.0968,
2130
+ "step": 303
2131
+ },
2132
+ {
2133
+ "epoch": 1.7840078010726474,
2134
+ "grad_norm": 0.35339972376823425,
2135
+ "learning_rate": 4.199242814811807e-06,
2136
+ "loss": 0.0964,
2137
+ "step": 304
2138
+ },
2139
+ {
2140
+ "epoch": 1.7898586055582641,
2141
+ "grad_norm": 0.3242701292037964,
2142
+ "learning_rate": 4.1654814163023735e-06,
2143
+ "loss": 0.0817,
2144
+ "step": 305
2145
+ },
2146
+ {
2147
+ "epoch": 1.795709410043881,
2148
+ "grad_norm": 0.38222551345825195,
2149
+ "learning_rate": 4.131759111665349e-06,
2150
+ "loss": 0.099,
2151
+ "step": 306
2152
+ },
2153
+ {
2154
+ "epoch": 1.8015602145294978,
2155
+ "grad_norm": 0.32425937056541443,
2156
+ "learning_rate": 4.098077480656415e-06,
2157
+ "loss": 0.0873,
2158
+ "step": 307
2159
+ },
2160
+ {
2161
+ "epoch": 1.8074110190151145,
2162
+ "grad_norm": 0.36380070447921753,
2163
+ "learning_rate": 4.064438101125859e-06,
2164
+ "loss": 0.0973,
2165
+ "step": 308
2166
+ },
2167
+ {
2168
+ "epoch": 1.8132618235007314,
2169
+ "grad_norm": 0.34305286407470703,
2170
+ "learning_rate": 4.03084254894465e-06,
2171
+ "loss": 0.0922,
2172
+ "step": 309
2173
+ },
2174
+ {
2175
+ "epoch": 1.819112627986348,
2176
+ "grad_norm": 0.353582501411438,
2177
+ "learning_rate": 3.997292397930624e-06,
2178
+ "loss": 0.0872,
2179
+ "step": 310
2180
+ },
2181
+ {
2182
+ "epoch": 1.8249634324719648,
2183
+ "grad_norm": 0.29907920956611633,
2184
+ "learning_rate": 3.963789219774753e-06,
2185
+ "loss": 0.0845,
2186
+ "step": 311
2187
+ },
2188
+ {
2189
+ "epoch": 1.8308142369575817,
2190
+ "grad_norm": 0.3310573399066925,
2191
+ "learning_rate": 3.930334583967514e-06,
2192
+ "loss": 0.0941,
2193
+ "step": 312
2194
+ },
2195
+ {
2196
+ "epoch": 1.8366650414431984,
2197
+ "grad_norm": 0.3727741241455078,
2198
+ "learning_rate": 3.896930057725372e-06,
2199
+ "loss": 0.0807,
2200
+ "step": 313
2201
+ },
2202
+ {
2203
+ "epoch": 1.8425158459288151,
2204
+ "grad_norm": 0.3489404022693634,
2205
+ "learning_rate": 3.863577205917356e-06,
2206
+ "loss": 0.1051,
2207
+ "step": 314
2208
+ },
2209
+ {
2210
+ "epoch": 1.848366650414432,
2211
+ "grad_norm": 0.3304194509983063,
2212
+ "learning_rate": 3.8302775909917585e-06,
2213
+ "loss": 0.0937,
2214
+ "step": 315
2215
+ },
2216
+ {
2217
+ "epoch": 1.8542174549000487,
2218
+ "grad_norm": 0.31810325384140015,
2219
+ "learning_rate": 3.7970327729029288e-06,
2220
+ "loss": 0.0917,
2221
+ "step": 316
2222
+ },
2223
+ {
2224
+ "epoch": 1.8600682593856654,
2225
+ "grad_norm": 0.3284529149532318,
2226
+ "learning_rate": 3.7638443090382067e-06,
2227
+ "loss": 0.0864,
2228
+ "step": 317
2229
+ },
2230
+ {
2231
+ "epoch": 1.8659190638712824,
2232
+ "grad_norm": 0.34479424357414246,
2233
+ "learning_rate": 3.730713754144961e-06,
2234
+ "loss": 0.0938,
2235
+ "step": 318
2236
+ },
2237
+ {
2238
+ "epoch": 1.871769868356899,
2239
+ "grad_norm": 0.32104262709617615,
2240
+ "learning_rate": 3.6976426602577565e-06,
2241
+ "loss": 0.0866,
2242
+ "step": 319
2243
+ },
2244
+ {
2245
+ "epoch": 1.8776206728425158,
2246
+ "grad_norm": 0.32675501704216003,
2247
+ "learning_rate": 3.6646325766256423e-06,
2248
+ "loss": 0.0841,
2249
+ "step": 320
2250
+ },
2251
+ {
2252
+ "epoch": 1.8834714773281327,
2253
+ "grad_norm": 0.3269306421279907,
2254
+ "learning_rate": 3.6316850496395863e-06,
2255
+ "loss": 0.0926,
2256
+ "step": 321
2257
+ },
2258
+ {
2259
+ "epoch": 1.8893222818137494,
2260
+ "grad_norm": 0.32884150743484497,
2261
+ "learning_rate": 3.598801622760021e-06,
2262
+ "loss": 0.0816,
2263
+ "step": 322
2264
+ },
2265
+ {
2266
+ "epoch": 1.895173086299366,
2267
+ "grad_norm": 0.3181195557117462,
2268
+ "learning_rate": 3.5659838364445505e-06,
2269
+ "loss": 0.0887,
2270
+ "step": 323
2271
+ },
2272
+ {
2273
+ "epoch": 1.901023890784983,
2274
+ "grad_norm": 0.3327188789844513,
2275
+ "learning_rate": 3.5332332280757706e-06,
2276
+ "loss": 0.0966,
2277
+ "step": 324
2278
+ },
2279
+ {
2280
+ "epoch": 1.9068746952705997,
2281
+ "grad_norm": 0.3728068470954895,
2282
+ "learning_rate": 3.5005513318892666e-06,
2283
+ "loss": 0.0991,
2284
+ "step": 325
2285
+ },
2286
+ {
2287
+ "epoch": 1.9127254997562164,
2288
+ "grad_norm": 0.37355464696884155,
2289
+ "learning_rate": 3.4679396789017263e-06,
2290
+ "loss": 0.0867,
2291
+ "step": 326
2292
+ },
2293
+ {
2294
+ "epoch": 1.9185763042418333,
2295
+ "grad_norm": 0.32492557168006897,
2296
+ "learning_rate": 3.4353997968392295e-06,
2297
+ "loss": 0.0894,
2298
+ "step": 327
2299
+ },
2300
+ {
2301
+ "epoch": 1.92442710872745,
2302
+ "grad_norm": 0.322022944688797,
2303
+ "learning_rate": 3.402933210065665e-06,
2304
+ "loss": 0.0894,
2305
+ "step": 328
2306
+ },
2307
+ {
2308
+ "epoch": 1.9302779132130667,
2309
+ "grad_norm": 0.31964507699012756,
2310
+ "learning_rate": 3.3705414395113354e-06,
2311
+ "loss": 0.0918,
2312
+ "step": 329
2313
+ },
2314
+ {
2315
+ "epoch": 1.9361287176986837,
2316
+ "grad_norm": 0.33390405774116516,
2317
+ "learning_rate": 3.3382260026017027e-06,
2318
+ "loss": 0.0884,
2319
+ "step": 330
2320
+ },
2321
+ {
2322
+ "epoch": 1.9419795221843004,
2323
+ "grad_norm": 0.33786076307296753,
2324
+ "learning_rate": 3.305988413186295e-06,
2325
+ "loss": 0.0872,
2326
+ "step": 331
2327
+ },
2328
+ {
2329
+ "epoch": 1.947830326669917,
2330
+ "grad_norm": 0.3263327181339264,
2331
+ "learning_rate": 3.2738301814678015e-06,
2332
+ "loss": 0.0922,
2333
+ "step": 332
2334
+ },
2335
+ {
2336
+ "epoch": 1.953681131155534,
2337
+ "grad_norm": 0.3341035544872284,
2338
+ "learning_rate": 3.241752813931316e-06,
2339
+ "loss": 0.0855,
2340
+ "step": 333
2341
+ },
2342
+ {
2343
+ "epoch": 1.9595319356411507,
2344
+ "grad_norm": 0.38113948702812195,
2345
+ "learning_rate": 3.2097578132737716e-06,
2346
+ "loss": 0.1033,
2347
+ "step": 334
2348
+ },
2349
+ {
2350
+ "epoch": 1.9653827401267674,
2351
+ "grad_norm": 0.33053913712501526,
2352
+ "learning_rate": 3.1778466783335328e-06,
2353
+ "loss": 0.0958,
2354
+ "step": 335
2355
+ },
2356
+ {
2357
+ "epoch": 1.9712335446123843,
2358
+ "grad_norm": 0.34964510798454285,
2359
+ "learning_rate": 3.1460209040201967e-06,
2360
+ "loss": 0.0886,
2361
+ "step": 336
2362
+ },
2363
+ {
2364
+ "epoch": 1.977084349098001,
2365
+ "grad_norm": 0.3344588279724121,
2366
+ "learning_rate": 3.114281981244553e-06,
2367
+ "loss": 0.0925,
2368
+ "step": 337
2369
+ },
2370
+ {
2371
+ "epoch": 1.9829351535836177,
2372
+ "grad_norm": 0.32202234864234924,
2373
+ "learning_rate": 3.082631396848743e-06,
2374
+ "loss": 0.0954,
2375
+ "step": 338
2376
+ },
2377
+ {
2378
+ "epoch": 1.9887859580692346,
2379
+ "grad_norm": 0.3456474542617798,
2380
+ "learning_rate": 3.0510706335366034e-06,
2381
+ "loss": 0.0856,
2382
+ "step": 339
2383
+ },
2384
+ {
2385
+ "epoch": 1.9946367625548513,
2386
+ "grad_norm": 0.3227405250072479,
2387
+ "learning_rate": 3.019601169804216e-06,
2388
+ "loss": 0.0878,
2389
+ "step": 340
2390
+ },
2391
+ {
2392
+ "epoch": 2.000487567040468,
2393
+ "grad_norm": 0.31147223711013794,
2394
+ "learning_rate": 2.9882244798706372e-06,
2395
+ "loss": 0.0759,
2396
+ "step": 341
2397
+ },
2398
+ {
2399
+ "epoch": 2.006338371526085,
2400
+ "grad_norm": 0.2763703763484955,
2401
+ "learning_rate": 2.956942033608843e-06,
2402
+ "loss": 0.0525,
2403
+ "step": 342
2404
+ },
2405
+ {
2406
+ "epoch": 2.0121891760117014,
2407
+ "grad_norm": 0.29425641894340515,
2408
+ "learning_rate": 2.9257552964768644e-06,
2409
+ "loss": 0.0601,
2410
+ "step": 343
2411
+ },
2412
+ {
2413
+ "epoch": 2.0180399804973184,
2414
+ "grad_norm": 0.2795559763908386,
2415
+ "learning_rate": 2.8946657294491452e-06,
2416
+ "loss": 0.0573,
2417
+ "step": 344
2418
+ },
2419
+ {
2420
+ "epoch": 2.0238907849829353,
2421
+ "grad_norm": 0.34892964363098145,
2422
+ "learning_rate": 2.863674788948097e-06,
2423
+ "loss": 0.0628,
2424
+ "step": 345
2425
+ },
2426
+ {
2427
+ "epoch": 2.0297415894685518,
2428
+ "grad_norm": 0.28666457533836365,
2429
+ "learning_rate": 2.832783926775865e-06,
2430
+ "loss": 0.0524,
2431
+ "step": 346
2432
+ },
2433
+ {
2434
+ "epoch": 2.0355923939541687,
2435
+ "grad_norm": 0.26126629114151,
2436
+ "learning_rate": 2.8019945900463307e-06,
2437
+ "loss": 0.0578,
2438
+ "step": 347
2439
+ },
2440
+ {
2441
+ "epoch": 2.0414431984397856,
2442
+ "grad_norm": 0.2673165500164032,
2443
+ "learning_rate": 2.771308221117309e-06,
2444
+ "loss": 0.0574,
2445
+ "step": 348
2446
+ },
2447
+ {
2448
+ "epoch": 2.047294002925402,
2449
+ "grad_norm": 0.2941865921020508,
2450
+ "learning_rate": 2.740726257522987e-06,
2451
+ "loss": 0.0551,
2452
+ "step": 349
2453
+ },
2454
+ {
2455
+ "epoch": 2.053144807411019,
2456
+ "grad_norm": 0.2612977623939514,
2457
+ "learning_rate": 2.7102501319065706e-06,
2458
+ "loss": 0.0644,
2459
+ "step": 350
2460
+ },
2461
+ {
2462
+ "epoch": 2.058995611896636,
2463
+ "grad_norm": 0.2891154885292053,
2464
+ "learning_rate": 2.6798812719531843e-06,
2465
+ "loss": 0.0563,
2466
+ "step": 351
2467
+ },
2468
+ {
2469
+ "epoch": 2.0648464163822524,
2470
+ "grad_norm": 0.2692639231681824,
2471
+ "learning_rate": 2.6496211003229795e-06,
2472
+ "loss": 0.0541,
2473
+ "step": 352
2474
+ },
2475
+ {
2476
+ "epoch": 2.0706972208678693,
2477
+ "grad_norm": 0.3014177680015564,
2478
+ "learning_rate": 2.6194710345845e-06,
2479
+ "loss": 0.0502,
2480
+ "step": 353
2481
+ },
2482
+ {
2483
+ "epoch": 2.0765480253534863,
2484
+ "grad_norm": 0.28264397382736206,
2485
+ "learning_rate": 2.5894324871482557e-06,
2486
+ "loss": 0.0566,
2487
+ "step": 354
2488
+ },
2489
+ {
2490
+ "epoch": 2.0823988298391027,
2491
+ "grad_norm": 0.2884339392185211,
2492
+ "learning_rate": 2.559506865200576e-06,
2493
+ "loss": 0.0527,
2494
+ "step": 355
2495
+ },
2496
+ {
2497
+ "epoch": 2.0882496343247197,
2498
+ "grad_norm": 0.26103538274765015,
2499
+ "learning_rate": 2.529695570637679e-06,
2500
+ "loss": 0.0596,
2501
+ "step": 356
2502
+ },
2503
+ {
2504
+ "epoch": 2.0941004388103366,
2505
+ "grad_norm": 0.2645220160484314,
2506
+ "learning_rate": 2.5000000000000015e-06,
2507
+ "loss": 0.0564,
2508
+ "step": 357
2509
+ },
2510
+ {
2511
+ "epoch": 2.099951243295953,
2512
+ "grad_norm": 0.3041948080062866,
2513
+ "learning_rate": 2.4704215444067684e-06,
2514
+ "loss": 0.0529,
2515
+ "step": 358
2516
+ },
2517
+ {
2518
+ "epoch": 2.10580204778157,
2519
+ "grad_norm": 0.26852619647979736,
2520
+ "learning_rate": 2.4409615894908407e-06,
2521
+ "loss": 0.0551,
2522
+ "step": 359
2523
+ },
2524
+ {
2525
+ "epoch": 2.111652852267187,
2526
+ "grad_norm": 0.2973073422908783,
2527
+ "learning_rate": 2.411621515333788e-06,
2528
+ "loss": 0.0616,
2529
+ "step": 360
2530
+ },
2531
+ {
2532
+ "epoch": 2.1175036567528034,
2533
+ "grad_norm": 0.2823100686073303,
2534
+ "learning_rate": 2.3824026964012487e-06,
2535
+ "loss": 0.0616,
2536
+ "step": 361
2537
+ },
2538
+ {
2539
+ "epoch": 2.1233544612384203,
2540
+ "grad_norm": 0.25822895765304565,
2541
+ "learning_rate": 2.35330650147853e-06,
2542
+ "loss": 0.0512,
2543
+ "step": 362
2544
+ },
2545
+ {
2546
+ "epoch": 2.1292052657240372,
2547
+ "grad_norm": 0.34585779905319214,
2548
+ "learning_rate": 2.324334293606499e-06,
2549
+ "loss": 0.0495,
2550
+ "step": 363
2551
+ },
2552
+ {
2553
+ "epoch": 2.1350560702096537,
2554
+ "grad_norm": 0.2767939567565918,
2555
+ "learning_rate": 2.2954874300177197e-06,
2556
+ "loss": 0.0561,
2557
+ "step": 364
2558
+ },
2559
+ {
2560
+ "epoch": 2.1409068746952706,
2561
+ "grad_norm": 0.27340444922447205,
2562
+ "learning_rate": 2.266767262072878e-06,
2563
+ "loss": 0.0562,
2564
+ "step": 365
2565
+ },
2566
+ {
2567
+ "epoch": 2.1467576791808876,
2568
+ "grad_norm": 0.273384690284729,
2569
+ "learning_rate": 2.238175135197471e-06,
2570
+ "loss": 0.0593,
2571
+ "step": 366
2572
+ },
2573
+ {
2574
+ "epoch": 2.152608483666504,
2575
+ "grad_norm": 0.28088316321372986,
2576
+ "learning_rate": 2.2097123888187825e-06,
2577
+ "loss": 0.0505,
2578
+ "step": 367
2579
+ },
2580
+ {
2581
+ "epoch": 2.158459288152121,
2582
+ "grad_norm": 0.2613745331764221,
2583
+ "learning_rate": 2.181380356303139e-06,
2584
+ "loss": 0.0525,
2585
+ "step": 368
2586
+ },
2587
+ {
2588
+ "epoch": 2.164310092637738,
2589
+ "grad_norm": 0.2488614320755005,
2590
+ "learning_rate": 2.1531803648934333e-06,
2591
+ "loss": 0.0498,
2592
+ "step": 369
2593
+ },
2594
+ {
2595
+ "epoch": 2.1701608971233544,
2596
+ "grad_norm": 0.2990882098674774,
2597
+ "learning_rate": 2.1251137356469677e-06,
2598
+ "loss": 0.0551,
2599
+ "step": 370
2600
+ },
2601
+ {
2602
+ "epoch": 2.1760117016089713,
2603
+ "grad_norm": 0.30025023221969604,
2604
+ "learning_rate": 2.0971817833735548e-06,
2605
+ "loss": 0.055,
2606
+ "step": 371
2607
+ },
2608
+ {
2609
+ "epoch": 2.181862506094588,
2610
+ "grad_norm": 0.2645871639251709,
2611
+ "learning_rate": 2.069385816573928e-06,
2612
+ "loss": 0.0564,
2613
+ "step": 372
2614
+ },
2615
+ {
2616
+ "epoch": 2.1877133105802047,
2617
+ "grad_norm": 0.29934531450271606,
2618
+ "learning_rate": 2.0417271373784403e-06,
2619
+ "loss": 0.0613,
2620
+ "step": 373
2621
+ },
2622
+ {
2623
+ "epoch": 2.1935641150658216,
2624
+ "grad_norm": 0.2638642489910126,
2625
+ "learning_rate": 2.0142070414860704e-06,
2626
+ "loss": 0.0539,
2627
+ "step": 374
2628
+ },
2629
+ {
2630
+ "epoch": 2.1994149195514385,
2631
+ "grad_norm": 0.2566313147544861,
2632
+ "learning_rate": 1.9868268181037186e-06,
2633
+ "loss": 0.054,
2634
+ "step": 375
2635
+ },
2636
+ {
2637
+ "epoch": 2.205265724037055,
2638
+ "grad_norm": 0.274949312210083,
2639
+ "learning_rate": 1.9595877498858175e-06,
2640
+ "loss": 0.0644,
2641
+ "step": 376
2642
+ },
2643
+ {
2644
+ "epoch": 2.211116528522672,
2645
+ "grad_norm": 0.21347293257713318,
2646
+ "learning_rate": 1.9324911128742406e-06,
2647
+ "loss": 0.046,
2648
+ "step": 377
2649
+ },
2650
+ {
2651
+ "epoch": 2.216967333008289,
2652
+ "grad_norm": 0.296029657125473,
2653
+ "learning_rate": 1.9055381764385272e-06,
2654
+ "loss": 0.0674,
2655
+ "step": 378
2656
+ },
2657
+ {
2658
+ "epoch": 2.2228181374939053,
2659
+ "grad_norm": 0.3009437024593353,
2660
+ "learning_rate": 1.8787302032164168e-06,
2661
+ "loss": 0.0635,
2662
+ "step": 379
2663
+ },
2664
+ {
2665
+ "epoch": 2.2286689419795223,
2666
+ "grad_norm": 0.2573295831680298,
2667
+ "learning_rate": 1.8520684490547014e-06,
2668
+ "loss": 0.0621,
2669
+ "step": 380
2670
+ },
2671
+ {
2672
+ "epoch": 2.234519746465139,
2673
+ "grad_norm": 0.28992000222206116,
2674
+ "learning_rate": 1.8255541629503865e-06,
2675
+ "loss": 0.051,
2676
+ "step": 381
2677
+ },
2678
+ {
2679
+ "epoch": 2.2403705509507557,
2680
+ "grad_norm": 0.2807430028915405,
2681
+ "learning_rate": 1.7991885869921928e-06,
2682
+ "loss": 0.056,
2683
+ "step": 382
2684
+ },
2685
+ {
2686
+ "epoch": 2.2462213554363726,
2687
+ "grad_norm": 0.2580874562263489,
2688
+ "learning_rate": 1.7729729563023613e-06,
2689
+ "loss": 0.0576,
2690
+ "step": 383
2691
+ },
2692
+ {
2693
+ "epoch": 2.2520721599219895,
2694
+ "grad_norm": 0.27799367904663086,
2695
+ "learning_rate": 1.746908498978791e-06,
2696
+ "loss": 0.0491,
2697
+ "step": 384
2698
+ },
2699
+ {
2700
+ "epoch": 2.257922964407606,
2701
+ "grad_norm": 0.28631195425987244,
2702
+ "learning_rate": 1.7209964360375137e-06,
2703
+ "loss": 0.0516,
2704
+ "step": 385
2705
+ },
2706
+ {
2707
+ "epoch": 2.263773768893223,
2708
+ "grad_norm": 0.2720945477485657,
2709
+ "learning_rate": 1.6952379813554914e-06,
2710
+ "loss": 0.0525,
2711
+ "step": 386
2712
+ },
2713
+ {
2714
+ "epoch": 2.26962457337884,
2715
+ "grad_norm": 0.2580566108226776,
2716
+ "learning_rate": 1.6696343416137495e-06,
2717
+ "loss": 0.0599,
2718
+ "step": 387
2719
+ },
2720
+ {
2721
+ "epoch": 2.2754753778644563,
2722
+ "grad_norm": 0.2627420723438263,
2723
+ "learning_rate": 1.6441867162408514e-06,
2724
+ "loss": 0.0531,
2725
+ "step": 388
2726
+ },
2727
+ {
2728
+ "epoch": 2.2813261823500732,
2729
+ "grad_norm": 0.2853785753250122,
2730
+ "learning_rate": 1.6188962973567068e-06,
2731
+ "loss": 0.0563,
2732
+ "step": 389
2733
+ },
2734
+ {
2735
+ "epoch": 2.28717698683569,
2736
+ "grad_norm": 0.26825377345085144,
2737
+ "learning_rate": 1.5937642697167288e-06,
2738
+ "loss": 0.062,
2739
+ "step": 390
2740
+ },
2741
+ {
2742
+ "epoch": 2.2930277913213066,
2743
+ "grad_norm": 0.24329300224781036,
2744
+ "learning_rate": 1.5687918106563326e-06,
2745
+ "loss": 0.0476,
2746
+ "step": 391
2747
+ },
2748
+ {
2749
+ "epoch": 2.2988785958069236,
2750
+ "grad_norm": 0.23808631300926208,
2751
+ "learning_rate": 1.5439800900357765e-06,
2752
+ "loss": 0.0514,
2753
+ "step": 392
2754
+ },
2755
+ {
2756
+ "epoch": 2.30472940029254,
2757
+ "grad_norm": 0.26007384061813354,
2758
+ "learning_rate": 1.5193302701853674e-06,
2759
+ "loss": 0.0478,
2760
+ "step": 393
2761
+ },
2762
+ {
2763
+ "epoch": 2.310580204778157,
2764
+ "grad_norm": 0.3034699261188507,
2765
+ "learning_rate": 1.4948435058510036e-06,
2766
+ "loss": 0.0596,
2767
+ "step": 394
2768
+ },
2769
+ {
2770
+ "epoch": 2.316431009263774,
2771
+ "grad_norm": 0.2601581811904907,
2772
+ "learning_rate": 1.4705209441400841e-06,
2773
+ "loss": 0.0529,
2774
+ "step": 395
2775
+ },
2776
+ {
2777
+ "epoch": 2.3222818137493904,
2778
+ "grad_norm": 0.26665881276130676,
2779
+ "learning_rate": 1.4463637244677648e-06,
2780
+ "loss": 0.0516,
2781
+ "step": 396
2782
+ },
2783
+ {
2784
+ "epoch": 2.3281326182350073,
2785
+ "grad_norm": 0.252401202917099,
2786
+ "learning_rate": 1.422372978503589e-06,
2787
+ "loss": 0.0568,
2788
+ "step": 397
2789
+ },
2790
+ {
2791
+ "epoch": 2.333983422720624,
2792
+ "grad_norm": 0.2799394726753235,
2793
+ "learning_rate": 1.3985498301184685e-06,
2794
+ "loss": 0.0544,
2795
+ "step": 398
2796
+ },
2797
+ {
2798
+ "epoch": 2.3398342272062407,
2799
+ "grad_norm": 0.29879605770111084,
2800
+ "learning_rate": 1.374895395332037e-06,
2801
+ "loss": 0.06,
2802
+ "step": 399
2803
+ },
2804
+ {
2805
+ "epoch": 2.3456850316918576,
2806
+ "grad_norm": 0.27037307620048523,
2807
+ "learning_rate": 1.351410782260366e-06,
2808
+ "loss": 0.0601,
2809
+ "step": 400
2810
+ },
2811
+ {
2812
+ "epoch": 2.3515358361774745,
2813
+ "grad_norm": 0.26528164744377136,
2814
+ "learning_rate": 1.3280970910640573e-06,
2815
+ "loss": 0.0547,
2816
+ "step": 401
2817
+ },
2818
+ {
2819
+ "epoch": 2.357386640663091,
2820
+ "grad_norm": 0.3015322983264923,
2821
+ "learning_rate": 1.3049554138967052e-06,
2822
+ "loss": 0.0586,
2823
+ "step": 402
2824
+ },
2825
+ {
2826
+ "epoch": 2.363237445148708,
2827
+ "grad_norm": 0.2491970807313919,
2828
+ "learning_rate": 1.2819868348537263e-06,
2829
+ "loss": 0.0492,
2830
+ "step": 403
2831
+ },
2832
+ {
2833
+ "epoch": 2.369088249634325,
2834
+ "grad_norm": 0.262861967086792,
2835
+ "learning_rate": 1.259192429921584e-06,
2836
+ "loss": 0.0547,
2837
+ "step": 404
2838
+ },
2839
+ {
2840
+ "epoch": 2.3749390541199413,
2841
+ "grad_norm": 0.2699519991874695,
2842
+ "learning_rate": 1.2365732669273778e-06,
2843
+ "loss": 0.0511,
2844
+ "step": 405
2845
+ },
2846
+ {
2847
+ "epoch": 2.3807898586055583,
2848
+ "grad_norm": 0.30865174531936646,
2849
+ "learning_rate": 1.2141304054888204e-06,
2850
+ "loss": 0.0583,
2851
+ "step": 406
2852
+ },
2853
+ {
2854
+ "epoch": 2.386640663091175,
2855
+ "grad_norm": 0.28503602743148804,
2856
+ "learning_rate": 1.1918648969645947e-06,
2857
+ "loss": 0.0615,
2858
+ "step": 407
2859
+ },
2860
+ {
2861
+ "epoch": 2.3924914675767917,
2862
+ "grad_norm": 0.22828808426856995,
2863
+ "learning_rate": 1.1697777844051105e-06,
2864
+ "loss": 0.0469,
2865
+ "step": 408
2866
+ },
2867
+ {
2868
+ "epoch": 2.3983422720624086,
2869
+ "grad_norm": 0.28332284092903137,
2870
+ "learning_rate": 1.1478701025036359e-06,
2871
+ "loss": 0.0581,
2872
+ "step": 409
2873
+ },
2874
+ {
2875
+ "epoch": 2.4041930765480255,
2876
+ "grad_norm": 0.2843197286128998,
2877
+ "learning_rate": 1.126142877547826e-06,
2878
+ "loss": 0.0633,
2879
+ "step": 410
2880
+ },
2881
+ {
2882
+ "epoch": 2.410043881033642,
2883
+ "grad_norm": 0.2659061849117279,
2884
+ "learning_rate": 1.1045971273716476e-06,
2885
+ "loss": 0.0522,
2886
+ "step": 411
2887
+ },
2888
+ {
2889
+ "epoch": 2.415894685519259,
2890
+ "grad_norm": 0.28145578503608704,
2891
+ "learning_rate": 1.083233861307697e-06,
2892
+ "loss": 0.0598,
2893
+ "step": 412
2894
+ },
2895
+ {
2896
+ "epoch": 2.421745490004876,
2897
+ "grad_norm": 0.29039686918258667,
2898
+ "learning_rate": 1.062054080139916e-06,
2899
+ "loss": 0.0585,
2900
+ "step": 413
2901
+ },
2902
+ {
2903
+ "epoch": 2.4275962944904923,
2904
+ "grad_norm": 0.2717377543449402,
2905
+ "learning_rate": 1.0410587760567104e-06,
2906
+ "loss": 0.0556,
2907
+ "step": 414
2908
+ },
2909
+ {
2910
+ "epoch": 2.4334470989761092,
2911
+ "grad_norm": 0.283588171005249,
2912
+ "learning_rate": 1.0202489326044663e-06,
2913
+ "loss": 0.0586,
2914
+ "step": 415
2915
+ },
2916
+ {
2917
+ "epoch": 2.439297903461726,
2918
+ "grad_norm": 0.2644156515598297,
2919
+ "learning_rate": 9.99625524641481e-07,
2920
+ "loss": 0.0519,
2921
+ "step": 416
2922
+ },
2923
+ {
2924
+ "epoch": 2.4451487079473426,
2925
+ "grad_norm": 0.28385522961616516,
2926
+ "learning_rate": 9.791895182922911e-07,
2927
+ "loss": 0.0584,
2928
+ "step": 417
2929
+ },
2930
+ {
2931
+ "epoch": 2.4509995124329595,
2932
+ "grad_norm": 0.2593843340873718,
2933
+ "learning_rate": 9.589418709024146e-07,
2934
+ "loss": 0.0582,
2935
+ "step": 418
2936
+ },
2937
+ {
2938
+ "epoch": 2.4568503169185765,
2939
+ "grad_norm": 0.28470900654792786,
2940
+ "learning_rate": 9.388835309934985e-07,
2941
+ "loss": 0.0651,
2942
+ "step": 419
2943
+ },
2944
+ {
2945
+ "epoch": 2.462701121404193,
2946
+ "grad_norm": 0.23977982997894287,
2947
+ "learning_rate": 9.190154382188921e-07,
2948
+ "loss": 0.0511,
2949
+ "step": 420
2950
+ },
2951
+ {
2952
+ "epoch": 2.46855192588981,
2953
+ "grad_norm": 0.2712235152721405,
2954
+ "learning_rate": 8.993385233196223e-07,
2955
+ "loss": 0.0545,
2956
+ "step": 421
2957
+ },
2958
+ {
2959
+ "epoch": 2.474402730375427,
2960
+ "grad_norm": 0.256510466337204,
2961
+ "learning_rate": 8.79853708080795e-07,
2962
+ "loss": 0.0563,
2963
+ "step": 422
2964
+ },
2965
+ {
2966
+ "epoch": 2.4802535348610433,
2967
+ "grad_norm": 0.27964967489242554,
2968
+ "learning_rate": 8.605619052884106e-07,
2969
+ "loss": 0.0634,
2970
+ "step": 423
2971
+ },
2972
+ {
2973
+ "epoch": 2.48610433934666,
2974
+ "grad_norm": 0.2607182264328003,
2975
+ "learning_rate": 8.414640186866063e-07,
2976
+ "loss": 0.0455,
2977
+ "step": 424
2978
+ },
2979
+ {
2980
+ "epoch": 2.491955143832277,
2981
+ "grad_norm": 0.28515201807022095,
2982
+ "learning_rate": 8.225609429353187e-07,
2983
+ "loss": 0.0582,
2984
+ "step": 425
2985
+ },
2986
+ {
2987
+ "epoch": 2.4978059483178936,
2988
+ "grad_norm": 0.29618608951568604,
2989
+ "learning_rate": 8.03853563568367e-07,
2990
+ "loss": 0.0615,
2991
+ "step": 426
2992
+ },
2993
+ {
2994
+ "epoch": 2.5036567528035105,
2995
+ "grad_norm": 0.30800962448120117,
2996
+ "learning_rate": 7.8534275695198e-07,
2997
+ "loss": 0.0654,
2998
+ "step": 427
2999
+ },
3000
+ {
3001
+ "epoch": 2.509507557289127,
3002
+ "grad_norm": 0.29063132405281067,
3003
+ "learning_rate": 7.670293902437331e-07,
3004
+ "loss": 0.0566,
3005
+ "step": 428
3006
+ },
3007
+ {
3008
+ "epoch": 2.515358361774744,
3009
+ "grad_norm": 0.28903132677078247,
3010
+ "learning_rate": 7.489143213519301e-07,
3011
+ "loss": 0.0635,
3012
+ "step": 429
3013
+ },
3014
+ {
3015
+ "epoch": 2.521209166260361,
3016
+ "grad_norm": 0.2604012191295624,
3017
+ "learning_rate": 7.309983988954078e-07,
3018
+ "loss": 0.0502,
3019
+ "step": 430
3020
+ },
3021
+ {
3022
+ "epoch": 2.5270599707459773,
3023
+ "grad_norm": 0.30102649331092834,
3024
+ "learning_rate": 7.132824621637891e-07,
3025
+ "loss": 0.057,
3026
+ "step": 431
3027
+ },
3028
+ {
3029
+ "epoch": 2.5329107752315942,
3030
+ "grad_norm": 0.2743903696537018,
3031
+ "learning_rate": 6.957673410781617e-07,
3032
+ "loss": 0.0458,
3033
+ "step": 432
3034
+ },
3035
+ {
3036
+ "epoch": 2.538761579717211,
3037
+ "grad_norm": 0.30411839485168457,
3038
+ "learning_rate": 6.784538561521986e-07,
3039
+ "loss": 0.0696,
3040
+ "step": 433
3041
+ },
3042
+ {
3043
+ "epoch": 2.5446123842028276,
3044
+ "grad_norm": 0.26881927251815796,
3045
+ "learning_rate": 6.613428184537235e-07,
3046
+ "loss": 0.0585,
3047
+ "step": 434
3048
+ },
3049
+ {
3050
+ "epoch": 2.5504631886884446,
3051
+ "grad_norm": 0.2715945541858673,
3052
+ "learning_rate": 6.444350295667112e-07,
3053
+ "loss": 0.0506,
3054
+ "step": 435
3055
+ },
3056
+ {
3057
+ "epoch": 2.5563139931740615,
3058
+ "grad_norm": 0.27122291922569275,
3059
+ "learning_rate": 6.277312815537423e-07,
3060
+ "loss": 0.0548,
3061
+ "step": 436
3062
+ },
3063
+ {
3064
+ "epoch": 2.562164797659678,
3065
+ "grad_norm": 0.30401352047920227,
3066
+ "learning_rate": 6.112323569188927e-07,
3067
+ "loss": 0.0536,
3068
+ "step": 437
3069
+ },
3070
+ {
3071
+ "epoch": 2.568015602145295,
3072
+ "grad_norm": 0.24554386734962463,
3073
+ "learning_rate": 5.949390285710777e-07,
3074
+ "loss": 0.0519,
3075
+ "step": 438
3076
+ },
3077
+ {
3078
+ "epoch": 2.573866406630912,
3079
+ "grad_norm": 0.27577313780784607,
3080
+ "learning_rate": 5.788520597878477e-07,
3081
+ "loss": 0.0529,
3082
+ "step": 439
3083
+ },
3084
+ {
3085
+ "epoch": 2.5797172111165283,
3086
+ "grad_norm": 0.3495662212371826,
3087
+ "learning_rate": 5.629722041796292e-07,
3088
+ "loss": 0.0569,
3089
+ "step": 440
3090
+ },
3091
+ {
3092
+ "epoch": 2.585568015602145,
3093
+ "grad_norm": 0.2668460011482239,
3094
+ "learning_rate": 5.473002056544191e-07,
3095
+ "loss": 0.0453,
3096
+ "step": 441
3097
+ },
3098
+ {
3099
+ "epoch": 2.591418820087762,
3100
+ "grad_norm": 0.2764504551887512,
3101
+ "learning_rate": 5.318367983829393e-07,
3102
+ "loss": 0.0573,
3103
+ "step": 442
3104
+ },
3105
+ {
3106
+ "epoch": 2.5972696245733786,
3107
+ "grad_norm": 0.2935810983181,
3108
+ "learning_rate": 5.165827067642415e-07,
3109
+ "loss": 0.0495,
3110
+ "step": 443
3111
+ },
3112
+ {
3113
+ "epoch": 2.6031204290589955,
3114
+ "grad_norm": 0.2925431728363037,
3115
+ "learning_rate": 5.015386453917742e-07,
3116
+ "loss": 0.0535,
3117
+ "step": 444
3118
+ },
3119
+ {
3120
+ "epoch": 2.6089712335446125,
3121
+ "grad_norm": 0.2892134487628937,
3122
+ "learning_rate": 4.867053190199011e-07,
3123
+ "loss": 0.0607,
3124
+ "step": 445
3125
+ },
3126
+ {
3127
+ "epoch": 2.614822038030229,
3128
+ "grad_norm": 0.28982144594192505,
3129
+ "learning_rate": 4.720834225308962e-07,
3130
+ "loss": 0.0612,
3131
+ "step": 446
3132
+ },
3133
+ {
3134
+ "epoch": 2.620672842515846,
3135
+ "grad_norm": 0.24418623745441437,
3136
+ "learning_rate": 4.576736409023813e-07,
3137
+ "loss": 0.051,
3138
+ "step": 447
3139
+ },
3140
+ {
3141
+ "epoch": 2.626523647001463,
3142
+ "grad_norm": 0.26711544394493103,
3143
+ "learning_rate": 4.4347664917524293e-07,
3144
+ "loss": 0.0598,
3145
+ "step": 448
3146
+ },
3147
+ {
3148
+ "epoch": 2.6323744514870793,
3149
+ "grad_norm": 0.28195297718048096,
3150
+ "learning_rate": 4.29493112422007e-07,
3151
+ "loss": 0.0546,
3152
+ "step": 449
3153
+ },
3154
+ {
3155
+ "epoch": 2.638225255972696,
3156
+ "grad_norm": 0.29547280073165894,
3157
+ "learning_rate": 4.15723685715686e-07,
3158
+ "loss": 0.0535,
3159
+ "step": 450
3160
+ },
3161
+ {
3162
+ "epoch": 2.644076060458313,
3163
+ "grad_norm": 0.2511988878250122,
3164
+ "learning_rate": 4.0216901409908695e-07,
3165
+ "loss": 0.0509,
3166
+ "step": 451
3167
+ },
3168
+ {
3169
+ "epoch": 2.6499268649439296,
3170
+ "grad_norm": 0.28745976090431213,
3171
+ "learning_rate": 3.8882973255459975e-07,
3172
+ "loss": 0.0502,
3173
+ "step": 452
3174
+ },
3175
+ {
3176
+ "epoch": 2.6557776694295465,
3177
+ "grad_norm": 0.2565235495567322,
3178
+ "learning_rate": 3.7570646597444196e-07,
3179
+ "loss": 0.0477,
3180
+ "step": 453
3181
+ },
3182
+ {
3183
+ "epoch": 2.6616284739151634,
3184
+ "grad_norm": 0.29118213057518005,
3185
+ "learning_rate": 3.627998291313939e-07,
3186
+ "loss": 0.0669,
3187
+ "step": 454
3188
+ },
3189
+ {
3190
+ "epoch": 2.66747927840078,
3191
+ "grad_norm": 0.29614195227622986,
3192
+ "learning_rate": 3.5011042664999663e-07,
3193
+ "loss": 0.0574,
3194
+ "step": 455
3195
+ },
3196
+ {
3197
+ "epoch": 2.673330082886397,
3198
+ "grad_norm": 0.25614967942237854,
3199
+ "learning_rate": 3.3763885297822153e-07,
3200
+ "loss": 0.0588,
3201
+ "step": 456
3202
+ },
3203
+ {
3204
+ "epoch": 2.6791808873720138,
3205
+ "grad_norm": 0.27203789353370667,
3206
+ "learning_rate": 3.2538569235963216e-07,
3207
+ "loss": 0.0508,
3208
+ "step": 457
3209
+ },
3210
+ {
3211
+ "epoch": 2.6850316918576302,
3212
+ "grad_norm": 0.26867544651031494,
3213
+ "learning_rate": 3.133515188060077e-07,
3214
+ "loss": 0.055,
3215
+ "step": 458
3216
+ },
3217
+ {
3218
+ "epoch": 2.690882496343247,
3219
+ "grad_norm": 0.28810831904411316,
3220
+ "learning_rate": 3.015368960704584e-07,
3221
+ "loss": 0.0602,
3222
+ "step": 459
3223
+ },
3224
+ {
3225
+ "epoch": 2.696733300828864,
3226
+ "grad_norm": 0.28718477487564087,
3227
+ "learning_rate": 2.899423776210092e-07,
3228
+ "loss": 0.0625,
3229
+ "step": 460
3230
+ },
3231
+ {
3232
+ "epoch": 2.7025841053144806,
3233
+ "grad_norm": 0.2641659379005432,
3234
+ "learning_rate": 2.785685066146776e-07,
3235
+ "loss": 0.057,
3236
+ "step": 461
3237
+ },
3238
+ {
3239
+ "epoch": 2.7084349098000975,
3240
+ "grad_norm": 0.3124268352985382,
3241
+ "learning_rate": 2.6741581587202747e-07,
3242
+ "loss": 0.0567,
3243
+ "step": 462
3244
+ },
3245
+ {
3246
+ "epoch": 2.7142857142857144,
3247
+ "grad_norm": 0.26910921931266785,
3248
+ "learning_rate": 2.5648482785220865e-07,
3249
+ "loss": 0.0603,
3250
+ "step": 463
3251
+ },
3252
+ {
3253
+ "epoch": 2.720136518771331,
3254
+ "grad_norm": 0.26910898089408875,
3255
+ "learning_rate": 2.4577605462847764e-07,
3256
+ "loss": 0.0575,
3257
+ "step": 464
3258
+ },
3259
+ {
3260
+ "epoch": 2.725987323256948,
3261
+ "grad_norm": 0.2963363230228424,
3262
+ "learning_rate": 2.3528999786421758e-07,
3263
+ "loss": 0.0556,
3264
+ "step": 465
3265
+ },
3266
+ {
3267
+ "epoch": 2.7318381277425647,
3268
+ "grad_norm": 0.2956644892692566,
3269
+ "learning_rate": 2.25027148789429e-07,
3270
+ "loss": 0.0594,
3271
+ "step": 466
3272
+ },
3273
+ {
3274
+ "epoch": 2.737688932228181,
3275
+ "grad_norm": 0.26555967330932617,
3276
+ "learning_rate": 2.1498798817772281e-07,
3277
+ "loss": 0.059,
3278
+ "step": 467
3279
+ },
3280
+ {
3281
+ "epoch": 2.743539736713798,
3282
+ "grad_norm": 0.2682870626449585,
3283
+ "learning_rate": 2.0517298632379445e-07,
3284
+ "loss": 0.0531,
3285
+ "step": 468
3286
+ },
3287
+ {
3288
+ "epoch": 2.749390541199415,
3289
+ "grad_norm": 0.2852657437324524,
3290
+ "learning_rate": 1.9558260302139642e-07,
3291
+ "loss": 0.0498,
3292
+ "step": 469
3293
+ },
3294
+ {
3295
+ "epoch": 2.7552413456850315,
3296
+ "grad_norm": 0.27579066157341003,
3297
+ "learning_rate": 1.8621728754179392e-07,
3298
+ "loss": 0.0621,
3299
+ "step": 470
3300
+ },
3301
+ {
3302
+ "epoch": 2.7610921501706485,
3303
+ "grad_norm": 0.29085394740104675,
3304
+ "learning_rate": 1.770774786127244e-07,
3305
+ "loss": 0.0575,
3306
+ "step": 471
3307
+ },
3308
+ {
3309
+ "epoch": 2.7669429546562654,
3310
+ "grad_norm": 0.2594444155693054,
3311
+ "learning_rate": 1.6816360439783797e-07,
3312
+ "loss": 0.0522,
3313
+ "step": 472
3314
+ },
3315
+ {
3316
+ "epoch": 2.772793759141882,
3317
+ "grad_norm": 0.292959600687027,
3318
+ "learning_rate": 1.5947608247664558e-07,
3319
+ "loss": 0.0563,
3320
+ "step": 473
3321
+ },
3322
+ {
3323
+ "epoch": 2.778644563627499,
3324
+ "grad_norm": 0.2607786953449249,
3325
+ "learning_rate": 1.510153198249531e-07,
3326
+ "loss": 0.0505,
3327
+ "step": 474
3328
+ },
3329
+ {
3330
+ "epoch": 2.7844953681131157,
3331
+ "grad_norm": 0.285178542137146,
3332
+ "learning_rate": 1.4278171279579757e-07,
3333
+ "loss": 0.0601,
3334
+ "step": 475
3335
+ },
3336
+ {
3337
+ "epoch": 2.790346172598732,
3338
+ "grad_norm": 0.28294607996940613,
3339
+ "learning_rate": 1.3477564710088097e-07,
3340
+ "loss": 0.0539,
3341
+ "step": 476
3342
+ },
3343
+ {
3344
+ "epoch": 2.796196977084349,
3345
+ "grad_norm": 0.29305973649024963,
3346
+ "learning_rate": 1.2699749779249926e-07,
3347
+ "loss": 0.0648,
3348
+ "step": 477
3349
+ },
3350
+ {
3351
+ "epoch": 2.802047781569966,
3352
+ "grad_norm": 0.2848883867263794,
3353
+ "learning_rate": 1.1944762924597286e-07,
3354
+ "loss": 0.0596,
3355
+ "step": 478
3356
+ },
3357
+ {
3358
+ "epoch": 2.8078985860555825,
3359
+ "grad_norm": 0.27572354674339294,
3360
+ "learning_rate": 1.1212639514257829e-07,
3361
+ "loss": 0.0545,
3362
+ "step": 479
3363
+ },
3364
+ {
3365
+ "epoch": 2.8137493905411994,
3366
+ "grad_norm": 0.2800915539264679,
3367
+ "learning_rate": 1.0503413845297739e-07,
3368
+ "loss": 0.0586,
3369
+ "step": 480
3370
+ },
3371
+ {
3372
+ "epoch": 2.8196001950268164,
3373
+ "grad_norm": 0.27929428219795227,
3374
+ "learning_rate": 9.817119142115472e-08,
3375
+ "loss": 0.0535,
3376
+ "step": 481
3377
+ },
3378
+ {
3379
+ "epoch": 2.825450999512433,
3380
+ "grad_norm": 0.29202061891555786,
3381
+ "learning_rate": 9.15378755488483e-08,
3382
+ "loss": 0.0677,
3383
+ "step": 482
3384
+ },
3385
+ {
3386
+ "epoch": 2.8313018039980498,
3387
+ "grad_norm": 0.2733800411224365,
3388
+ "learning_rate": 8.513450158049109e-08,
3389
+ "loss": 0.057,
3390
+ "step": 483
3391
+ },
3392
+ {
3393
+ "epoch": 2.8371526084836667,
3394
+ "grad_norm": 0.27349552512168884,
3395
+ "learning_rate": 7.896136948865429e-08,
3396
+ "loss": 0.0595,
3397
+ "step": 484
3398
+ },
3399
+ {
3400
+ "epoch": 2.843003412969283,
3401
+ "grad_norm": 0.3204438388347626,
3402
+ "learning_rate": 7.301876845999368e-08,
3403
+ "loss": 0.0638,
3404
+ "step": 485
3405
+ },
3406
+ {
3407
+ "epoch": 2.8488542174549,
3408
+ "grad_norm": 0.2688818573951721,
3409
+ "learning_rate": 6.730697688170251e-08,
3410
+ "loss": 0.0514,
3411
+ "step": 486
3412
+ },
3413
+ {
3414
+ "epoch": 2.854705021940517,
3415
+ "grad_norm": 0.27996474504470825,
3416
+ "learning_rate": 6.182626232847044e-08,
3417
+ "loss": 0.06,
3418
+ "step": 487
3419
+ },
3420
+ {
3421
+ "epoch": 2.8605558264261335,
3422
+ "grad_norm": 0.28294089436531067,
3423
+ "learning_rate": 5.6576881549949e-08,
3424
+ "loss": 0.0476,
3425
+ "step": 488
3426
+ },
3427
+ {
3428
+ "epoch": 2.8664066309117504,
3429
+ "grad_norm": 0.26451075077056885,
3430
+ "learning_rate": 5.155908045872349e-08,
3431
+ "loss": 0.0587,
3432
+ "step": 489
3433
+ },
3434
+ {
3435
+ "epoch": 2.8722574353973673,
3436
+ "grad_norm": 0.28928130865097046,
3437
+ "learning_rate": 4.677309411879327e-08,
3438
+ "loss": 0.0518,
3439
+ "step": 490
3440
+ },
3441
+ {
3442
+ "epoch": 2.878108239882984,
3443
+ "grad_norm": 0.2694530487060547,
3444
+ "learning_rate": 4.221914673455896e-08,
3445
+ "loss": 0.0541,
3446
+ "step": 491
3447
+ },
3448
+ {
3449
+ "epoch": 2.8839590443686007,
3450
+ "grad_norm": 0.3015761077404022,
3451
+ "learning_rate": 3.7897451640321326e-08,
3452
+ "loss": 0.0602,
3453
+ "step": 492
3454
+ },
3455
+ {
3456
+ "epoch": 2.8898098488542177,
3457
+ "grad_norm": 0.2916721999645233,
3458
+ "learning_rate": 3.3808211290284886e-08,
3459
+ "loss": 0.0565,
3460
+ "step": 493
3461
+ },
3462
+ {
3463
+ "epoch": 2.895660653339834,
3464
+ "grad_norm": 0.2948850989341736,
3465
+ "learning_rate": 2.995161724907658e-08,
3466
+ "loss": 0.0619,
3467
+ "step": 494
3468
+ },
3469
+ {
3470
+ "epoch": 2.901511457825451,
3471
+ "grad_norm": 0.310370534658432,
3472
+ "learning_rate": 2.6327850182769065e-08,
3473
+ "loss": 0.0604,
3474
+ "step": 495
3475
+ },
3476
+ {
3477
+ "epoch": 2.907362262311068,
3478
+ "grad_norm": 0.2993476092815399,
3479
+ "learning_rate": 2.29370798504186e-08,
3480
+ "loss": 0.0581,
3481
+ "step": 496
3482
+ },
3483
+ {
3484
+ "epoch": 2.9132130667966845,
3485
+ "grad_norm": 0.2968423366546631,
3486
+ "learning_rate": 1.9779465096112505e-08,
3487
+ "loss": 0.0571,
3488
+ "step": 497
3489
+ },
3490
+ {
3491
+ "epoch": 2.9190638712823014,
3492
+ "grad_norm": 0.2754110097885132,
3493
+ "learning_rate": 1.6855153841527915e-08,
3494
+ "loss": 0.0585,
3495
+ "step": 498
3496
+ },
3497
+ {
3498
+ "epoch": 2.9249146757679183,
3499
+ "grad_norm": 0.28342047333717346,
3500
+ "learning_rate": 1.4164283079001196e-08,
3501
+ "loss": 0.0567,
3502
+ "step": 499
3503
+ },
3504
+ {
3505
+ "epoch": 2.930765480253535,
3506
+ "grad_norm": 0.2792755961418152,
3507
+ "learning_rate": 1.1706978865113072e-08,
3508
+ "loss": 0.0497,
3509
+ "step": 500
3510
+ },
3511
+ {
3512
+ "epoch": 2.9366162847391517,
3513
+ "grad_norm": 0.3103918135166168,
3514
+ "learning_rate": 9.48335631477948e-09,
3515
+ "loss": 0.0637,
3516
+ "step": 501
3517
+ },
3518
+ {
3519
+ "epoch": 2.9424670892247686,
3520
+ "grad_norm": 0.2594921588897705,
3521
+ "learning_rate": 7.49351959586253e-09,
3522
+ "loss": 0.0587,
3523
+ "step": 502
3524
+ },
3525
+ {
3526
+ "epoch": 2.948317893710385,
3527
+ "grad_norm": 0.2774949371814728,
3528
+ "learning_rate": 5.737561924288315e-09,
3529
+ "loss": 0.0536,
3530
+ "step": 503
3531
+ },
3532
+ {
3533
+ "epoch": 2.954168698196002,
3534
+ "grad_norm": 0.3025616407394409,
3535
+ "learning_rate": 4.2155655596809455e-09,
3536
+ "loss": 0.0559,
3537
+ "step": 504
3538
+ },
3539
+ {
3540
+ "epoch": 2.960019502681619,
3541
+ "grad_norm": 0.2954673767089844,
3542
+ "learning_rate": 2.9276018015089725e-09,
3543
+ "loss": 0.0669,
3544
+ "step": 505
3545
+ },
3546
+ {
3547
+ "epoch": 2.9658703071672354,
3548
+ "grad_norm": 0.2557486295700073,
3549
+ "learning_rate": 1.8737309857463916e-09,
3550
+ "loss": 0.0663,
3551
+ "step": 506
3552
+ },
3553
+ {
3554
+ "epoch": 2.9717211116528524,
3555
+ "grad_norm": 0.29354172945022583,
3556
+ "learning_rate": 1.054002482043237e-09,
3557
+ "loss": 0.0623,
3558
+ "step": 507
3559
+ },
3560
+ {
3561
+ "epoch": 2.9775719161384693,
3562
+ "grad_norm": 0.3089665174484253,
3563
+ "learning_rate": 4.684546914163201e-10,
3564
+ "loss": 0.054,
3565
+ "step": 508
3566
+ },
3567
+ {
3568
+ "epoch": 2.9834227206240858,
3569
+ "grad_norm": 0.3170253038406372,
3570
+ "learning_rate": 1.1711504444733567e-10,
3571
+ "loss": 0.0626,
3572
+ "step": 509
3573
+ },
3574
+ {
3575
+ "epoch": 2.9892735251097027,
3576
+ "grad_norm": 0.2732970118522644,
3577
+ "learning_rate": 0.0,
3578
+ "loss": 0.0551,
3579
+ "step": 510
3580
+ },
3581
+ {
3582
+ "epoch": 2.9892735251097027,
3583
+ "step": 510,
3584
+ "total_flos": 1494941780148224.0,
3585
+ "train_loss": 0.029626486676872944,
3586
+ "train_runtime": 67167.8945,
3587
+ "train_samples_per_second": 0.733,
3588
+ "train_steps_per_second": 0.008
3589
+ }
3590
+ ],
3591
+ "logging_steps": 1,
3592
+ "max_steps": 510,
3593
+ "num_input_tokens_seen": 0,
3594
+ "num_train_epochs": 3,
3595
+ "save_steps": 50,
3596
+ "stateful_callbacks": {
3597
+ "TrainerControl": {
3598
+ "args": {
3599
+ "should_epoch_stop": false,
3600
+ "should_evaluate": false,
3601
+ "should_log": false,
3602
+ "should_save": true,
3603
+ "should_training_stop": true
3604
+ },
3605
+ "attributes": {}
3606
+ }
3607
+ },
3608
+ "total_flos": 1494941780148224.0,
3609
+ "train_batch_size": 1,
3610
+ "trial_name": null,
3611
+ "trial_params": null
3612
+ }
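The summary entry that closes the log is internally consistent: 510 steps over 67,167.89 s is about 0.0076 steps per second (logged, rounded, as 0.008), and 0.733 samples per second over the same runtime implies roughly 49,200 samples processed across the ~3 epochs. A minimal sanity-check sketch, assuming the JSON above is saved locally as `trainer_state.json` (the filename is an assumption):

```python
import json

# Load the trainer state shown in the diff above; the local filename is an assumption.
with open("trainer_state.json") as f:
    state = json.load(f)

summary = state["log_history"][-1]   # the final entry holds the run totals
steps = summary["step"]              # 510
runtime = summary["train_runtime"]   # 67167.8945 seconds (~18.7 hours)

print(steps / runtime)                                 # ~0.0076, logged rounded as 0.008 steps/s
print(summary["train_samples_per_second"] * runtime)   # ~49,234 samples over ~3 epochs
```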
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6897fe15ebfcec7e006f6f475f7a65c74c8d3288c5be27ccaff0c77b1b2b4ef5
3
+ size 7480
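These three added lines are a Git LFS pointer rather than the binary itself: the pointer-spec version, the SHA-256 digest of the real `training_args.bin`, and its size in bytes (7,480). A minimal sketch of parsing such a pointer, assuming the checked-in pointer file is readable as plain text:

```python
# Parse a Git LFS pointer file (key-value lines) into a dict; the path is an assumption.
def read_lfs_pointer(path: str) -> dict:
    with open(path) as f:
        return dict(line.strip().split(" ", 1) for line in f if line.strip())

pointer = read_lfs_pointer("training_args.bin")  # the checked-in pointer, not the binary
print(pointer["oid"])    # sha256:6897fe15...
print(pointer["size"])   # 7480 (bytes)
```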
training_loss.png ADDED
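`training_loss.png` is presumably a plot of the per-step `loss` values logged above (steps 1–510). A minimal sketch of regenerating a similar curve from `trainer_state.json`, assuming matplotlib is installed; the output filename mirrors the one added in this commit:

```python
import json
import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    log = json.load(f)["log_history"]

# Keep only per-step entries; the final summary entry has no "loss" key.
points = [(e["step"], e["loss"]) for e in log if "loss" in e]
steps, losses = zip(*points)

plt.plot(steps, losses)
plt.xlabel("step")
plt.ylabel("training loss")
plt.savefig("training_loss.png")
```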
vocab.json ADDED
The diff for this file is too large to render.
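`vocab.json` in checkpoints of this family typically maps token strings to integer ids, which is why its diff is too large to render inline. A minimal sketch of inspecting it locally, assuming the file has been downloaded next to the script:

```python
import json

# vocab.json is assumed to map token strings to integer ids; the local path is an assumption.
with open("vocab.json", encoding="utf-8") as f:
    vocab = json.load(f)

print(len(vocab))                                        # vocabulary size
print(sorted(vocab.items(), key=lambda kv: kv[1])[:5])   # the lowest few token ids
```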