ryanmarten
committed on
Upload model
Browse files
- .gitattributes +1 -0
- README.md +61 -0
- added_tokens.json +24 -0
- all_results.json +8 -0
- config.json +29 -0
- generation_config.json +14 -0
- merges.txt +0 -0
- model-00001-of-00014.safetensors +3 -0
- model-00002-of-00014.safetensors +3 -0
- model-00003-of-00014.safetensors +3 -0
- model-00004-of-00014.safetensors +3 -0
- model-00005-of-00014.safetensors +3 -0
- model-00006-of-00014.safetensors +3 -0
- model-00007-of-00014.safetensors +3 -0
- model-00008-of-00014.safetensors +3 -0
- model-00009-of-00014.safetensors +3 -0
- model-00010-of-00014.safetensors +3 -0
- model-00011-of-00014.safetensors +3 -0
- model-00012-of-00014.safetensors +3 -0
- model-00013-of-00014.safetensors +3 -0
- model-00014-of-00014.safetensors +3 -0
- model.safetensors.index.json +778 -0
- special_tokens_map.json +31 -0
- tokenizer.json +3 -0
- tokenizer_config.json +208 -0
- train_results.json +8 -0
- trainer_log.jsonl +0 -0
- trainer_state.json +3696 -0
- training_args.bin +3 -0
- training_loss.png +0 -0
- vocab.json +0 -0
.gitattributes
CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md
ADDED
@@ -0,0 +1,61 @@
+---
+library_name: transformers
+license: other
+base_model: Qwen/Qwen2.5-32B-Instruct
+tags:
+- llama-factory
+- full
+- generated_from_trainer
+model-index:
+- name: original
+  results: []
+---
+
+<!-- This model card has been generated automatically according to the information the Trainer had access to. You
+should probably proofread and complete it, then remove this comment. -->
+
+# original
+
+This model is a fine-tuned version of [Qwen/Qwen2.5-32B-Instruct](https://huggingface.co/Qwen/Qwen2.5-32B-Instruct) on the Stratos-R1 dataset.
+
+## Model description
+
+More information needed
+
+## Intended uses & limitations
+
+More information needed
+
+## Training and evaluation data
+
+More information needed
+
+## Training procedure
+
+### Training hyperparameters
+
+The following hyperparameters were used during training:
+- learning_rate: 1e-05
+- train_batch_size: 1
+- eval_batch_size: 8
+- seed: 42
+- distributed_type: multi-GPU
+- num_devices: 8
+- gradient_accumulation_steps: 12
+- total_train_batch_size: 96
+- total_eval_batch_size: 64
+- optimizer: adamw_torch with betas=(0.9, 0.999) and epsilon=1e-08; no additional optimizer arguments
+- lr_scheduler_type: cosine
+- lr_scheduler_warmup_ratio: 0.1
+- num_epochs: 3.0
+
+### Training results
+
+
+
+### Framework versions
+
+- Transformers 4.46.1
+- Pytorch 2.5.1+cu124
+- Datasets 3.1.0
+- Tokenizers 0.20.3
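The generated card stops short of a usage example. Below is a minimal loading sketch; the repo id `ryanmarten/original` is an assumption inferred from the committer and model name and may not match the actual upload path.

```python
# Minimal sketch, assuming the (hypothetical) repo id below; adjust to the real upload path.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "ryanmarten/original"  # assumption, not confirmed by this commit

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype": "bfloat16" in config.json
    device_map="auto",           # ~65 GB of weights in bf16, so shard across devices
)

# Qwen2.5 ships a ChatML-style <|im_start|>/<|im_end|> template with the tokenizer.
messages = [{"role": "user", "content": "Prove that the sum of two even integers is even."}]
inputs = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

outputs = model.generate(inputs, max_new_tokens=512)
print(tokenizer.decode(outputs[0][inputs.shape[-1]:], skip_special_tokens=True))
```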
added_tokens.json
ADDED
@@ -0,0 +1,24 @@
+{
+  "</tool_call>": 151658,
+  "<tool_call>": 151657,
+  "<|box_end|>": 151649,
+  "<|box_start|>": 151648,
+  "<|endoftext|>": 151643,
+  "<|file_sep|>": 151664,
+  "<|fim_middle|>": 151660,
+  "<|fim_pad|>": 151662,
+  "<|fim_prefix|>": 151659,
+  "<|fim_suffix|>": 151661,
+  "<|im_end|>": 151645,
+  "<|im_start|>": 151644,
+  "<|image_pad|>": 151655,
+  "<|object_ref_end|>": 151647,
+  "<|object_ref_start|>": 151646,
+  "<|quad_end|>": 151651,
+  "<|quad_start|>": 151650,
+  "<|repo_name|>": 151663,
+  "<|video_pad|>": 151656,
+  "<|vision_end|>": 151653,
+  "<|vision_pad|>": 151654,
+  "<|vision_start|>": 151652
+}
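These ids can be sanity-checked against the tokenizer once it is loaded; `<|endoftext|>` and `<|im_end|>` are the same ids that config.json below uses for bos and eos. A small sketch, reusing the same hypothetical repo id as above:

```python
# Sketch: verify the special-token ids above against the loaded tokenizer.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("ryanmarten/original")  # hypothetical repo id

assert tok.convert_tokens_to_ids("<|endoftext|>") == 151643  # bos/pad id in config.json
assert tok.convert_tokens_to_ids("<|im_end|>") == 151645     # eos id in config.json
```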
all_results.json
ADDED
@@ -0,0 +1,8 @@
+{
+  "epoch": 2.998563906175203,
+  "total_flos": 1806937112969216.0,
+  "train_loss": 0.43391252791516166,
+  "train_runtime": 96882.8659,
+  "train_samples_per_second": 0.517,
+  "train_steps_per_second": 0.005
+}
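A rough consistency check of these aggregates is possible with the README hyperparameters. The logged rates are rounded (0.005 especially coarsely), so the estimates below carry roughly ten percent slack:

```python
# Back-of-the-envelope check of the aggregates above. total_train_batch_size
# comes from the README: 1 (per-device) x 8 (devices) x 12 (grad accumulation) = 96.
runtime_s  = 96882.8659
samples_ps = 0.517
steps_ps   = 0.005
epochs     = 2.998563906175203

samples_seen = samples_ps * runtime_s   # ~50,088 examples processed in total
dataset_size = samples_seen / epochs    # ~16,700 examples per epoch
steps_est    = steps_ps * runtime_s     # ~484 optimizer steps (coarsely rounded rate)
print(round(samples_seen), round(dataset_size), round(steps_est))
```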
config.json
ADDED
@@ -0,0 +1,29 @@
+{
+  "_name_or_path": "Qwen/Qwen2.5-32B-Instruct",
+  "architectures": [
+    "Qwen2ForCausalLM"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 151643,
+  "eos_token_id": 151645,
+  "hidden_act": "silu",
+  "hidden_size": 5120,
+  "initializer_range": 0.02,
+  "intermediate_size": 27648,
+  "max_position_embeddings": 32768,
+  "max_window_layers": 70,
+  "model_type": "qwen2",
+  "num_attention_heads": 40,
+  "num_hidden_layers": 64,
+  "num_key_value_heads": 8,
+  "rms_norm_eps": 1e-06,
+  "rope_scaling": null,
+  "rope_theta": 1000000.0,
+  "sliding_window": null,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.46.1",
+  "use_cache": false,
+  "use_sliding_window": false,
+  "vocab_size": 152064
+}
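These dimensions pin down the parameter count exactly. The sketch below derives it from the standard Qwen2 layout (GQA with 40 query heads over 8 KV heads, head_dim 128, biases on q/k/v projections only, untied embeddings, plus a final `model.norm.weight` assumed from that layout) and reproduces the byte total recorded in model.safetensors.index.json further down:

```python
# Sketch: parameter count implied by config.json vs. the sharded-checkpoint
# total_size (65,527,752,704 bytes) in model.safetensors.index.json.
hidden, inter, layers, vocab = 5120, 27648, 64, 152064
heads, kv_heads = 40, 8
head_dim = hidden // heads              # 128
kv_dim = kv_heads * head_dim            # 1024 (GQA: 5 query heads per KV head)

attn = hidden * hidden + hidden         # q_proj weight + bias
attn += 2 * (hidden * kv_dim + kv_dim)  # k_proj and v_proj weight + bias
attn += hidden * hidden                 # o_proj (no bias)
mlp = 3 * hidden * inter                # gate_proj, up_proj, down_proj
norms = 2 * hidden                      # input + post-attention RMSNorm
per_layer = attn + mlp + norms

total = layers * per_layer + 2 * vocab * hidden + hidden  # + embed, untied lm_head, final norm
print(total)      # 32,763,876,352 parameters
print(total * 2)  # 65,527,752,704 bytes in bf16 -- matches the index metadata
```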
generation_config.json
ADDED
@@ -0,0 +1,14 @@
+{
+  "bos_token_id": 151643,
+  "do_sample": true,
+  "eos_token_id": [
+    151645,
+    151643
+  ],
+  "pad_token_id": 151643,
+  "repetition_penalty": 1.05,
+  "temperature": 0.7,
+  "top_k": 20,
+  "top_p": 0.8,
+  "transformers_version": "4.46.1"
+}
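These values become the sampling defaults that `model.generate()` picks up from the repository; they can also be passed explicitly per call. A sketch, reusing `model` and `inputs` from the loading example above:

```python
# Sketch: override the repository defaults explicitly (same values as above).
outputs = model.generate(
    inputs,
    do_sample=True,
    temperature=0.7,
    top_k=20,
    top_p=0.8,
    repetition_penalty=1.05,
    max_new_tokens=512,
)
```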
merges.txt
ADDED
The diff for this file is too large to render.
See raw diff
model-00001-of-00014.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:32af224e777d5b97a0a3dd7a789d2fb96c4b8cede12730adc93f79ce9b21860b
+size 4891730992
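The shards are stored as Git LFS pointers: the `oid` is the SHA-256 of the actual file contents, so an already-downloaded shard can be verified locally. A minimal sketch:

```python
# Sketch: verify a downloaded shard against the LFS pointer's oid (its SHA-256).
import hashlib

def sha256_of(path: str, chunk: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        while block := f.read(chunk):  # stream in 1 MiB chunks; the shard is ~4.9 GB
            h.update(block)
    return h.hexdigest()

expected = "32af224e777d5b97a0a3dd7a789d2fb96c4b8cede12730adc93f79ce9b21860b"
assert sha256_of("model-00001-of-00014.safetensors") == expected
```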
model-00002-of-00014.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9f658a2587249c09d4f1bb05d52bf87e4123c5a6349e4333b3dd88629160b51b
+size 4876059352
model-00003-of-00014.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7514aebfcbb85454d7601993f94141186e47479ca3ecf84cc114257003f7f121
+size 4876059384
model-00004-of-00014.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f39900d9a16f245a9a7aea2a176df287dab323a2b9a178aa1edf5c2727a4e999
+size 4876059416
model-00005-of-00014.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f46f791d5bcb27a2114647ad838d04ca8a9b9b013b7ac1c6813825de01a3fa0e
+size 4876059416
model-00006-of-00014.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4d6304c87bb65ff40a31b27a0ec1f00a3484aa6a487b8b9e3a0282ba87142d10
+size 4876059416
model-00007-of-00014.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:53d1a8d8cb75d749f232ad88ebc27b7043e91b337c9467d24d03aeb08cc54417
+size 4876059416
model-00008-of-00014.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8e9cdd8ed55293dac6a4439dab845e4c9a0cdc194e2df83105cb1dae6a81e564
+size 4876059416
model-00009-of-00014.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cb199baf8f9e483536e15a6340be52d968f44f36bc704a64e4a61b6203ab47ce
+size 4876059416
model-00010-of-00014.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b9af52e354d83b8252d081fe7f4160d9b81d643387122ee4e3f1dd267496ae2a
+size 4876059416
model-00011-of-00014.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0be921c696c544eb26a7fbf5af91e5200e875dd105862827f93417f22e7756fc
+size 4876059416
model-00012-of-00014.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9a3d9e32f81d09d38c5dcf26241bf7988018e19b22df4ae179c552725ae0f90d
+size 4876059416
model-00013-of-00014.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8aee87989ba67976b9e6fb7309ee24fe595c5d7ab471193912d045cadad3078d
+size 4876059416
model-00014-of-00014.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:89da63652b1652ea77624e7f656c7f28b2dbd55fa6e64fbc8d4db5d958e79859
+size 2123397800
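The fourteen `size` fields sum to slightly more than the `total_size` recorded in model.safetensors.index.json below. A plausible reading (an assumption, not stated anywhere in this upload) is that the difference is the per-shard safetensors JSON header, which the index's tensor-byte total does not count:

```python
# Sketch: shard bytes on disk vs. the index's total_size (65,527,752,704).
# The gap is assumed to be safetensors header overhead, not missing tensor data.
sizes = [4891730992, 4876059352, 4876059384] + [4876059416] * 10 + [2123397800]
print(sum(sizes))                   # 65,527,841,688 bytes on disk
print(sum(sizes) - 65_527_752_704)  # 88,984 bytes across 14 shard headers
```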
model.safetensors.index.json
ADDED
@@ -0,0 +1,778 @@
+{
+  "metadata": {
+    "total_size": 65527752704
+  },
+  "weight_map": {
+    "lm_head.weight": "model-00014-of-00014.safetensors",
+    "model.embed_tokens.weight": "model-00001-of-00014.safetensors",
+    "model.layers.0.input_layernorm.weight": "model-00001-of-00014.safetensors",
+    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00014.safetensors",
+    "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00014.safetensors",
+    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00014.safetensors",
+    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00014.safetensors",
+    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.1.input_layernorm.weight": "model-00001-of-00014.safetensors",
+    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00014.safetensors",
+    "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00014.safetensors",
+    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00014.safetensors",
+    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00014.safetensors",
+    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.10.input_layernorm.weight": "model-00003-of-00014.safetensors",
+    "model.layers.10.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.10.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.10.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.10.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
+    "model.layers.10.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
+    "model.layers.10.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.10.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.10.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
+    "model.layers.10.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.10.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
+    "model.layers.10.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.11.input_layernorm.weight": "model-00003-of-00014.safetensors",
+    "model.layers.11.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.11.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.11.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.11.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
+    "model.layers.11.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
+    "model.layers.11.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.11.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.11.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
+    "model.layers.11.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.11.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
+    "model.layers.11.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.12.input_layernorm.weight": "model-00003-of-00014.safetensors",
+    "model.layers.12.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.12.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.12.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.12.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
+    "model.layers.12.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
+    "model.layers.12.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.12.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.12.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
+    "model.layers.12.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.12.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
+    "model.layers.12.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.13.input_layernorm.weight": "model-00004-of-00014.safetensors",
+    "model.layers.13.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.13.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.13.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.13.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
+    "model.layers.13.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
+    "model.layers.13.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.13.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.13.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
+    "model.layers.13.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.13.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
+    "model.layers.13.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
+    "model.layers.14.input_layernorm.weight": "model-00004-of-00014.safetensors",
+    "model.layers.14.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.14.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.14.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.14.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
+    "model.layers.14.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
+    "model.layers.14.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.14.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.14.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
+    "model.layers.14.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.14.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
+    "model.layers.14.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.15.input_layernorm.weight": "model-00004-of-00014.safetensors",
+    "model.layers.15.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.15.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.15.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.15.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
+    "model.layers.15.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
+    "model.layers.15.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.15.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.15.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
+    "model.layers.15.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.15.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
+    "model.layers.15.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.16.input_layernorm.weight": "model-00004-of-00014.safetensors",
+    "model.layers.16.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.16.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.16.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.16.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
+    "model.layers.16.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
+    "model.layers.16.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.16.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.16.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
+    "model.layers.16.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.16.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
+    "model.layers.16.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.17.input_layernorm.weight": "model-00004-of-00014.safetensors",
+    "model.layers.17.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.17.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.17.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.17.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
+    "model.layers.17.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
+    "model.layers.17.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.17.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.17.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
+    "model.layers.17.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.17.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
+    "model.layers.17.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.18.input_layernorm.weight": "model-00005-of-00014.safetensors",
+    "model.layers.18.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.18.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.18.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.18.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
+    "model.layers.18.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
+    "model.layers.18.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.18.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.18.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
+    "model.layers.18.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.18.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
+    "model.layers.18.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
+    "model.layers.19.input_layernorm.weight": "model-00005-of-00014.safetensors",
+    "model.layers.19.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.19.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.19.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.19.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
+    "model.layers.19.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
+    "model.layers.19.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.19.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.19.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
+    "model.layers.19.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.19.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
+    "model.layers.19.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.2.input_layernorm.weight": "model-00001-of-00014.safetensors",
+    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.2.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00014.safetensors",
+    "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00014.safetensors",
+    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00014.safetensors",
+    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00014.safetensors",
+    "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.20.input_layernorm.weight": "model-00005-of-00014.safetensors",
+    "model.layers.20.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.20.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.20.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.20.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
+    "model.layers.20.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
+    "model.layers.20.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.20.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.20.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
+    "model.layers.20.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.20.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
+    "model.layers.20.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.21.input_layernorm.weight": "model-00005-of-00014.safetensors",
+    "model.layers.21.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.21.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.21.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.21.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
+    "model.layers.21.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
+    "model.layers.21.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.21.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.21.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
+    "model.layers.21.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.21.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
+    "model.layers.21.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.22.input_layernorm.weight": "model-00005-of-00014.safetensors",
+    "model.layers.22.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.22.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.22.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.22.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
+    "model.layers.22.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
+    "model.layers.22.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.22.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.22.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
+    "model.layers.22.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.22.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
+    "model.layers.22.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.23.input_layernorm.weight": "model-00006-of-00014.safetensors",
+    "model.layers.23.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.23.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.23.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.23.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
+    "model.layers.23.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
+    "model.layers.23.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.23.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.23.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
+    "model.layers.23.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.23.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
+    "model.layers.23.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
+    "model.layers.24.input_layernorm.weight": "model-00006-of-00014.safetensors",
+    "model.layers.24.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.24.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.24.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.24.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
+    "model.layers.24.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
+    "model.layers.24.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.24.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.24.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
+    "model.layers.24.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.24.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
+    "model.layers.24.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.25.input_layernorm.weight": "model-00006-of-00014.safetensors",
+    "model.layers.25.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.25.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.25.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.25.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
+    "model.layers.25.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
+    "model.layers.25.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.25.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.25.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
+    "model.layers.25.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.25.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
+    "model.layers.25.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.26.input_layernorm.weight": "model-00006-of-00014.safetensors",
+    "model.layers.26.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.26.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.26.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.26.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
+    "model.layers.26.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
+    "model.layers.26.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.26.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.26.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
+    "model.layers.26.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.26.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
+    "model.layers.26.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.27.input_layernorm.weight": "model-00006-of-00014.safetensors",
+    "model.layers.27.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.27.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.27.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.27.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
+    "model.layers.27.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
+    "model.layers.27.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.27.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.27.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
+    "model.layers.27.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.27.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
+    "model.layers.27.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.28.input_layernorm.weight": "model-00007-of-00014.safetensors",
+    "model.layers.28.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.28.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.28.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.28.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
+    "model.layers.28.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
+    "model.layers.28.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.28.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.28.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
+    "model.layers.28.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.28.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
+    "model.layers.28.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
+    "model.layers.29.input_layernorm.weight": "model-00007-of-00014.safetensors",
+    "model.layers.29.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.29.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.29.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.29.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
+    "model.layers.29.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
+    "model.layers.29.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.29.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.29.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
+    "model.layers.29.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.29.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
+    "model.layers.29.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.3.input_layernorm.weight": "model-00002-of-00014.safetensors",
+    "model.layers.3.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
+    "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.3.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
+    "model.layers.3.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
+    "model.layers.3.self_attn.k_proj.bias": "model-00001-of-00014.safetensors",
+    "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.3.self_attn.q_proj.bias": "model-00001-of-00014.safetensors",
+    "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.3.self_attn.v_proj.bias": "model-00001-of-00014.safetensors",
+    "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00014.safetensors",
+    "model.layers.30.input_layernorm.weight": "model-00007-of-00014.safetensors",
+    "model.layers.30.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.30.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.30.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.30.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
+    "model.layers.30.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
+    "model.layers.30.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.30.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.30.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
+    "model.layers.30.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.30.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
+    "model.layers.30.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.31.input_layernorm.weight": "model-00007-of-00014.safetensors",
+    "model.layers.31.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.31.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.31.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.31.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
+    "model.layers.31.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
+    "model.layers.31.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.31.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.31.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
+    "model.layers.31.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.31.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
+    "model.layers.31.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.32.input_layernorm.weight": "model-00007-of-00014.safetensors",
+    "model.layers.32.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.32.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.32.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.32.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
+    "model.layers.32.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
+    "model.layers.32.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.32.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.32.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
+    "model.layers.32.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.32.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
+    "model.layers.32.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.33.input_layernorm.weight": "model-00008-of-00014.safetensors",
+    "model.layers.33.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.33.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.33.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.33.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
+    "model.layers.33.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
+    "model.layers.33.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.33.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.33.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
+    "model.layers.33.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.33.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
+    "model.layers.33.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
+    "model.layers.34.input_layernorm.weight": "model-00008-of-00014.safetensors",
+    "model.layers.34.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.34.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.34.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.34.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
+    "model.layers.34.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
+    "model.layers.34.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.34.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.34.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
+    "model.layers.34.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.34.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
+    "model.layers.34.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.35.input_layernorm.weight": "model-00008-of-00014.safetensors",
+    "model.layers.35.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.35.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.35.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.35.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
+    "model.layers.35.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
+    "model.layers.35.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.35.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.35.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
+    "model.layers.35.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.35.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
+    "model.layers.35.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.36.input_layernorm.weight": "model-00008-of-00014.safetensors",
+    "model.layers.36.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.36.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.36.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.36.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
+    "model.layers.36.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
+    "model.layers.36.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.36.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.36.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
+    "model.layers.36.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.36.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
+    "model.layers.36.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.37.input_layernorm.weight": "model-00008-of-00014.safetensors",
+    "model.layers.37.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.37.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.37.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.37.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
+    "model.layers.37.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
+    "model.layers.37.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.37.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.37.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
+    "model.layers.37.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.37.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
+    "model.layers.37.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.38.input_layernorm.weight": "model-00009-of-00014.safetensors",
+    "model.layers.38.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.38.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.38.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.38.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
+    "model.layers.38.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
+    "model.layers.38.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.38.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.38.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
+    "model.layers.38.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.38.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
+    "model.layers.38.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
+    "model.layers.39.input_layernorm.weight": "model-00009-of-00014.safetensors",
+    "model.layers.39.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.39.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.39.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.39.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
+    "model.layers.39.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
+    "model.layers.39.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.39.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.39.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
+    "model.layers.39.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.39.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
+    "model.layers.39.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.4.input_layernorm.weight": "model-00002-of-00014.safetensors",
+    "model.layers.4.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
+    "model.layers.4.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
+    "model.layers.4.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
+    "model.layers.4.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
+    "model.layers.4.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
+    "model.layers.4.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
+    "model.layers.4.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
+    "model.layers.4.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
+    "model.layers.4.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
+    "model.layers.4.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
+    "model.layers.4.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
+    "model.layers.40.input_layernorm.weight": "model-00009-of-00014.safetensors",
+    "model.layers.40.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.40.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.40.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.40.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
+    "model.layers.40.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
+    "model.layers.40.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.40.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.40.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
+    "model.layers.40.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.40.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
+    "model.layers.40.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.41.input_layernorm.weight": "model-00009-of-00014.safetensors",
+    "model.layers.41.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.41.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.41.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.41.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
+    "model.layers.41.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
+    "model.layers.41.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.41.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.41.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
+    "model.layers.41.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.41.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
+    "model.layers.41.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.42.input_layernorm.weight": "model-00009-of-00014.safetensors",
+    "model.layers.42.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.42.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.42.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.42.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
+    "model.layers.42.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
+    "model.layers.42.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.42.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.42.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
+    "model.layers.42.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.42.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
+    "model.layers.42.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.43.input_layernorm.weight": "model-00010-of-00014.safetensors",
+    "model.layers.43.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.43.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.43.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.43.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
+    "model.layers.43.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
+    "model.layers.43.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.43.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.43.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
+    "model.layers.43.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.43.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
+    "model.layers.43.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
+    "model.layers.44.input_layernorm.weight": "model-00010-of-00014.safetensors",
+    "model.layers.44.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.44.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.44.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.44.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
+    "model.layers.44.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
+    "model.layers.44.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.44.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.44.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
+    "model.layers.44.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.44.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
+    "model.layers.44.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.45.input_layernorm.weight": "model-00010-of-00014.safetensors",
+    "model.layers.45.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.45.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.45.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.45.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
+    "model.layers.45.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
+    "model.layers.45.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.45.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.45.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
+    "model.layers.45.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.45.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
+    "model.layers.45.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.46.input_layernorm.weight": "model-00010-of-00014.safetensors",
+    "model.layers.46.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.46.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.46.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.46.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
+    "model.layers.46.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
+    "model.layers.46.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.46.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.46.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
+    "model.layers.46.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.46.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
+    "model.layers.46.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.47.input_layernorm.weight": "model-00010-of-00014.safetensors",
+    "model.layers.47.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.47.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.47.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.47.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
+    "model.layers.47.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
+    "model.layers.47.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.47.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.47.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
+    "model.layers.47.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.47.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
+    "model.layers.47.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.48.input_layernorm.weight": "model-00011-of-00014.safetensors",
+    "model.layers.48.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.48.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.48.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.48.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
+    "model.layers.48.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
+    "model.layers.48.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.48.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.48.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
+    "model.layers.48.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.48.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
+    "model.layers.48.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
+    "model.layers.49.input_layernorm.weight": "model-00011-of-00014.safetensors",
+    "model.layers.49.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.49.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.49.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.49.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
+    "model.layers.49.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
+    "model.layers.49.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.49.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.49.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
+    "model.layers.49.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
+    "model.layers.49.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
|
547 |
+
"model.layers.49.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
|
548 |
+
"model.layers.5.input_layernorm.weight": "model-00002-of-00014.safetensors",
|
549 |
+
"model.layers.5.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
|
550 |
+
"model.layers.5.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
|
551 |
+
"model.layers.5.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
|
552 |
+
"model.layers.5.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
|
553 |
+
"model.layers.5.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
|
554 |
+
"model.layers.5.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
|
555 |
+
"model.layers.5.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
|
556 |
+
"model.layers.5.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
|
557 |
+
"model.layers.5.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
|
558 |
+
"model.layers.5.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
|
559 |
+
"model.layers.5.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
|
560 |
+
"model.layers.50.input_layernorm.weight": "model-00011-of-00014.safetensors",
|
561 |
+
"model.layers.50.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
|
562 |
+
"model.layers.50.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
|
563 |
+
"model.layers.50.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
|
564 |
+
"model.layers.50.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
|
565 |
+
"model.layers.50.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
|
566 |
+
"model.layers.50.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
|
567 |
+
"model.layers.50.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
|
568 |
+
"model.layers.50.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
|
569 |
+
"model.layers.50.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
|
570 |
+
"model.layers.50.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
|
571 |
+
"model.layers.50.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
|
572 |
+
"model.layers.51.input_layernorm.weight": "model-00011-of-00014.safetensors",
|
573 |
+
"model.layers.51.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
|
574 |
+
"model.layers.51.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
|
575 |
+
"model.layers.51.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
|
576 |
+
"model.layers.51.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
|
577 |
+
"model.layers.51.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
|
578 |
+
"model.layers.51.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
|
579 |
+
"model.layers.51.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
|
580 |
+
"model.layers.51.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
|
581 |
+
"model.layers.51.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
|
582 |
+
"model.layers.51.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
|
583 |
+
"model.layers.51.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
|
584 |
+
"model.layers.52.input_layernorm.weight": "model-00011-of-00014.safetensors",
|
585 |
+
"model.layers.52.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
|
586 |
+
"model.layers.52.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
|
587 |
+
"model.layers.52.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
|
588 |
+
"model.layers.52.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
|
589 |
+
"model.layers.52.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
|
590 |
+
"model.layers.52.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
|
591 |
+
"model.layers.52.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
|
592 |
+
"model.layers.52.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
|
593 |
+
"model.layers.52.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
|
594 |
+
"model.layers.52.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
|
595 |
+
"model.layers.52.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
|
596 |
+
"model.layers.53.input_layernorm.weight": "model-00012-of-00014.safetensors",
|
597 |
+
"model.layers.53.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
|
598 |
+
"model.layers.53.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
|
599 |
+
"model.layers.53.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
|
600 |
+
"model.layers.53.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
|
601 |
+
"model.layers.53.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
|
602 |
+
"model.layers.53.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
|
603 |
+
"model.layers.53.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
|
604 |
+
"model.layers.53.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
|
605 |
+
"model.layers.53.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
|
606 |
+
"model.layers.53.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
|
607 |
+
"model.layers.53.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
|
608 |
+
"model.layers.54.input_layernorm.weight": "model-00012-of-00014.safetensors",
|
609 |
+
"model.layers.54.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
|
610 |
+
"model.layers.54.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
|
611 |
+
"model.layers.54.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
|
612 |
+
"model.layers.54.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
|
613 |
+
"model.layers.54.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
|
614 |
+
"model.layers.54.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
|
615 |
+
"model.layers.54.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
|
616 |
+
"model.layers.54.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
|
617 |
+
"model.layers.54.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
|
618 |
+
"model.layers.54.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
|
619 |
+
"model.layers.54.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
|
620 |
+
"model.layers.55.input_layernorm.weight": "model-00012-of-00014.safetensors",
|
621 |
+
"model.layers.55.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
|
622 |
+
"model.layers.55.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
|
623 |
+
"model.layers.55.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
|
624 |
+
"model.layers.55.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
|
625 |
+
"model.layers.55.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
|
626 |
+
"model.layers.55.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
|
627 |
+
"model.layers.55.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
|
628 |
+
"model.layers.55.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
|
629 |
+
"model.layers.55.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
|
630 |
+
"model.layers.55.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
|
631 |
+
"model.layers.55.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
|
632 |
+
"model.layers.56.input_layernorm.weight": "model-00012-of-00014.safetensors",
|
633 |
+
"model.layers.56.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
|
634 |
+
"model.layers.56.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
|
635 |
+
"model.layers.56.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
|
636 |
+
"model.layers.56.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
|
637 |
+
"model.layers.56.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
|
638 |
+
"model.layers.56.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
|
639 |
+
"model.layers.56.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
|
640 |
+
"model.layers.56.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
|
641 |
+
"model.layers.56.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
|
642 |
+
"model.layers.56.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
|
643 |
+
"model.layers.56.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
|
644 |
+
"model.layers.57.input_layernorm.weight": "model-00012-of-00014.safetensors",
|
645 |
+
"model.layers.57.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
|
646 |
+
"model.layers.57.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
|
647 |
+
"model.layers.57.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
|
648 |
+
"model.layers.57.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
|
649 |
+
"model.layers.57.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
|
650 |
+
"model.layers.57.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
|
651 |
+
"model.layers.57.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
|
652 |
+
"model.layers.57.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
|
653 |
+
"model.layers.57.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
|
654 |
+
"model.layers.57.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
|
655 |
+
"model.layers.57.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
|
656 |
+
"model.layers.58.input_layernorm.weight": "model-00013-of-00014.safetensors",
|
657 |
+
"model.layers.58.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
|
658 |
+
"model.layers.58.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
|
659 |
+
"model.layers.58.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
|
660 |
+
"model.layers.58.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
|
661 |
+
"model.layers.58.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
|
662 |
+
"model.layers.58.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
|
663 |
+
"model.layers.58.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
|
664 |
+
"model.layers.58.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
|
665 |
+
"model.layers.58.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
|
666 |
+
"model.layers.58.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
|
667 |
+
"model.layers.58.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
|
668 |
+
"model.layers.59.input_layernorm.weight": "model-00013-of-00014.safetensors",
|
669 |
+
"model.layers.59.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
|
670 |
+
"model.layers.59.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
|
671 |
+
"model.layers.59.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
|
672 |
+
"model.layers.59.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
|
673 |
+
"model.layers.59.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
|
674 |
+
"model.layers.59.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
|
675 |
+
"model.layers.59.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
|
676 |
+
"model.layers.59.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
|
677 |
+
"model.layers.59.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
|
678 |
+
"model.layers.59.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
|
679 |
+
"model.layers.59.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
|
680 |
+
"model.layers.6.input_layernorm.weight": "model-00002-of-00014.safetensors",
|
681 |
+
"model.layers.6.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
|
682 |
+
"model.layers.6.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
|
683 |
+
"model.layers.6.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
|
684 |
+
"model.layers.6.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
|
685 |
+
"model.layers.6.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
|
686 |
+
"model.layers.6.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
|
687 |
+
"model.layers.6.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
|
688 |
+
"model.layers.6.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
|
689 |
+
"model.layers.6.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
|
690 |
+
"model.layers.6.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
|
691 |
+
"model.layers.6.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
|
692 |
+
"model.layers.60.input_layernorm.weight": "model-00013-of-00014.safetensors",
|
693 |
+
"model.layers.60.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
|
694 |
+
"model.layers.60.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
|
695 |
+
"model.layers.60.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
|
696 |
+
"model.layers.60.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
|
697 |
+
"model.layers.60.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
|
698 |
+
"model.layers.60.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
|
699 |
+
"model.layers.60.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
|
700 |
+
"model.layers.60.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
|
701 |
+
"model.layers.60.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
|
702 |
+
"model.layers.60.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
|
703 |
+
"model.layers.60.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
|
704 |
+
"model.layers.61.input_layernorm.weight": "model-00013-of-00014.safetensors",
|
705 |
+
"model.layers.61.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
|
706 |
+
"model.layers.61.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
|
707 |
+
"model.layers.61.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
|
708 |
+
"model.layers.61.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
|
709 |
+
"model.layers.61.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
|
710 |
+
"model.layers.61.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
|
711 |
+
"model.layers.61.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
|
712 |
+
"model.layers.61.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
|
713 |
+
"model.layers.61.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
|
714 |
+
"model.layers.61.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
|
715 |
+
"model.layers.61.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
|
716 |
+
"model.layers.62.input_layernorm.weight": "model-00013-of-00014.safetensors",
|
717 |
+
"model.layers.62.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
|
718 |
+
"model.layers.62.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
|
719 |
+
"model.layers.62.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
|
720 |
+
"model.layers.62.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
|
721 |
+
"model.layers.62.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
|
722 |
+
"model.layers.62.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
|
723 |
+
"model.layers.62.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
|
724 |
+
"model.layers.62.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
|
725 |
+
"model.layers.62.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
|
726 |
+
"model.layers.62.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
|
727 |
+
"model.layers.62.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
|
728 |
+
"model.layers.63.input_layernorm.weight": "model-00014-of-00014.safetensors",
|
729 |
+
"model.layers.63.mlp.down_proj.weight": "model-00014-of-00014.safetensors",
|
730 |
+
"model.layers.63.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
|
731 |
+
"model.layers.63.mlp.up_proj.weight": "model-00014-of-00014.safetensors",
|
732 |
+
"model.layers.63.post_attention_layernorm.weight": "model-00014-of-00014.safetensors",
|
733 |
+
"model.layers.63.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
|
734 |
+
"model.layers.63.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
|
735 |
+
"model.layers.63.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
|
736 |
+
"model.layers.63.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
|
737 |
+
"model.layers.63.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
|
738 |
+
"model.layers.63.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
|
739 |
+
"model.layers.63.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
|
740 |
+
"model.layers.7.input_layernorm.weight": "model-00002-of-00014.safetensors",
|
741 |
+
"model.layers.7.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
|
742 |
+
"model.layers.7.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
|
743 |
+
"model.layers.7.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
|
744 |
+
"model.layers.7.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
|
745 |
+
"model.layers.7.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
|
746 |
+
"model.layers.7.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
|
747 |
+
"model.layers.7.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
|
748 |
+
"model.layers.7.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
|
749 |
+
"model.layers.7.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
|
750 |
+
"model.layers.7.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
|
751 |
+
"model.layers.7.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
|
752 |
+
"model.layers.8.input_layernorm.weight": "model-00003-of-00014.safetensors",
|
753 |
+
"model.layers.8.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
|
754 |
+
"model.layers.8.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
|
755 |
+
"model.layers.8.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
|
756 |
+
"model.layers.8.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
|
757 |
+
"model.layers.8.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
|
758 |
+
"model.layers.8.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
|
759 |
+
"model.layers.8.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
|
760 |
+
"model.layers.8.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
|
761 |
+
"model.layers.8.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
|
762 |
+
"model.layers.8.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
|
763 |
+
"model.layers.8.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
|
764 |
+
"model.layers.9.input_layernorm.weight": "model-00003-of-00014.safetensors",
|
765 |
+
"model.layers.9.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
|
766 |
+
"model.layers.9.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
|
767 |
+
"model.layers.9.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
|
768 |
+
"model.layers.9.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
|
769 |
+
"model.layers.9.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
|
770 |
+
"model.layers.9.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
|
771 |
+
"model.layers.9.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
|
772 |
+
"model.layers.9.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
|
773 |
+
"model.layers.9.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
|
774 |
+
"model.layers.9.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
|
775 |
+
"model.layers.9.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
|
776 |
+
"model.norm.weight": "model-00014-of-00014.safetensors"
|
777 |
+
}
|
778 |
+
}
|
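The `weight_map` above is a flat lookup from tensor name to the shard file that stores it. A minimal sketch of resolving one tensor by hand, assuming the `safetensors` package is installed and a local clone of this repository sits at a hypothetical `./original` directory:

```python
# Sketch: resolve a single tensor through model.safetensors.index.json.
# MODEL_DIR and the tensor name below are illustrative, not prescriptive.
import json
from pathlib import Path

from safetensors import safe_open  # pip install safetensors

MODEL_DIR = Path("./original")  # hypothetical local clone of this repo

with open(MODEL_DIR / "model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.layers.43.mlp.down_proj.weight"
shard = index["weight_map"][name]  # -> "model-00010-of-00014.safetensors"

with safe_open(str(MODEL_DIR / shard), framework="pt") as f:
    tensor = f.get_tensor(name)  # reads just this tensor, not the whole shard

print(name, tuple(tensor.shape), tensor.dtype)
```

In practice `AutoModelForCausalLM.from_pretrained` consumes this index automatically; the manual route is mainly useful for auditing which shard holds a given layer.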
special_tokens_map.json
ADDED
@@ -0,0 +1,31 @@
+{
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>",
+    "<|object_ref_start|>",
+    "<|object_ref_end|>",
+    "<|box_start|>",
+    "<|box_end|>",
+    "<|quad_start|>",
+    "<|quad_end|>",
+    "<|vision_start|>",
+    "<|vision_end|>",
+    "<|vision_pad|>",
+    "<|image_pad|>",
+    "<|video_pad|>"
+  ],
+  "eos_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
tokenizer.json
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
+size 11421896
tokenizer_config.json
ADDED
@@ -0,0 +1,208 @@
+{
+  "add_bos_token": false,
+  "add_prefix_space": false,
+  "added_tokens_decoder": {
+    "151643": {
+      "content": "<|endoftext|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151644": {
+      "content": "<|im_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151645": {
+      "content": "<|im_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151646": {
+      "content": "<|object_ref_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151647": {
+      "content": "<|object_ref_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151648": {
+      "content": "<|box_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151649": {
+      "content": "<|box_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151650": {
+      "content": "<|quad_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151651": {
+      "content": "<|quad_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151652": {
+      "content": "<|vision_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151653": {
+      "content": "<|vision_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151654": {
+      "content": "<|vision_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151655": {
+      "content": "<|image_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151656": {
+      "content": "<|video_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151657": {
+      "content": "<tool_call>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151658": {
+      "content": "</tool_call>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151659": {
+      "content": "<|fim_prefix|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151660": {
+      "content": "<|fim_middle|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151661": {
+      "content": "<|fim_suffix|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151662": {
+      "content": "<|fim_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151663": {
+      "content": "<|repo_name|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151664": {
+      "content": "<|file_sep|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    }
+  },
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>",
+    "<|object_ref_start|>",
+    "<|object_ref_end|>",
+    "<|box_start|>",
+    "<|box_end|>",
+    "<|quad_start|>",
+    "<|quad_end|>",
+    "<|vision_start|>",
+    "<|vision_end|>",
+    "<|vision_pad|>",
+    "<|image_pad|>",
+    "<|video_pad|>"
+  ],
+  "bos_token": null,
+  "chat_template": "{%- if tools %}\n    {{- '<|im_start|>system\\n' }}\n    {%- if messages[0]['role'] == 'system' %}\n        {{- messages[0]['content'] }}\n    {%- else %}\n        {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}\n    {%- endif %}\n    {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n    {%- for tool in tools %}\n        {{- \"\\n\" }}\n        {{- tool | tojson }}\n    {%- endfor %}\n    {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n    {%- if messages[0]['role'] == 'system' %}\n        {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n    {%- else %}\n        {{- '<|im_start|>system\\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\\n' }}\n    {%- endif %}\n{%- endif %}\n{%- for message in messages %}\n    {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n        {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n    {%- elif message.role == \"assistant\" %}\n        {{- '<|im_start|>' + message.role }}\n        {%- if message.content %}\n            {{- '\\n' + message.content }}\n        {%- endif %}\n        {%- for tool_call in message.tool_calls %}\n            {%- if tool_call.function is defined %}\n                {%- set tool_call = tool_call.function %}\n            {%- endif %}\n            {{- '\\n<tool_call>\\n{\"name\": \"' }}\n            {{- tool_call.name }}\n            {{- '\", \"arguments\": ' }}\n            {{- tool_call.arguments | tojson }}\n            {{- '}\\n</tool_call>' }}\n        {%- endfor %}\n        {{- '<|im_end|>\\n' }}\n    {%- elif message.role == \"tool\" %}\n        {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n            {{- '<|im_start|>user' }}\n        {%- endif %}\n        {{- '\\n<tool_response>\\n' }}\n        {{- message.content }}\n        {{- '\\n</tool_response>' }}\n        {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n            {{- '<|im_end|>\\n' }}\n        {%- endif %}\n    {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n    {{- '<|im_start|>assistant\\n' }}\n{%- endif %}\n",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "<|endoftext|>",
+  "errors": "replace",
+  "model_max_length": 131072,
+  "pad_token": "<|endoftext|>",
+  "padding_side": "right",
+  "split_special_tokens": false,
+  "tokenizer_class": "Qwen2Tokenizer",
+  "unk_token": null
+}
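The `chat_template` field above is the Jinja template `transformers` uses to serialize a message list into the `<|im_start|>`/`<|im_end|>` format the model was trained on. A short usage sketch, assuming `transformers` is installed; the repo path is a placeholder:

```python
# Sketch: render a conversation with the chat template from tokenizer_config.json.
# "path/to/this-model" is a placeholder for a local clone or the hub repo id.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("path/to/this-model")

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "What is 2 + 2?"},
]

# add_generation_prompt=True appends the trailing '<|im_start|>assistant\n'
# from the template's final branch, so generation continues as the assistant.
text = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(text)
```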
train_results.json
ADDED
@@ -0,0 +1,8 @@
+{
+    "epoch": 2.998563906175203,
+    "total_flos": 1806937112969216.0,
+    "train_loss": 0.43391252791516166,
+    "train_runtime": 96882.8659,
+    "train_samples_per_second": 0.517,
+    "train_steps_per_second": 0.005
+}
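`train_results.json` above holds only run-level aggregates; the per-step records live in `trainer_log.jsonl` (listed next, too large to render) and in the `log_history` of `trainer_state.json` below. A hedged sketch of rebuilding a loss curve like `training_loss.png` from the JSONL log; the exact field names are an assumption based on the `log_history` entries:

```python
# Sketch: plot training loss from trainer_log.jsonl (one JSON object per line).
# Field names are assumed to match the trainer_state.json log_history entries;
# some logging setups use "current_steps" instead of "step", so try both.
import json

import matplotlib.pyplot as plt

steps, losses = [], []
with open("trainer_log.jsonl") as f:
    for line in f:
        rec = json.loads(line)
        if "loss" in rec:  # skip records without a training loss
            steps.append(rec.get("step", rec.get("current_steps")))
            losses.append(rec["loss"])

plt.plot(steps, losses)
plt.xlabel("step")
plt.ylabel("training loss")
plt.savefig("training_loss.png")
```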
trainer_log.jsonl
ADDED
The diff for this file is too large to render.
trainer_state.json
ADDED
@@ -0,0 +1,3696 @@
+{
+  "best_metric": null,
+  "best_model_checkpoint": null,
+  "epoch": 2.998563906175203,
+  "eval_steps": 500,
+  "global_step": 522,
+  "is_hyper_param_search": false,
+  "is_local_process_zero": true,
+  "is_world_process_zero": true,
+  "log_history": [
+    {
+      "epoch": 0.0057443752991862135,
+      "grad_norm": 2.7811572551727295,
+      "learning_rate": 1.886792452830189e-07,
+      "loss": 0.7552,
+      "step": 1
+    },
+    {
+      "epoch": 0.011488750598372427,
+      "grad_norm": 2.7668187618255615,
+      "learning_rate": 3.773584905660378e-07,
+      "loss": 0.7816,
+      "step": 2
+    },
+    {
+      "epoch": 0.01723312589755864,
+      "grad_norm": 2.714698314666748,
+      "learning_rate": 5.660377358490567e-07,
+      "loss": 0.8084,
+      "step": 3
+    },
+    {
+      "epoch": 0.022977501196744854,
+      "grad_norm": 2.6264641284942627,
+      "learning_rate": 7.547169811320755e-07,
+      "loss": 0.7774,
+      "step": 4
+    },
+    {
+      "epoch": 0.028721876495931067,
+      "grad_norm": 2.5013437271118164,
+      "learning_rate": 9.433962264150944e-07,
+      "loss": 0.7726,
+      "step": 5
+    },
+    {
+      "epoch": 0.03446625179511728,
+      "grad_norm": 2.7753398418426514,
+      "learning_rate": 1.1320754716981133e-06,
+      "loss": 0.7708,
+      "step": 6
+    },
+    {
+      "epoch": 0.040210627094303494,
+      "grad_norm": 2.3244540691375732,
+      "learning_rate": 1.3207547169811322e-06,
+      "loss": 0.7759,
+      "step": 7
+    },
+    {
+      "epoch": 0.04595500239348971,
+      "grad_norm": 1.9560602903366089,
+      "learning_rate": 1.509433962264151e-06,
+      "loss": 0.7493,
+      "step": 8
+    },
+    {
+      "epoch": 0.05169937769267592,
+      "grad_norm": 1.9135451316833496,
+      "learning_rate": 1.6981132075471698e-06,
+      "loss": 0.7216,
+      "step": 9
+    },
+    {
+      "epoch": 0.057443752991862135,
+      "grad_norm": 1.8730158805847168,
+      "learning_rate": 1.8867924528301889e-06,
+      "loss": 0.7732,
+      "step": 10
+    },
+    {
+      "epoch": 0.06318812829104835,
+      "grad_norm": 1.520505666732788,
+      "learning_rate": 2.075471698113208e-06,
+      "loss": 0.7432,
+      "step": 11
+    },
+    {
+      "epoch": 0.06893250359023456,
+      "grad_norm": 1.342366099357605,
+      "learning_rate": 2.2641509433962266e-06,
+      "loss": 0.6762,
+      "step": 12
+    },
+    {
+      "epoch": 0.07467687888942078,
+      "grad_norm": 1.3205207586288452,
+      "learning_rate": 2.4528301886792453e-06,
+      "loss": 0.7395,
+      "step": 13
+    },
+    {
+      "epoch": 0.08042125418860699,
+      "grad_norm": 1.3457564115524292,
+      "learning_rate": 2.6415094339622644e-06,
+      "loss": 0.7279,
+      "step": 14
+    },
+    {
+      "epoch": 0.0861656294877932,
+      "grad_norm": 1.1626056432724,
+      "learning_rate": 2.830188679245283e-06,
+      "loss": 0.7218,
+      "step": 15
+    },
+    {
+      "epoch": 0.09191000478697942,
+      "grad_norm": 1.1689456701278687,
+      "learning_rate": 3.018867924528302e-06,
+      "loss": 0.7202,
+      "step": 16
+    },
+    {
+      "epoch": 0.09765438008616563,
+      "grad_norm": 1.100240707397461,
+      "learning_rate": 3.207547169811321e-06,
+      "loss": 0.6957,
+      "step": 17
+    },
+    {
+      "epoch": 0.10339875538535184,
+      "grad_norm": 0.9969179630279541,
+      "learning_rate": 3.3962264150943395e-06,
+      "loss": 0.695,
+      "step": 18
+    },
+    {
+      "epoch": 0.10914313068453806,
+      "grad_norm": 0.8645837306976318,
+      "learning_rate": 3.5849056603773586e-06,
+      "loss": 0.6393,
+      "step": 19
+    },
+    {
+      "epoch": 0.11488750598372427,
+      "grad_norm": 0.726235568523407,
+      "learning_rate": 3.7735849056603777e-06,
+      "loss": 0.6471,
+      "step": 20
+    },
+    {
+      "epoch": 0.12063188128291048,
+      "grad_norm": 0.7744720578193665,
+      "learning_rate": 3.962264150943396e-06,
+      "loss": 0.6303,
+      "step": 21
+    },
+    {
+      "epoch": 0.1263762565820967,
+      "grad_norm": 0.8271118998527527,
+      "learning_rate": 4.150943396226416e-06,
+      "loss": 0.6215,
+      "step": 22
+    },
+    {
+      "epoch": 0.13212063188128292,
+      "grad_norm": 0.7437446713447571,
+      "learning_rate": 4.339622641509435e-06,
+      "loss": 0.5848,
+      "step": 23
+    },
+    {
+      "epoch": 0.13786500718046912,
+      "grad_norm": 0.6880975365638733,
+      "learning_rate": 4.528301886792453e-06,
+      "loss": 0.6208,
+      "step": 24
+    },
+    {
+      "epoch": 0.14360938247965535,
+      "grad_norm": 0.6708999872207642,
+      "learning_rate": 4.716981132075472e-06,
+      "loss": 0.6067,
+      "step": 25
+    },
+    {
+      "epoch": 0.14935375777884155,
+      "grad_norm": 0.5787381529808044,
+      "learning_rate": 4.905660377358491e-06,
+      "loss": 0.5981,
+      "step": 26
+    },
+    {
+      "epoch": 0.15509813307802778,
+      "grad_norm": 0.6275501847267151,
+      "learning_rate": 5.09433962264151e-06,
+      "loss": 0.6233,
+      "step": 27
+    },
+    {
+      "epoch": 0.16084250837721398,
+      "grad_norm": 0.6212226748466492,
+      "learning_rate": 5.283018867924529e-06,
+      "loss": 0.5856,
+      "step": 28
+    },
+    {
+      "epoch": 0.1665868836764002,
+      "grad_norm": 0.5922460556030273,
+      "learning_rate": 5.4716981132075475e-06,
+      "loss": 0.5698,
+      "step": 29
+    },
+    {
+      "epoch": 0.1723312589755864,
+      "grad_norm": 0.5896409749984741,
+      "learning_rate": 5.660377358490566e-06,
+      "loss": 0.5768,
+      "step": 30
+    },
+    {
+      "epoch": 0.17807563427477263,
+      "grad_norm": 0.5254840850830078,
+      "learning_rate": 5.849056603773585e-06,
+      "loss": 0.539,
+      "step": 31
+    },
+    {
+      "epoch": 0.18382000957395883,
+      "grad_norm": 0.5485825538635254,
+      "learning_rate": 6.037735849056604e-06,
+      "loss": 0.5176,
+      "step": 32
+    },
+    {
+      "epoch": 0.18956438487314506,
+      "grad_norm": 0.5224868655204773,
+      "learning_rate": 6.226415094339623e-06,
+      "loss": 0.5541,
+      "step": 33
+    },
+    {
+      "epoch": 0.19530876017233126,
+      "grad_norm": 0.521843433380127,
+      "learning_rate": 6.415094339622642e-06,
+      "loss": 0.5707,
+      "step": 34
+    },
+    {
+      "epoch": 0.20105313547151749,
+      "grad_norm": 0.5119784474372864,
+      "learning_rate": 6.60377358490566e-06,
+      "loss": 0.5567,
+      "step": 35
+    },
+    {
+      "epoch": 0.20679751077070369,
+      "grad_norm": 0.5173502564430237,
+      "learning_rate": 6.792452830188679e-06,
+      "loss": 0.5481,
+      "step": 36
+    },
+    {
+      "epoch": 0.2125418860698899,
+      "grad_norm": 0.41040942072868347,
+      "learning_rate": 6.981132075471699e-06,
+      "loss": 0.5361,
+      "step": 37
+    },
+    {
+      "epoch": 0.2182862613690761,
+      "grad_norm": 0.4279431104660034,
+      "learning_rate": 7.169811320754717e-06,
+      "loss": 0.5454,
+      "step": 38
+    },
+    {
+      "epoch": 0.22403063666826234,
+      "grad_norm": 0.4288845956325531,
+      "learning_rate": 7.358490566037736e-06,
+      "loss": 0.5459,
+      "step": 39
+    },
+    {
+      "epoch": 0.22977501196744854,
+      "grad_norm": 0.4141804277896881,
+      "learning_rate": 7.5471698113207555e-06,
+      "loss": 0.5267,
+      "step": 40
+    },
+    {
+      "epoch": 0.23551938726663477,
+      "grad_norm": 4.088698387145996,
+      "learning_rate": 7.735849056603775e-06,
+      "loss": 0.5617,
+      "step": 41
+    },
+    {
+      "epoch": 0.24126376256582097,
+      "grad_norm": 0.40810075402259827,
+      "learning_rate": 7.924528301886793e-06,
+      "loss": 0.5066,
+      "step": 42
+    },
+    {
+      "epoch": 0.2470081378650072,
+      "grad_norm": 0.4773496687412262,
+      "learning_rate": 8.113207547169812e-06,
+      "loss": 0.5375,
+      "step": 43
+    },
+    {
+      "epoch": 0.2527525131641934,
+      "grad_norm": 0.3834821581840515,
+      "learning_rate": 8.301886792452832e-06,
+      "loss": 0.5502,
+      "step": 44
+    },
+    {
+      "epoch": 0.2584968884633796,
+      "grad_norm": 0.38557955622673035,
+      "learning_rate": 8.49056603773585e-06,
+      "loss": 0.5338,
+      "step": 45
+    },
+    {
+      "epoch": 0.26424126376256585,
+      "grad_norm": 0.3510483503341675,
+      "learning_rate": 8.67924528301887e-06,
+      "loss": 0.4924,
+      "step": 46
+    },
+    {
+      "epoch": 0.26998563906175205,
+      "grad_norm": 0.3511549234390259,
+      "learning_rate": 8.867924528301887e-06,
+      "loss": 0.5186,
+      "step": 47
+    },
+    {
+      "epoch": 0.27573001436093825,
+      "grad_norm": 0.41062384843826294,
+      "learning_rate": 9.056603773584907e-06,
+      "loss": 0.5021,
+      "step": 48
+    },
+    {
+      "epoch": 0.28147438966012445,
+      "grad_norm": 0.3798637092113495,
+      "learning_rate": 9.245283018867926e-06,
+      "loss": 0.5338,
+      "step": 49
+    },
+    {
+      "epoch": 0.2872187649593107,
+      "grad_norm": 0.35235193371772766,
+      "learning_rate": 9.433962264150944e-06,
+      "loss": 0.4922,
+      "step": 50
+    },
+    {
+      "epoch": 0.2929631402584969,
+      "grad_norm": 0.3465921878814697,
+      "learning_rate": 9.622641509433963e-06,
+      "loss": 0.4825,
+      "step": 51
+    },
+    {
+      "epoch": 0.2987075155576831,
+      "grad_norm": 0.36125242710113525,
+      "learning_rate": 9.811320754716981e-06,
+      "loss": 0.5178,
+      "step": 52
+    },
+    {
+      "epoch": 0.3044518908568693,
+      "grad_norm": 0.3282877504825592,
+      "learning_rate": 1e-05,
+      "loss": 0.494,
+      "step": 53
+    },
+    {
+      "epoch": 0.31019626615605556,
+      "grad_norm": 0.3224699795246124,
+      "learning_rate": 9.999887825938495e-06,
+      "loss": 0.4878,
+      "step": 54
+    },
+    {
+      "epoch": 0.31594064145524176,
+      "grad_norm": 0.36641696095466614,
|
392 |
+
"learning_rate": 9.999551308787183e-06,
|
393 |
+
"loss": 0.4905,
|
394 |
+
"step": 55
|
395 |
+
},
|
396 |
+
{
|
397 |
+
"epoch": 0.32168501675442795,
|
398 |
+
"grad_norm": 0.321207195520401,
|
399 |
+
"learning_rate": 9.998990463645464e-06,
|
400 |
+
"loss": 0.4956,
|
401 |
+
"step": 56
|
402 |
+
},
|
403 |
+
{
|
404 |
+
"epoch": 0.32742939205361415,
|
405 |
+
"grad_norm": 0.32767999172210693,
|
406 |
+
"learning_rate": 9.998205315678248e-06,
|
407 |
+
"loss": 0.5046,
|
408 |
+
"step": 57
|
409 |
+
},
|
410 |
+
{
|
411 |
+
"epoch": 0.3331737673528004,
|
412 |
+
"grad_norm": 0.3131692409515381,
|
413 |
+
"learning_rate": 9.997195900114833e-06,
|
414 |
+
"loss": 0.4961,
|
415 |
+
"step": 58
|
416 |
+
},
|
417 |
+
{
|
418 |
+
"epoch": 0.3389181426519866,
|
419 |
+
"grad_norm": 0.304837167263031,
|
420 |
+
"learning_rate": 9.995962262247314e-06,
|
421 |
+
"loss": 0.4712,
|
422 |
+
"step": 59
|
423 |
+
},
|
424 |
+
{
|
425 |
+
"epoch": 0.3446625179511728,
|
426 |
+
"grad_norm": 0.3071947395801544,
|
427 |
+
"learning_rate": 9.994504457428557e-06,
|
428 |
+
"loss": 0.4851,
|
429 |
+
"step": 60
|
430 |
+
},
|
431 |
+
{
|
432 |
+
"epoch": 0.350406893250359,
|
433 |
+
"grad_norm": 0.344100683927536,
|
434 |
+
"learning_rate": 9.99282255106972e-06,
|
435 |
+
"loss": 0.4947,
|
436 |
+
"step": 61
|
437 |
+
},
|
438 |
+
{
|
439 |
+
"epoch": 0.35615126854954526,
|
440 |
+
"grad_norm": 0.32712873816490173,
|
441 |
+
"learning_rate": 9.99091661863731e-06,
|
442 |
+
"loss": 0.502,
|
443 |
+
"step": 62
|
444 |
+
},
|
445 |
+
{
|
446 |
+
"epoch": 0.36189564384873146,
|
447 |
+
"grad_norm": 0.2983171045780182,
|
448 |
+
"learning_rate": 9.988786745649798e-06,
|
449 |
+
"loss": 0.4892,
|
450 |
+
"step": 63
|
451 |
+
},
|
452 |
+
{
|
453 |
+
"epoch": 0.36764001914791766,
|
454 |
+
"grad_norm": 0.45408669114112854,
|
455 |
+
"learning_rate": 9.986433027673786e-06,
|
456 |
+
"loss": 0.4738,
|
457 |
+
"step": 64
|
458 |
+
},
|
459 |
+
{
|
460 |
+
"epoch": 0.37338439444710386,
|
461 |
+
"grad_norm": 0.34534087777137756,
|
462 |
+
"learning_rate": 9.983855570319716e-06,
|
463 |
+
"loss": 0.4915,
|
464 |
+
"step": 65
|
465 |
+
},
|
466 |
+
{
|
467 |
+
"epoch": 0.3791287697462901,
|
468 |
+
"grad_norm": 0.32527977228164673,
|
469 |
+
"learning_rate": 9.981054489237132e-06,
|
470 |
+
"loss": 0.4849,
|
471 |
+
"step": 66
|
472 |
+
},
|
473 |
+
{
|
474 |
+
"epoch": 0.3848731450454763,
|
475 |
+
"grad_norm": 0.2880174219608307,
|
476 |
+
"learning_rate": 9.978029910109491e-06,
|
477 |
+
"loss": 0.4474,
|
478 |
+
"step": 67
|
479 |
+
},
|
480 |
+
{
|
481 |
+
"epoch": 0.3906175203446625,
|
482 |
+
"grad_norm": 0.3428208529949188,
|
483 |
+
"learning_rate": 9.974781968648523e-06,
|
484 |
+
"loss": 0.4731,
|
485 |
+
"step": 68
|
486 |
+
},
|
487 |
+
{
|
488 |
+
"epoch": 0.3963618956438487,
|
489 |
+
"grad_norm": 0.33169832825660706,
|
490 |
+
"learning_rate": 9.971310810588141e-06,
|
491 |
+
"loss": 0.4951,
|
492 |
+
"step": 69
|
493 |
+
},
|
494 |
+
{
|
495 |
+
"epoch": 0.40210627094303497,
|
496 |
+
"grad_norm": 0.3041759729385376,
|
497 |
+
"learning_rate": 9.967616591677906e-06,
|
498 |
+
"loss": 0.4278,
|
499 |
+
"step": 70
|
500 |
+
},
|
501 |
+
{
|
502 |
+
"epoch": 0.40785064624222117,
|
503 |
+
"grad_norm": 0.3041941225528717,
|
504 |
+
"learning_rate": 9.963699477676031e-06,
|
505 |
+
"loss": 0.4755,
|
506 |
+
"step": 71
|
507 |
+
},
|
508 |
+
{
|
509 |
+
"epoch": 0.41359502154140737,
|
510 |
+
"grad_norm": 0.30077141523361206,
|
511 |
+
"learning_rate": 9.959559644341954e-06,
|
512 |
+
"loss": 0.4572,
|
513 |
+
"step": 72
|
514 |
+
},
|
515 |
+
{
|
516 |
+
"epoch": 0.41933939684059357,
|
517 |
+
"grad_norm": 0.36518850922584534,
|
518 |
+
"learning_rate": 9.95519727742844e-06,
|
519 |
+
"loss": 0.4752,
|
520 |
+
"step": 73
|
521 |
+
},
|
522 |
+
{
|
523 |
+
"epoch": 0.4250837721397798,
|
524 |
+
"grad_norm": 0.2795243263244629,
|
525 |
+
"learning_rate": 9.950612572673255e-06,
|
526 |
+
"loss": 0.4759,
|
527 |
+
"step": 74
|
528 |
+
},
|
529 |
+
{
|
530 |
+
"epoch": 0.430828147438966,
|
531 |
+
"grad_norm": 0.30531245470046997,
|
532 |
+
"learning_rate": 9.945805735790383e-06,
|
533 |
+
"loss": 0.4897,
|
534 |
+
"step": 75
|
535 |
+
},
|
536 |
+
{
|
537 |
+
"epoch": 0.4365725227381522,
|
538 |
+
"grad_norm": 0.31547942757606506,
|
539 |
+
"learning_rate": 9.940776982460787e-06,
|
540 |
+
"loss": 0.466,
|
541 |
+
"step": 76
|
542 |
+
},
|
543 |
+
{
|
544 |
+
"epoch": 0.4423168980373384,
|
545 |
+
"grad_norm": 0.276068776845932,
|
546 |
+
"learning_rate": 9.935526538322744e-06,
|
547 |
+
"loss": 0.4882,
|
548 |
+
"step": 77
|
549 |
+
},
|
550 |
+
{
|
551 |
+
"epoch": 0.4480612733365247,
|
552 |
+
"grad_norm": 0.34043920040130615,
|
553 |
+
"learning_rate": 9.930054638961709e-06,
|
554 |
+
"loss": 0.4685,
|
555 |
+
"step": 78
|
556 |
+
},
|
557 |
+
{
|
558 |
+
"epoch": 0.4538056486357109,
|
559 |
+
"grad_norm": 0.2898786664009094,
|
560 |
+
"learning_rate": 9.924361529899754e-06,
|
561 |
+
"loss": 0.4756,
|
562 |
+
"step": 79
|
563 |
+
},
|
564 |
+
{
|
565 |
+
"epoch": 0.4595500239348971,
|
566 |
+
"grad_norm": 0.270378977060318,
|
567 |
+
"learning_rate": 9.918447466584545e-06,
|
568 |
+
"loss": 0.4599,
|
569 |
+
"step": 80
|
570 |
+
},
|
571 |
+
{
|
572 |
+
"epoch": 0.4652943992340833,
|
573 |
+
"grad_norm": 0.35345959663391113,
|
574 |
+
"learning_rate": 9.91231271437788e-06,
|
575 |
+
"loss": 0.4629,
|
576 |
+
"step": 81
|
577 |
+
},
|
578 |
+
{
|
579 |
+
"epoch": 0.47103877453326953,
|
580 |
+
"grad_norm": 0.3456457555294037,
|
581 |
+
"learning_rate": 9.905957548543794e-06,
|
582 |
+
"loss": 0.481,
|
583 |
+
"step": 82
|
584 |
+
},
|
585 |
+
{
|
586 |
+
"epoch": 0.47678314983245573,
|
587 |
+
"grad_norm": 0.2923058271408081,
|
588 |
+
"learning_rate": 9.899382254236186e-06,
|
589 |
+
"loss": 0.4679,
|
590 |
+
"step": 83
|
591 |
+
},
|
592 |
+
{
|
593 |
+
"epoch": 0.48252752513164193,
|
594 |
+
"grad_norm": 0.3531160354614258,
|
595 |
+
"learning_rate": 9.892587126486046e-06,
|
596 |
+
"loss": 0.4906,
|
597 |
+
"step": 84
|
598 |
+
},
|
599 |
+
{
|
600 |
+
"epoch": 0.48827190043082813,
|
601 |
+
"grad_norm": 0.36479318141937256,
|
602 |
+
"learning_rate": 9.885572470188207e-06,
|
603 |
+
"loss": 0.4569,
|
604 |
+
"step": 85
|
605 |
+
},
|
606 |
+
{
|
607 |
+
"epoch": 0.4940162757300144,
|
608 |
+
"grad_norm": 0.33483925461769104,
|
609 |
+
"learning_rate": 9.878338600087658e-06,
|
610 |
+
"loss": 0.4668,
|
611 |
+
"step": 86
|
612 |
+
},
|
613 |
+
{
|
614 |
+
"epoch": 0.4997606510292006,
|
615 |
+
"grad_norm": 0.30246540904045105,
|
616 |
+
"learning_rate": 9.87088584076544e-06,
|
617 |
+
"loss": 0.4847,
|
618 |
+
"step": 87
|
619 |
+
},
|
620 |
+
{
|
621 |
+
"epoch": 0.5055050263283868,
|
622 |
+
"grad_norm": 0.3419553339481354,
|
623 |
+
"learning_rate": 9.863214526624065e-06,
|
624 |
+
"loss": 0.4698,
|
625 |
+
"step": 88
|
626 |
+
},
|
627 |
+
{
|
628 |
+
"epoch": 0.511249401627573,
|
629 |
+
"grad_norm": 0.30689287185668945,
|
630 |
+
"learning_rate": 9.85532500187252e-06,
|
631 |
+
"loss": 0.4646,
|
632 |
+
"step": 89
|
633 |
+
},
|
634 |
+
{
|
635 |
+
"epoch": 0.5169937769267592,
|
636 |
+
"grad_norm": 0.36106929183006287,
|
637 |
+
"learning_rate": 9.847217620510815e-06,
|
638 |
+
"loss": 0.4746,
|
639 |
+
"step": 90
|
640 |
+
},
|
641 |
+
{
|
642 |
+
"epoch": 0.5227381522259454,
|
643 |
+
"grad_norm": 0.29505378007888794,
|
644 |
+
"learning_rate": 9.83889274631411e-06,
|
645 |
+
"loss": 0.4571,
|
646 |
+
"step": 91
|
647 |
+
},
|
648 |
+
{
|
649 |
+
"epoch": 0.5284825275251317,
|
650 |
+
"grad_norm": 0.29619649052619934,
|
651 |
+
"learning_rate": 9.830350752816386e-06,
|
652 |
+
"loss": 0.4578,
|
653 |
+
"step": 92
|
654 |
+
},
|
655 |
+
{
|
656 |
+
"epoch": 0.5342269028243178,
|
657 |
+
"grad_norm": 0.3188607692718506,
|
658 |
+
"learning_rate": 9.821592023293686e-06,
|
659 |
+
"loss": 0.4508,
|
660 |
+
"step": 93
|
661 |
+
},
|
662 |
+
{
|
663 |
+
"epoch": 0.5399712781235041,
|
664 |
+
"grad_norm": 0.2994546592235565,
|
665 |
+
"learning_rate": 9.81261695074691e-06,
|
666 |
+
"loss": 0.4774,
|
667 |
+
"step": 94
|
668 |
+
},
|
669 |
+
{
|
670 |
+
"epoch": 0.5457156534226902,
|
671 |
+
"grad_norm": 0.27736401557922363,
|
672 |
+
"learning_rate": 9.803425937884202e-06,
|
673 |
+
"loss": 0.4627,
|
674 |
+
"step": 95
|
675 |
+
},
|
676 |
+
{
|
677 |
+
"epoch": 0.5514600287218765,
|
678 |
+
"grad_norm": 0.313815176486969,
|
679 |
+
"learning_rate": 9.794019397102852e-06,
|
680 |
+
"loss": 0.4753,
|
681 |
+
"step": 96
|
682 |
+
},
|
683 |
+
{
|
684 |
+
"epoch": 0.5572044040210627,
|
685 |
+
"grad_norm": 0.3198351562023163,
|
686 |
+
"learning_rate": 9.784397750470818e-06,
|
687 |
+
"loss": 0.45,
|
688 |
+
"step": 97
|
689 |
+
},
|
690 |
+
{
|
691 |
+
"epoch": 0.5629487793202489,
|
692 |
+
"grad_norm": 0.3304007351398468,
|
693 |
+
"learning_rate": 9.774561429707769e-06,
|
694 |
+
"loss": 0.4879,
|
695 |
+
"step": 98
|
696 |
+
},
|
697 |
+
{
|
698 |
+
"epoch": 0.5686931546194351,
|
699 |
+
"grad_norm": 0.2905180752277374,
|
700 |
+
"learning_rate": 9.764510876165727e-06,
|
701 |
+
"loss": 0.448,
|
702 |
+
"step": 99
|
703 |
+
},
|
704 |
+
{
|
705 |
+
"epoch": 0.5744375299186214,
|
706 |
+
"grad_norm": 0.31816208362579346,
|
707 |
+
"learning_rate": 9.754246540809257e-06,
|
708 |
+
"loss": 0.4483,
|
709 |
+
"step": 100
|
710 |
+
},
|
711 |
+
{
|
712 |
+
"epoch": 0.5801819052178075,
|
713 |
+
"grad_norm": 0.3271932005882263,
|
714 |
+
"learning_rate": 9.743768884195233e-06,
|
715 |
+
"loss": 0.4772,
|
716 |
+
"step": 101
|
717 |
+
},
|
718 |
+
{
|
719 |
+
"epoch": 0.5859262805169938,
|
720 |
+
"grad_norm": 0.3400787115097046,
|
721 |
+
"learning_rate": 9.733078376452172e-06,
|
722 |
+
"loss": 0.4762,
|
723 |
+
"step": 102
|
724 |
+
},
|
725 |
+
{
|
726 |
+
"epoch": 0.59167065581618,
|
727 |
+
"grad_norm": 0.3152044415473938,
|
728 |
+
"learning_rate": 9.722175497259145e-06,
|
729 |
+
"loss": 0.4625,
|
730 |
+
"step": 103
|
731 |
+
},
|
732 |
+
{
|
733 |
+
"epoch": 0.5974150311153662,
|
734 |
+
"grad_norm": 0.29500487446784973,
|
735 |
+
"learning_rate": 9.71106073582425e-06,
|
736 |
+
"loss": 0.4513,
|
737 |
+
"step": 104
|
738 |
+
},
|
739 |
+
{
|
740 |
+
"epoch": 0.6031594064145525,
|
741 |
+
"grad_norm": 0.30806249380111694,
|
742 |
+
"learning_rate": 9.699734590862655e-06,
|
743 |
+
"loss": 0.4493,
|
744 |
+
"step": 105
|
745 |
+
},
|
746 |
+
{
|
747 |
+
"epoch": 0.6089037817137386,
|
748 |
+
"grad_norm": 0.32135871052742004,
|
749 |
+
"learning_rate": 9.688197570574238e-06,
|
750 |
+
"loss": 0.4687,
|
751 |
+
"step": 106
|
752 |
+
},
|
753 |
+
{
|
754 |
+
"epoch": 0.6146481570129249,
|
755 |
+
"grad_norm": 0.2976573705673218,
|
756 |
+
"learning_rate": 9.676450192620767e-06,
|
757 |
+
"loss": 0.464,
|
758 |
+
"step": 107
|
759 |
+
},
|
760 |
+
{
|
761 |
+
"epoch": 0.6203925323121111,
|
762 |
+
"grad_norm": 0.29863908886909485,
|
763 |
+
"learning_rate": 9.66449298410268e-06,
|
764 |
+
"loss": 0.4816,
|
765 |
+
"step": 108
|
766 |
+
},
|
767 |
+
{
|
768 |
+
"epoch": 0.6261369076112973,
|
769 |
+
"grad_norm": 0.31702297925949097,
|
770 |
+
"learning_rate": 9.652326481535434e-06,
|
771 |
+
"loss": 0.4278,
|
772 |
+
"step": 109
|
773 |
+
},
|
774 |
+
{
|
775 |
+
"epoch": 0.6318812829104835,
|
776 |
+
"grad_norm": 0.3526081442832947,
|
777 |
+
"learning_rate": 9.639951230825433e-06,
|
778 |
+
"loss": 0.4931,
|
779 |
+
"step": 110
|
780 |
+
},
|
781 |
+
{
|
782 |
+
"epoch": 0.6376256582096697,
|
783 |
+
"grad_norm": 0.33451569080352783,
|
784 |
+
"learning_rate": 9.62736778724553e-06,
|
785 |
+
"loss": 0.475,
|
786 |
+
"step": 111
|
787 |
+
},
|
788 |
+
{
|
789 |
+
"epoch": 0.6433700335088559,
|
790 |
+
"grad_norm": 0.33537557721138,
|
791 |
+
"learning_rate": 9.614576715410116e-06,
|
792 |
+
"loss": 0.4799,
|
793 |
+
"step": 112
|
794 |
+
},
|
795 |
+
{
|
796 |
+
"epoch": 0.6491144088080422,
|
797 |
+
"grad_norm": 0.3408398926258087,
|
798 |
+
"learning_rate": 9.60157858924978e-06,
|
799 |
+
"loss": 0.4483,
|
800 |
+
"step": 113
|
801 |
+
},
|
802 |
+
{
|
803 |
+
"epoch": 0.6548587841072283,
|
804 |
+
"grad_norm": 0.3780161440372467,
|
805 |
+
"learning_rate": 9.588373991985566e-06,
|
806 |
+
"loss": 0.4684,
|
807 |
+
"step": 114
|
808 |
+
},
|
809 |
+
{
|
810 |
+
"epoch": 0.6606031594064146,
|
811 |
+
"grad_norm": 0.35523924231529236,
|
812 |
+
"learning_rate": 9.574963516102795e-06,
|
813 |
+
"loss": 0.4783,
|
814 |
+
"step": 115
|
815 |
+
},
|
816 |
+
{
|
817 |
+
"epoch": 0.6663475347056008,
|
818 |
+
"grad_norm": 0.35043102502822876,
|
819 |
+
"learning_rate": 9.561347763324484e-06,
|
820 |
+
"loss": 0.4462,
|
821 |
+
"step": 116
|
822 |
+
},
|
823 |
+
{
|
824 |
+
"epoch": 0.672091910004787,
|
825 |
+
"grad_norm": 0.388933390378952,
|
826 |
+
"learning_rate": 9.547527344584353e-06,
|
827 |
+
"loss": 0.4617,
|
828 |
+
"step": 117
|
829 |
+
},
|
830 |
+
{
|
831 |
+
"epoch": 0.6778362853039732,
|
832 |
+
"grad_norm": 0.32936355471611023,
|
833 |
+
"learning_rate": 9.533502879999398e-06,
|
834 |
+
"loss": 0.48,
|
835 |
+
"step": 118
|
836 |
+
},
|
837 |
+
{
|
838 |
+
"epoch": 0.6835806606031594,
|
839 |
+
"grad_norm": 0.3353422284126282,
|
840 |
+
"learning_rate": 9.519274998842084e-06,
|
841 |
+
"loss": 0.4832,
|
842 |
+
"step": 119
|
843 |
+
},
|
844 |
+
{
|
845 |
+
"epoch": 0.6893250359023456,
|
846 |
+
"grad_norm": 0.36188051104545593,
|
847 |
+
"learning_rate": 9.504844339512096e-06,
|
848 |
+
"loss": 0.4527,
|
849 |
+
"step": 120
|
850 |
+
},
|
851 |
+
{
|
852 |
+
"epoch": 0.6950694112015319,
|
853 |
+
"grad_norm": 0.30944687128067017,
|
854 |
+
"learning_rate": 9.490211549507701e-06,
|
855 |
+
"loss": 0.4712,
|
856 |
+
"step": 121
|
857 |
+
},
|
858 |
+
{
|
859 |
+
"epoch": 0.700813786500718,
|
860 |
+
"grad_norm": 0.3265434801578522,
|
861 |
+
"learning_rate": 9.475377285396692e-06,
|
862 |
+
"loss": 0.4505,
|
863 |
+
"step": 122
|
864 |
+
},
|
865 |
+
{
|
866 |
+
"epoch": 0.7065581617999043,
|
867 |
+
"grad_norm": 0.3855360150337219,
|
868 |
+
"learning_rate": 9.460342212786933e-06,
|
869 |
+
"loss": 0.4417,
|
870 |
+
"step": 123
|
871 |
+
},
|
872 |
+
{
|
873 |
+
"epoch": 0.7123025370990905,
|
874 |
+
"grad_norm": 0.32236477732658386,
|
875 |
+
"learning_rate": 9.445107006296488e-06,
|
876 |
+
"loss": 0.4765,
|
877 |
+
"step": 124
|
878 |
+
},
|
879 |
+
{
|
880 |
+
"epoch": 0.7180469123982767,
|
881 |
+
"grad_norm": 0.32656142115592957,
|
882 |
+
"learning_rate": 9.42967234952335e-06,
|
883 |
+
"loss": 0.4829,
|
884 |
+
"step": 125
|
885 |
+
},
|
886 |
+
{
|
887 |
+
"epoch": 0.7237912876974629,
|
888 |
+
"grad_norm": 0.3501734435558319,
|
889 |
+
"learning_rate": 9.414038935014777e-06,
|
890 |
+
"loss": 0.4471,
|
891 |
+
"step": 126
|
892 |
+
},
|
893 |
+
{
|
894 |
+
"epoch": 0.7295356629966491,
|
895 |
+
"grad_norm": 0.32487544417381287,
|
896 |
+
"learning_rate": 9.398207464236209e-06,
|
897 |
+
"loss": 0.4483,
|
898 |
+
"step": 127
|
899 |
+
},
|
900 |
+
{
|
901 |
+
"epoch": 0.7352800382958353,
|
902 |
+
"grad_norm": 0.31272241473197937,
|
903 |
+
"learning_rate": 9.382178647539794e-06,
|
904 |
+
"loss": 0.4545,
|
905 |
+
"step": 128
|
906 |
+
},
|
907 |
+
{
|
908 |
+
"epoch": 0.7410244135950216,
|
909 |
+
"grad_norm": 0.3410162031650543,
|
910 |
+
"learning_rate": 9.365953204132526e-06,
|
911 |
+
"loss": 0.4642,
|
912 |
+
"step": 129
|
913 |
+
},
|
914 |
+
{
|
915 |
+
"epoch": 0.7467687888942077,
|
916 |
+
"grad_norm": 0.327289342880249,
|
917 |
+
"learning_rate": 9.349531862043952e-06,
|
918 |
+
"loss": 0.4794,
|
919 |
+
"step": 130
|
920 |
+
},
|
921 |
+
{
|
922 |
+
"epoch": 0.752513164193394,
|
923 |
+
"grad_norm": 0.3137280344963074,
|
924 |
+
"learning_rate": 9.332915358093532e-06,
|
925 |
+
"loss": 0.4453,
|
926 |
+
"step": 131
|
927 |
+
},
|
928 |
+
{
|
929 |
+
"epoch": 0.7582575394925802,
|
930 |
+
"grad_norm": 0.3067407011985779,
|
931 |
+
"learning_rate": 9.316104437857561e-06,
|
932 |
+
"loss": 0.4506,
|
933 |
+
"step": 132
|
934 |
+
},
|
935 |
+
{
|
936 |
+
"epoch": 0.7640019147917664,
|
937 |
+
"grad_norm": 0.3284437656402588,
|
938 |
+
"learning_rate": 9.299099855635716e-06,
|
939 |
+
"loss": 0.4443,
|
940 |
+
"step": 133
|
941 |
+
},
|
942 |
+
{
|
943 |
+
"epoch": 0.7697462900909526,
|
944 |
+
"grad_norm": 0.4092399477958679,
|
945 |
+
"learning_rate": 9.28190237441722e-06,
|
946 |
+
"loss": 0.4351,
|
947 |
+
"step": 134
|
948 |
+
},
|
949 |
+
{
|
950 |
+
"epoch": 0.7754906653901388,
|
951 |
+
"grad_norm": 0.3092726469039917,
|
952 |
+
"learning_rate": 9.2645127658466e-06,
|
953 |
+
"loss": 0.4532,
|
954 |
+
"step": 135
|
955 |
+
},
|
956 |
+
{
|
957 |
+
"epoch": 0.781235040689325,
|
958 |
+
"grad_norm": 0.34482482075691223,
|
959 |
+
"learning_rate": 9.246931810189061e-06,
|
960 |
+
"loss": 0.4626,
|
961 |
+
"step": 136
|
962 |
+
},
|
963 |
+
{
|
964 |
+
"epoch": 0.7869794159885113,
|
965 |
+
"grad_norm": 0.3547237813472748,
|
966 |
+
"learning_rate": 9.229160296295488e-06,
|
967 |
+
"loss": 0.4305,
|
968 |
+
"step": 137
|
969 |
+
},
|
970 |
+
{
|
971 |
+
"epoch": 0.7927237912876974,
|
972 |
+
"grad_norm": 0.32667168974876404,
|
973 |
+
"learning_rate": 9.211199021567034e-06,
|
974 |
+
"loss": 0.4675,
|
975 |
+
"step": 138
|
976 |
+
},
|
977 |
+
{
|
978 |
+
"epoch": 0.7984681665868837,
|
979 |
+
"grad_norm": 0.3909010589122772,
|
980 |
+
"learning_rate": 9.193048791919357e-06,
|
981 |
+
"loss": 0.4743,
|
982 |
+
"step": 139
|
983 |
+
},
|
984 |
+
{
|
985 |
+
"epoch": 0.8042125418860699,
|
986 |
+
"grad_norm": 0.31883516907691956,
|
987 |
+
"learning_rate": 9.174710421746445e-06,
|
988 |
+
"loss": 0.4824,
|
989 |
+
"step": 140
|
990 |
+
},
|
991 |
+
{
|
992 |
+
"epoch": 0.8099569171852561,
|
993 |
+
"grad_norm": 0.36612772941589355,
|
994 |
+
"learning_rate": 9.156184733884084e-06,
|
995 |
+
"loss": 0.4475,
|
996 |
+
"step": 141
|
997 |
+
},
|
998 |
+
{
|
999 |
+
"epoch": 0.8157012924844423,
|
1000 |
+
"grad_norm": 0.32956618070602417,
|
1001 |
+
"learning_rate": 9.137472559572935e-06,
|
1002 |
+
"loss": 0.4762,
|
1003 |
+
"step": 142
|
1004 |
+
},
|
1005 |
+
{
|
1006 |
+
"epoch": 0.8214456677836285,
|
1007 |
+
"grad_norm": 0.3318566679954529,
|
1008 |
+
"learning_rate": 9.118574738421236e-06,
|
1009 |
+
"loss": 0.4381,
|
1010 |
+
"step": 143
|
1011 |
+
},
|
1012 |
+
{
|
1013 |
+
"epoch": 0.8271900430828147,
|
1014 |
+
"grad_norm": 0.34344008564949036,
|
1015 |
+
"learning_rate": 9.099492118367123e-06,
|
1016 |
+
"loss": 0.4693,
|
1017 |
+
"step": 144
|
1018 |
+
},
|
1019 |
+
{
|
1020 |
+
"epoch": 0.832934418382001,
|
1021 |
+
"grad_norm": 0.2847115695476532,
|
1022 |
+
"learning_rate": 9.080225555640601e-06,
|
1023 |
+
"loss": 0.4552,
|
1024 |
+
"step": 145
|
1025 |
+
},
|
1026 |
+
{
|
1027 |
+
"epoch": 0.8386787936811871,
|
1028 |
+
"grad_norm": 0.31437164545059204,
|
1029 |
+
"learning_rate": 9.0607759147251e-06,
|
1030 |
+
"loss": 0.457,
|
1031 |
+
"step": 146
|
1032 |
+
},
|
1033 |
+
{
|
1034 |
+
"epoch": 0.8444231689803734,
|
1035 |
+
"grad_norm": 0.2915739417076111,
|
1036 |
+
"learning_rate": 9.04114406831871e-06,
|
1037 |
+
"loss": 0.4318,
|
1038 |
+
"step": 147
|
1039 |
+
},
|
1040 |
+
{
|
1041 |
+
"epoch": 0.8501675442795597,
|
1042 |
+
"grad_norm": 0.32622477412223816,
|
1043 |
+
"learning_rate": 9.021330897295011e-06,
|
1044 |
+
"loss": 0.456,
|
1045 |
+
"step": 148
|
1046 |
+
},
|
1047 |
+
{
|
1048 |
+
"epoch": 0.8559119195787458,
|
1049 |
+
"grad_norm": 0.3252268135547638,
|
1050 |
+
"learning_rate": 9.001337290663548e-06,
|
1051 |
+
"loss": 0.4397,
|
1052 |
+
"step": 149
|
1053 |
+
},
|
1054 |
+
{
|
1055 |
+
"epoch": 0.861656294877932,
|
1056 |
+
"grad_norm": 0.3453269898891449,
|
1057 |
+
"learning_rate": 8.981164145529943e-06,
|
1058 |
+
"loss": 0.4612,
|
1059 |
+
"step": 150
|
1060 |
+
},
|
1061 |
+
{
|
1062 |
+
"epoch": 0.8674006701771182,
|
1063 |
+
"grad_norm": 0.28802141547203064,
|
1064 |
+
"learning_rate": 8.960812367055646e-06,
|
1065 |
+
"loss": 0.473,
|
1066 |
+
"step": 151
|
1067 |
+
},
|
1068 |
+
{
|
1069 |
+
"epoch": 0.8731450454763044,
|
1070 |
+
"grad_norm": 0.27404195070266724,
|
1071 |
+
"learning_rate": 8.940282868417321e-06,
|
1072 |
+
"loss": 0.4133,
|
1073 |
+
"step": 152
|
1074 |
+
},
|
1075 |
+
{
|
1076 |
+
"epoch": 0.8788894207754907,
|
1077 |
+
"grad_norm": 0.3235178589820862,
|
1078 |
+
"learning_rate": 8.91957657076586e-06,
|
1079 |
+
"loss": 0.4658,
|
1080 |
+
"step": 153
|
1081 |
+
},
|
1082 |
+
{
|
1083 |
+
"epoch": 0.8846337960746768,
|
1084 |
+
"grad_norm": 0.29341384768486023,
|
1085 |
+
"learning_rate": 8.898694403185066e-06,
|
1086 |
+
"loss": 0.4287,
|
1087 |
+
"step": 154
|
1088 |
+
},
|
1089 |
+
{
|
1090 |
+
"epoch": 0.8903781713738631,
|
1091 |
+
"grad_norm": 0.335021436214447,
|
1092 |
+
"learning_rate": 8.877637302649962e-06,
|
1093 |
+
"loss": 0.4424,
|
1094 |
+
"step": 155
|
1095 |
+
},
|
1096 |
+
{
|
1097 |
+
"epoch": 0.8961225466730494,
|
1098 |
+
"grad_norm": 0.2819330096244812,
|
1099 |
+
"learning_rate": 8.856406213984743e-06,
|
1100 |
+
"loss": 0.4579,
|
1101 |
+
"step": 156
|
1102 |
+
},
|
1103 |
+
{
|
1104 |
+
"epoch": 0.9018669219722355,
|
1105 |
+
"grad_norm": 0.3036133646965027,
|
1106 |
+
"learning_rate": 8.835002089820387e-06,
|
1107 |
+
"loss": 0.4468,
|
1108 |
+
"step": 157
|
1109 |
+
},
|
1110 |
+
{
|
1111 |
+
"epoch": 0.9076112972714218,
|
1112 |
+
"grad_norm": 0.30535823106765747,
|
1113 |
+
"learning_rate": 8.81342589055191e-06,
|
1114 |
+
"loss": 0.4735,
|
1115 |
+
"step": 158
|
1116 |
+
},
|
1117 |
+
{
|
1118 |
+
"epoch": 0.9133556725706079,
|
1119 |
+
"grad_norm": 0.2687799036502838,
|
1120 |
+
"learning_rate": 8.791678584295276e-06,
|
1121 |
+
"loss": 0.4381,
|
1122 |
+
"step": 159
|
1123 |
+
},
|
1124 |
+
{
|
1125 |
+
"epoch": 0.9191000478697942,
|
1126 |
+
"grad_norm": 0.3096264898777008,
|
1127 |
+
"learning_rate": 8.76976114684395e-06,
|
1128 |
+
"loss": 0.4467,
|
1129 |
+
"step": 160
|
1130 |
+
},
|
1131 |
+
{
|
1132 |
+
"epoch": 0.9248444231689804,
|
1133 |
+
"grad_norm": 0.3003120422363281,
|
1134 |
+
"learning_rate": 8.747674561625121e-06,
|
1135 |
+
"loss": 0.4633,
|
1136 |
+
"step": 161
|
1137 |
+
},
|
1138 |
+
{
|
1139 |
+
"epoch": 0.9305887984681666,
|
1140 |
+
"grad_norm": 0.2911892533302307,
|
1141 |
+
"learning_rate": 8.725419819655582e-06,
|
1142 |
+
"loss": 0.4093,
|
1143 |
+
"step": 162
|
1144 |
+
},
|
1145 |
+
{
|
1146 |
+
"epoch": 0.9363331737673528,
|
1147 |
+
"grad_norm": 0.28195834159851074,
|
1148 |
+
"learning_rate": 8.702997919497247e-06,
|
1149 |
+
"loss": 0.4535,
|
1150 |
+
"step": 163
|
1151 |
+
},
|
1152 |
+
{
|
1153 |
+
"epoch": 0.9420775490665391,
|
1154 |
+
"grad_norm": 0.28224724531173706,
|
1155 |
+
"learning_rate": 8.680409867212359e-06,
|
1156 |
+
"loss": 0.4726,
|
1157 |
+
"step": 164
|
1158 |
+
},
|
1159 |
+
{
|
1160 |
+
"epoch": 0.9478219243657252,
|
1161 |
+
"grad_norm": 0.31931445002555847,
|
1162 |
+
"learning_rate": 8.657656676318346e-06,
|
1163 |
+
"loss": 0.467,
|
1164 |
+
"step": 165
|
1165 |
+
},
|
1166 |
+
{
|
1167 |
+
"epoch": 0.9535662996649115,
|
1168 |
+
"grad_norm": 0.3119199872016907,
|
1169 |
+
"learning_rate": 8.634739367742341e-06,
|
1170 |
+
"loss": 0.4248,
|
1171 |
+
"step": 166
|
1172 |
+
},
|
1173 |
+
{
|
1174 |
+
"epoch": 0.9593106749640976,
|
1175 |
+
"grad_norm": 0.2851564586162567,
|
1176 |
+
"learning_rate": 8.611658969775378e-06,
|
1177 |
+
"loss": 0.4421,
|
1178 |
+
"step": 167
|
1179 |
+
},
|
1180 |
+
{
|
1181 |
+
"epoch": 0.9650550502632839,
|
1182 |
+
"grad_norm": 0.287062406539917,
|
1183 |
+
"learning_rate": 8.588416518026248e-06,
|
1184 |
+
"loss": 0.4379,
|
1185 |
+
"step": 168
|
1186 |
+
},
|
1187 |
+
{
|
1188 |
+
"epoch": 0.9707994255624701,
|
1189 |
+
"grad_norm": 0.29976505041122437,
|
1190 |
+
"learning_rate": 8.565013055375035e-06,
|
1191 |
+
"loss": 0.453,
|
1192 |
+
"step": 169
|
1193 |
+
},
|
1194 |
+
{
|
1195 |
+
"epoch": 0.9765438008616563,
|
1196 |
+
"grad_norm": 0.3060532212257385,
|
1197 |
+
"learning_rate": 8.541449631926325e-06,
|
1198 |
+
"loss": 0.4484,
|
1199 |
+
"step": 170
|
1200 |
+
},
|
1201 |
+
{
|
1202 |
+
"epoch": 0.9822881761608425,
|
1203 |
+
"grad_norm": 0.29888424277305603,
|
1204 |
+
"learning_rate": 8.51772730496208e-06,
|
1205 |
+
"loss": 0.4728,
|
1206 |
+
"step": 171
|
1207 |
+
},
|
1208 |
+
{
|
1209 |
+
"epoch": 0.9880325514600288,
|
1210 |
+
"grad_norm": 0.31030842661857605,
|
1211 |
+
"learning_rate": 8.49384713889421e-06,
|
1212 |
+
"loss": 0.4441,
|
1213 |
+
"step": 172
|
1214 |
+
},
|
1215 |
+
{
|
1216 |
+
"epoch": 0.9937769267592149,
|
1217 |
+
"grad_norm": 0.28488606214523315,
|
1218 |
+
"learning_rate": 8.469810205216795e-06,
|
1219 |
+
"loss": 0.4784,
|
1220 |
+
"step": 173
|
1221 |
+
},
|
1222 |
+
{
|
1223 |
+
"epoch": 0.9995213020584012,
|
1224 |
+
"grad_norm": 0.2805924713611603,
|
1225 |
+
"learning_rate": 8.445617582458033e-06,
|
1226 |
+
"loss": 0.4643,
|
1227 |
+
"step": 174
|
1228 |
+
},
|
1229 |
+
{
|
1230 |
+
"epoch": 1.0052656773575874,
|
1231 |
+
"grad_norm": 0.9323565363883972,
|
1232 |
+
"learning_rate": 8.42127035613182e-06,
|
1233 |
+
"loss": 0.7769,
|
1234 |
+
"step": 175
|
1235 |
+
},
|
1236 |
+
{
|
1237 |
+
"epoch": 1.0110100526567736,
|
1238 |
+
"grad_norm": 0.28666186332702637,
|
1239 |
+
"learning_rate": 8.396769618689064e-06,
|
1240 |
+
"loss": 0.4213,
|
1241 |
+
"step": 176
|
1242 |
+
},
|
1243 |
+
{
|
1244 |
+
"epoch": 1.0167544279559597,
|
1245 |
+
"grad_norm": 0.30998125672340393,
|
1246 |
+
"learning_rate": 8.372116469468654e-06,
|
1247 |
+
"loss": 0.4429,
|
1248 |
+
"step": 177
|
1249 |
+
},
|
1250 |
+
{
|
1251 |
+
"epoch": 1.022498803255146,
|
1252 |
+
"grad_norm": 0.37547767162323,
|
1253 |
+
"learning_rate": 8.347312014648144e-06,
|
1254 |
+
"loss": 0.3927,
|
1255 |
+
"step": 178
|
1256 |
+
},
|
1257 |
+
{
|
1258 |
+
"epoch": 1.0282431785543322,
|
1259 |
+
"grad_norm": 0.31034451723098755,
|
1260 |
+
"learning_rate": 8.32235736719411e-06,
|
1261 |
+
"loss": 0.3735,
|
1262 |
+
"step": 179
|
1263 |
+
},
|
1264 |
+
{
|
1265 |
+
"epoch": 1.0339875538535184,
|
1266 |
+
"grad_norm": 0.4133894443511963,
|
1267 |
+
"learning_rate": 8.297253646812213e-06,
|
1268 |
+
"loss": 0.4508,
|
1269 |
+
"step": 180
|
1270 |
+
},
|
1271 |
+
{
|
1272 |
+
"epoch": 1.0397319291527047,
|
1273 |
+
"grad_norm": 0.3431154787540436,
|
1274 |
+
"learning_rate": 8.272001979896962e-06,
|
1275 |
+
"loss": 0.4064,
|
1276 |
+
"step": 181
|
1277 |
+
},
|
1278 |
+
{
|
1279 |
+
"epoch": 1.0454763044518909,
|
1280 |
+
"grad_norm": 0.31550031900405884,
|
1281 |
+
"learning_rate": 8.246603499481177e-06,
|
1282 |
+
"loss": 0.4075,
|
1283 |
+
"step": 182
|
1284 |
+
},
|
1285 |
+
{
|
1286 |
+
"epoch": 1.051220679751077,
|
1287 |
+
"grad_norm": 0.3756781816482544,
|
1288 |
+
"learning_rate": 8.221059345185136e-06,
|
1289 |
+
"loss": 0.4108,
|
1290 |
+
"step": 183
|
1291 |
+
},
|
1292 |
+
{
|
1293 |
+
"epoch": 1.0569650550502634,
|
1294 |
+
"grad_norm": 0.34178560972213745,
|
1295 |
+
"learning_rate": 8.195370663165455e-06,
|
1296 |
+
"loss": 0.4057,
|
1297 |
+
"step": 184
|
1298 |
+
},
|
1299 |
+
{
|
1300 |
+
"epoch": 1.0627094303494495,
|
1301 |
+
"grad_norm": 0.31101906299591064,
|
1302 |
+
"learning_rate": 8.169538606063647e-06,
|
1303 |
+
"loss": 0.4237,
|
1304 |
+
"step": 185
|
1305 |
+
},
|
1306 |
+
{
|
1307 |
+
"epoch": 1.0684538056486357,
|
1308 |
+
"grad_norm": 0.3273501694202423,
|
1309 |
+
"learning_rate": 8.143564332954426e-06,
|
1310 |
+
"loss": 0.4287,
|
1311 |
+
"step": 186
|
1312 |
+
},
|
1313 |
+
{
|
1314 |
+
"epoch": 1.0741981809478218,
|
1315 |
+
"grad_norm": 0.34117910265922546,
|
1316 |
+
"learning_rate": 8.117449009293668e-06,
|
1317 |
+
"loss": 0.4279,
|
1318 |
+
"step": 187
|
1319 |
+
},
|
1320 |
+
{
|
1321 |
+
"epoch": 1.0799425562470082,
|
1322 |
+
"grad_norm": 0.28906843066215515,
|
1323 |
+
"learning_rate": 8.091193806866147e-06,
|
1324 |
+
"loss": 0.4271,
|
1325 |
+
"step": 188
|
1326 |
+
},
|
1327 |
+
{
|
1328 |
+
"epoch": 1.0856869315461943,
|
1329 |
+
"grad_norm": 0.2958456575870514,
|
1330 |
+
"learning_rate": 8.064799903732936e-06,
|
1331 |
+
"loss": 0.3967,
|
1332 |
+
"step": 189
|
1333 |
+
},
|
1334 |
+
{
|
1335 |
+
"epoch": 1.0914313068453805,
|
1336 |
+
"grad_norm": 0.2881177067756653,
|
1337 |
+
"learning_rate": 8.038268484178566e-06,
|
1338 |
+
"loss": 0.4176,
|
1339 |
+
"step": 190
|
1340 |
+
},
|
1341 |
+
{
|
1342 |
+
"epoch": 1.0971756821445668,
|
1343 |
+
"grad_norm": 0.32246285676956177,
|
1344 |
+
"learning_rate": 8.011600738657865e-06,
|
1345 |
+
"loss": 0.4222,
|
1346 |
+
"step": 191
|
1347 |
+
},
|
1348 |
+
{
|
1349 |
+
"epoch": 1.102920057443753,
|
1350 |
+
"grad_norm": 0.3277246952056885,
|
1351 |
+
"learning_rate": 7.98479786374257e-06,
|
1352 |
+
"loss": 0.3853,
|
1353 |
+
"step": 192
|
1354 |
+
},
|
1355 |
+
{
|
1356 |
+
"epoch": 1.1086644327429391,
|
1357 |
+
"grad_norm": 0.3409973382949829,
|
1358 |
+
"learning_rate": 7.957861062067614e-06,
|
1359 |
+
"loss": 0.4156,
|
1360 |
+
"step": 193
|
1361 |
+
},
|
1362 |
+
{
|
1363 |
+
"epoch": 1.1144088080421255,
|
1364 |
+
"grad_norm": 0.2889862060546875,
|
1365 |
+
"learning_rate": 7.930791542277175e-06,
|
1366 |
+
"loss": 0.4143,
|
1367 |
+
"step": 194
|
1368 |
+
},
|
1369 |
+
{
|
1370 |
+
"epoch": 1.1201531833413116,
|
1371 |
+
"grad_norm": 0.3289618492126465,
|
1372 |
+
"learning_rate": 7.903590518970445e-06,
|
1373 |
+
"loss": 0.4363,
|
1374 |
+
"step": 195
|
1375 |
+
},
|
1376 |
+
{
|
1377 |
+
"epoch": 1.1258975586404978,
|
1378 |
+
"grad_norm": 0.2657482624053955,
|
1379 |
+
"learning_rate": 7.876259212647129e-06,
|
1380 |
+
"loss": 0.3729,
|
1381 |
+
"step": 196
|
1382 |
+
},
|
1383 |
+
{
|
1384 |
+
"epoch": 1.1316419339396842,
|
1385 |
+
"grad_norm": 0.31660833954811096,
|
1386 |
+
"learning_rate": 7.848798849652684e-06,
|
1387 |
+
"loss": 0.4256,
|
1388 |
+
"step": 197
|
1389 |
+
},
|
1390 |
+
{
|
1391 |
+
"epoch": 1.1373863092388703,
|
1392 |
+
"grad_norm": 0.2567324936389923,
|
1393 |
+
"learning_rate": 7.821210662123284e-06,
|
1394 |
+
"loss": 0.3998,
|
1395 |
+
"step": 198
|
1396 |
+
},
|
1397 |
+
{
|
1398 |
+
"epoch": 1.1431306845380564,
|
1399 |
+
"grad_norm": 0.41787901520729065,
|
1400 |
+
"learning_rate": 7.793495887930551e-06,
|
1401 |
+
"loss": 0.3784,
|
1402 |
+
"step": 199
|
1403 |
+
},
|
1404 |
+
{
|
1405 |
+
"epoch": 1.1488750598372426,
|
1406 |
+
"grad_norm": 0.2855132222175598,
|
1407 |
+
"learning_rate": 7.765655770625997e-06,
|
1408 |
+
"loss": 0.3947,
|
1409 |
+
"step": 200
|
1410 |
+
},
|
1411 |
+
{
|
1412 |
+
"epoch": 1.154619435136429,
|
1413 |
+
"grad_norm": 0.3121788799762726,
|
1414 |
+
"learning_rate": 7.737691559385237e-06,
|
1415 |
+
"loss": 0.4236,
|
1416 |
+
"step": 201
|
1417 |
+
},
|
1418 |
+
{
|
1419 |
+
"epoch": 1.160363810435615,
|
1420 |
+
"grad_norm": 0.3103139102458954,
|
1421 |
+
"learning_rate": 7.709604508951927e-06,
|
1422 |
+
"loss": 0.4296,
|
1423 |
+
"step": 202
|
1424 |
+
},
|
1425 |
+
{
|
1426 |
+
"epoch": 1.1661081857348012,
|
1427 |
+
"grad_norm": 0.29311925172805786,
|
1428 |
+
"learning_rate": 7.68139587958148e-06,
|
1429 |
+
"loss": 0.3973,
|
1430 |
+
"step": 203
|
1431 |
+
},
|
1432 |
+
{
|
1433 |
+
"epoch": 1.1718525610339876,
|
1434 |
+
"grad_norm": 0.2649705111980438,
|
1435 |
+
"learning_rate": 7.653066936984504e-06,
|
1436 |
+
"loss": 0.4285,
|
1437 |
+
"step": 204
|
1438 |
+
},
|
1439 |
+
{
|
1440 |
+
"epoch": 1.1775969363331737,
|
1441 |
+
"grad_norm": 0.3288572430610657,
|
1442 |
+
"learning_rate": 7.6246189522700205e-06,
|
1443 |
+
"loss": 0.4415,
|
1444 |
+
"step": 205
|
1445 |
+
},
|
1446 |
+
{
|
1447 |
+
"epoch": 1.18334131163236,
|
1448 |
+
"grad_norm": 0.3235880732536316,
|
1449 |
+
"learning_rate": 7.596053201888425e-06,
|
1450 |
+
"loss": 0.3963,
|
1451 |
+
"step": 206
|
1452 |
+
},
|
1453 |
+
{
|
1454 |
+
"epoch": 1.1890856869315463,
|
1455 |
+
"grad_norm": 0.26849544048309326,
|
1456 |
+
"learning_rate": 7.56737096757421e-06,
|
1457 |
+
"loss": 0.4009,
|
1458 |
+
"step": 207
|
1459 |
+
},
|
1460 |
+
{
|
1461 |
+
"epoch": 1.1948300622307324,
|
1462 |
+
"grad_norm": 0.34631481766700745,
|
1463 |
+
"learning_rate": 7.538573536288466e-06,
|
1464 |
+
"loss": 0.4067,
|
1465 |
+
"step": 208
|
1466 |
+
},
|
1467 |
+
{
|
1468 |
+
"epoch": 1.2005744375299185,
|
1469 |
+
"grad_norm": 0.2917556166648865,
|
1470 |
+
"learning_rate": 7.509662200161122e-06,
|
1471 |
+
"loss": 0.3852,
|
1472 |
+
"step": 209
|
1473 |
+
},
|
1474 |
+
{
|
1475 |
+
"epoch": 1.206318812829105,
|
1476 |
+
"grad_norm": 0.33460524678230286,
|
1477 |
+
"learning_rate": 7.480638256432977e-06,
|
1478 |
+
"loss": 0.4346,
|
1479 |
+
"step": 210
|
1480 |
+
},
|
1481 |
+
{
|
1482 |
+
"epoch": 1.212063188128291,
|
1483 |
+
"grad_norm": 0.31476372480392456,
|
1484 |
+
"learning_rate": 7.4515030073974915e-06,
|
1485 |
+
"loss": 0.4224,
|
1486 |
+
"step": 211
|
1487 |
+
},
|
1488 |
+
{
|
1489 |
+
"epoch": 1.2178075634274772,
|
1490 |
+
"grad_norm": 0.2829968333244324,
|
1491 |
+
"learning_rate": 7.422257760342351e-06,
|
1492 |
+
"loss": 0.416,
|
1493 |
+
"step": 212
|
1494 |
+
},
|
1495 |
+
{
|
1496 |
+
"epoch": 1.2235519387266636,
|
1497 |
+
"grad_norm": 0.2671918272972107,
|
1498 |
+
"learning_rate": 7.392903827490814e-06,
|
1499 |
+
"loss": 0.3893,
|
1500 |
+
"step": 213
|
1501 |
+
},
|
1502 |
+
{
|
1503 |
+
"epoch": 1.2292963140258497,
|
1504 |
+
"grad_norm": 0.3298075497150421,
|
1505 |
+
"learning_rate": 7.363442525942827e-06,
|
1506 |
+
"loss": 0.3805,
|
1507 |
+
"step": 214
|
1508 |
+
},
|
1509 |
+
{
|
1510 |
+
"epoch": 1.2350406893250359,
|
1511 |
+
"grad_norm": 0.26619213819503784,
|
1512 |
+
"learning_rate": 7.333875177615931e-06,
|
1513 |
+
"loss": 0.4179,
|
1514 |
+
"step": 215
|
1515 |
+
},
|
1516 |
+
{
|
1517 |
+
"epoch": 1.2407850646242222,
|
1518 |
+
"grad_norm": 0.26025766134262085,
|
1519 |
+
"learning_rate": 7.304203109185947e-06,
|
1520 |
+
"loss": 0.425,
|
1521 |
+
"step": 216
|
1522 |
+
},
|
1523 |
+
{
|
1524 |
+
"epoch": 1.2465294399234084,
|
1525 |
+
"grad_norm": 0.2681398093700409,
|
1526 |
+
"learning_rate": 7.274427652027444e-06,
|
1527 |
+
"loss": 0.3768,
|
1528 |
+
"step": 217
|
1529 |
+
},
|
1530 |
+
{
|
1531 |
+
"epoch": 1.2522738152225945,
|
1532 |
+
"grad_norm": 0.30282825231552124,
|
1533 |
+
"learning_rate": 7.244550142154009e-06,
|
1534 |
+
"loss": 0.4041,
|
1535 |
+
"step": 218
|
1536 |
+
},
|
1537 |
+
{
|
1538 |
+
"epoch": 1.2580181905217809,
|
1539 |
+
"grad_norm": 0.24776627123355865,
|
1540 |
+
"learning_rate": 7.214571920158293e-06,
|
1541 |
+
"loss": 0.4106,
|
1542 |
+
"step": 219
|
1543 |
+
},
|
1544 |
+
{
|
1545 |
+
"epoch": 1.263762565820967,
|
1546 |
+
"grad_norm": 0.25247007608413696,
|
1547 |
+
"learning_rate": 7.1844943311518665e-06,
|
1548 |
+
"loss": 0.444,
|
1549 |
+
"step": 220
|
1550 |
+
},
|
1551 |
+
{
|
1552 |
+
"epoch": 1.2695069411201532,
|
1553 |
+
"grad_norm": 0.27782031893730164,
|
1554 |
+
"learning_rate": 7.1543187247048525e-06,
|
1555 |
+
"loss": 0.43,
|
1556 |
+
"step": 221
|
1557 |
+
},
|
1558 |
+
{
|
1559 |
+
"epoch": 1.2752513164193395,
|
1560 |
+
"grad_norm": 0.2578721046447754,
|
1561 |
+
"learning_rate": 7.124046454785387e-06,
|
1562 |
+
"loss": 0.3989,
|
1563 |
+
"step": 222
|
1564 |
+
},
|
1565 |
+
{
|
1566 |
+
"epoch": 1.2809956917185257,
|
1567 |
+
"grad_norm": 0.30569854378700256,
|
1568 |
+
"learning_rate": 7.093678879698858e-06,
|
1569 |
+
"loss": 0.4405,
|
1570 |
+
"step": 223
|
1571 |
+
},
|
1572 |
+
{
|
1573 |
+
"epoch": 1.2867400670177118,
|
1574 |
+
"grad_norm": 0.2797735333442688,
|
1575 |
+
"learning_rate": 7.063217362026957e-06,
|
1576 |
+
"loss": 0.3981,
|
1577 |
+
"step": 224
|
1578 |
+
},
|
1579 |
+
{
|
1580 |
+
"epoch": 1.292484442316898,
|
1581 |
+
"grad_norm": 0.2695583403110504,
|
1582 |
+
"learning_rate": 7.032663268566547e-06,
|
1583 |
+
"loss": 0.3935,
|
1584 |
+
"step": 225
|
1585 |
+
},
|
1586 |
+
{
|
1587 |
+
"epoch": 1.2982288176160843,
|
1588 |
+
"grad_norm": 0.294919490814209,
|
1589 |
+
"learning_rate": 7.002017970268336e-06,
|
1590 |
+
"loss": 0.3952,
|
1591 |
+
"step": 226
|
1592 |
+
},
|
1593 |
+
{
|
1594 |
+
"epoch": 1.3039731929152705,
|
1595 |
+
"grad_norm": 0.2755994200706482,
|
1596 |
+
"learning_rate": 6.97128284217535e-06,
|
1597 |
+
"loss": 0.4327,
|
1598 |
+
"step": 227
|
1599 |
+
},
|
1600 |
+
{
|
1601 |
+
"epoch": 1.3097175682144566,
|
1602 |
+
"grad_norm": 0.2905407249927521,
|
1603 |
+
"learning_rate": 6.9404592633612486e-06,
|
1604 |
+
"loss": 0.4658,
|
1605 |
+
"step": 228
|
1606 |
+
},
|
1607 |
+
{
|
1608 |
+
"epoch": 1.3154619435136428,
|
1609 |
+
"grad_norm": 0.2658722400665283,
|
1610 |
+
"learning_rate": 6.909548616868444e-06,
|
1611 |
+
"loss": 0.4237,
|
1612 |
+
"step": 229
|
1613 |
+
},
|
1614 |
+
{
|
1615 |
+
"epoch": 1.3212063188128291,
|
1616 |
+
"grad_norm": 0.2581554651260376,
|
1617 |
+
"learning_rate": 6.878552289646041e-06,
|
1618 |
+
"loss": 0.41,
|
1619 |
+
"step": 230
|
1620 |
+
},
|
1621 |
+
{
|
1622 |
+
"epoch": 1.3269506941120153,
|
1623 |
+
"grad_norm": 0.2688734233379364,
|
1624 |
+
"learning_rate": 6.847471672487607e-06,
|
1625 |
+
"loss": 0.3662,
|
1626 |
+
"step": 231
|
1627 |
+
},
|
1628 |
+
{
|
1629 |
+
"epoch": 1.3326950694112014,
|
1630 |
+
"grad_norm": 0.30700597167015076,
|
1631 |
+
"learning_rate": 6.816308159968761e-06,
|
1632 |
+
"loss": 0.4595,
|
1633 |
+
"step": 232
|
1634 |
+
},
|
1635 |
+
{
|
1636 |
+
"epoch": 1.3384394447103878,
|
1637 |
+
"grad_norm": 0.271698921918869,
|
1638 |
+
"learning_rate": 6.7850631503846165e-06,
|
1639 |
+
"loss": 0.4073,
|
1640 |
+
"step": 233
|
1641 |
+
},
|
1642 |
+
{
|
1643 |
+
"epoch": 1.344183820009574,
|
1644 |
+
"grad_norm": 0.24926996231079102,
|
1645 |
+
"learning_rate": 6.753738045687021e-06,
|
1646 |
+
"loss": 0.42,
|
1647 |
+
"step": 234
|
1648 |
+
},
|
1649 |
+
{
|
1650 |
+
"epoch": 1.34992819530876,
|
1651 |
+
"grad_norm": 0.2579510807991028,
|
1652 |
+
"learning_rate": 6.722334251421665e-06,
|
1653 |
+
"loss": 0.3975,
|
1654 |
+
"step": 235
|
1655 |
+
},
|
1656 |
+
{
|
1657 |
+
"epoch": 1.3556725706079464,
|
1658 |
+
"grad_norm": 0.2766447067260742,
|
1659 |
+
"learning_rate": 6.690853176665007e-06,
|
1660 |
+
"loss": 0.4539,
|
1661 |
+
"step": 236
|
1662 |
+
},
|
1663 |
+
{
|
1664 |
+
"epoch": 1.3614169459071326,
|
1665 |
+
"grad_norm": 0.2743930220603943,
|
1666 |
+
"learning_rate": 6.659296233961055e-06,
|
1667 |
+
"loss": 0.3857,
|
1668 |
+
"step": 237
|
1669 |
+
},
|
1670 |
+
{
|
1671 |
+
"epoch": 1.3671613212063187,
|
1672 |
+
"grad_norm": 0.2682178318500519,
|
1673 |
+
"learning_rate": 6.627664839257979e-06,
|
1674 |
+
"loss": 0.4127,
|
1675 |
+
"step": 238
|
1676 |
+
},
|
1677 |
+
{
|
1678 |
+
"epoch": 1.372905696505505,
|
1679 |
+
"grad_norm": 0.26600515842437744,
|
1680 |
+
"learning_rate": 6.595960411844589e-06,
|
1681 |
+
"loss": 0.4309,
|
1682 |
+
"step": 239
|
1683 |
+
},
|
1684 |
+
{
|
1685 |
+
"epoch": 1.3786500718046912,
|
1686 |
+
"grad_norm": 0.2679958939552307,
|
1687 |
+
"learning_rate": 6.564184374286636e-06,
|
1688 |
+
"loss": 0.4276,
|
1689 |
+
"step": 240
|
1690 |
+
},
|
1691 |
+
{
|
1692 |
+
"epoch": 1.3843944471038774,
|
1693 |
+
"grad_norm": 0.32891181111335754,
|
1694 |
+
"learning_rate": 6.532338152363001e-06,
|
1695 |
+
"loss": 0.3908,
|
1696 |
+
"step": 241
|
1697 |
+
},
|
1698 |
+
{
|
1699 |
+
"epoch": 1.3901388224030637,
|
1700 |
+
"grad_norm": 0.2673075199127197,
|
1701 |
+
"learning_rate": 6.500423175001705e-06,
|
1702 |
+
"loss": 0.3662,
|
1703 |
+
"step": 242
|
1704 |
+
},
|
1705 |
+
{
|
1706 |
+
"epoch": 1.39588319770225,
|
1707 |
+
"grad_norm": 0.29596373438835144,
|
1708 |
+
"learning_rate": 6.468440874215801e-06,
|
1709 |
+
"loss": 0.4268,
|
1710 |
+
"step": 243
|
1711 |
+
},
|
1712 |
+
{
|
1713 |
+
"epoch": 1.401627573001436,
|
1714 |
+
"grad_norm": 0.2759062349796295,
|
1715 |
+
"learning_rate": 6.43639268503912e-06,
|
1716 |
+
"loss": 0.4216,
|
1717 |
+
"step": 244
|
1718 |
+
},
|
1719 |
+
{
|
1720 |
+
"epoch": 1.4073719483006224,
|
1721 |
+
"grad_norm": 0.2963887155056,
|
1722 |
+
"learning_rate": 6.40428004546188e-06,
|
1723 |
+
"loss": 0.4303,
|
1724 |
+
"step": 245
|
1725 |
+
},
|
1726 |
+
{
|
1727 |
+
"epoch": 1.4131163235998085,
|
1728 |
+
"grad_norm": 0.26301926374435425,
|
1729 |
+
"learning_rate": 6.372104396366162e-06,
|
1730 |
+
"loss": 0.3999,
|
1731 |
+
"step": 246
|
1732 |
+
},
|
1733 |
+
{
|
1734 |
+
"epoch": 1.4188606988989947,
|
1735 |
+
"grad_norm": 0.26165592670440674,
|
1736 |
+
"learning_rate": 6.339867181461265e-06,
|
1737 |
+
"loss": 0.4315,
|
1738 |
+
"step": 247
|
1739 |
+
},
|
1740 |
+
{
|
1741 |
+
"epoch": 1.424605074198181,
|
1742 |
+
"grad_norm": 0.2649396061897278,
|
1743 |
+
"learning_rate": 6.307569847218917e-06,
|
1744 |
+
"loss": 0.3932,
|
1745 |
+
"step": 248
|
1746 |
+
},
|
1747 |
+
{
|
1748 |
+
"epoch": 1.4303494494973672,
|
1749 |
+
"grad_norm": 0.26852184534072876,
|
1750 |
+
"learning_rate": 6.275213842808383e-06,
|
1751 |
+
"loss": 0.4079,
|
1752 |
+
"step": 249
|
1753 |
+
},
|
1754 |
+
{
|
1755 |
+
"epoch": 1.4360938247965533,
|
1756 |
+
"grad_norm": 0.2582874894142151,
|
1757 |
+
"learning_rate": 6.242800620031434e-06,
|
1758 |
+
"loss": 0.4011,
|
1759 |
+
"step": 250
|
1760 |
+
},
|
1761 |
+
{
|
1762 |
+
"epoch": 1.4418382000957397,
|
1763 |
+
"grad_norm": 0.27596113085746765,
|
1764 |
+
"learning_rate": 6.2103316332572095e-06,
|
1765 |
+
"loss": 0.4273,
|
1766 |
+
"step": 251
|
1767 |
+
},
|
1768 |
+
{
|
1769 |
+
"epoch": 1.4475825753949259,
|
1770 |
+
"grad_norm": 0.2603662610054016,
|
1771 |
+
"learning_rate": 6.177808339356954e-06,
|
1772 |
+
"loss": 0.4457,
|
1773 |
+
"step": 252
|
1774 |
+
},
|
1775 |
+
{
|
1776 |
+
"epoch": 1.453326950694112,
|
1777 |
+
"grad_norm": 0.2617853283882141,
|
1778 |
+
"learning_rate": 6.14523219763866e-06,
|
1779 |
+
"loss": 0.3843,
|
1780 |
+
"step": 253
|
1781 |
+
},
|
1782 |
+
{
|
1783 |
+
"epoch": 1.4590713259932984,
|
1784 |
+
"grad_norm": 0.2927669882774353,
|
1785 |
+
"learning_rate": 6.112604669781572e-06,
|
1786 |
+
"loss": 0.409,
|
1787 |
+
"step": 254
|
1788 |
+
},
|
1789 |
+
{
|
1790 |
+
"epoch": 1.4648157012924845,
|
1791 |
+
"grad_norm": 0.2545565962791443,
|
1792 |
+
"learning_rate": 6.079927219770623e-06,
|
1793 |
+
"loss": 0.4197,
|
1794 |
+
"step": 255
|
1795 |
+
},
|
1796 |
+
{
|
1797 |
+
"epoch": 1.4705600765916707,
|
1798 |
+
"grad_norm": 0.25056391954421997,
|
1799 |
+
"learning_rate": 6.047201313830724e-06,
|
1800 |
+
"loss": 0.412,
|
1801 |
+
"step": 256
|
1802 |
+
},
|
1803 |
+
{
|
1804 |
+
"epoch": 1.4763044518908568,
|
1805 |
+
"grad_norm": 0.26942598819732666,
|
1806 |
+
"learning_rate": 6.014428420360987e-06,
|
1807 |
+
"loss": 0.4377,
|
1808 |
+
"step": 257
|
1809 |
+
},
|
1810 |
+
{
|
1811 |
+
"epoch": 1.4820488271900432,
|
1812 |
+
"grad_norm": 0.2583950459957123,
|
1813 |
+
"learning_rate": 5.9816100098688456e-06,
|
1814 |
+
"loss": 0.3939,
|
1815 |
+
"step": 258
|
1816 |
+
},
|
1817 |
+
{
|
1818 |
+
"epoch": 1.4877932024892293,
|
1819 |
+
"grad_norm": 0.286765992641449,
|
1820 |
+
"learning_rate": 5.948747554904054e-06,
|
1821 |
+
"loss": 0.4441,
|
1822 |
+
"step": 259
|
1823 |
+
},
|
1824 |
+
{
|
1825 |
+
"epoch": 1.4935375777884154,
|
1826 |
+
"grad_norm": 0.2546658515930176,
|
1827 |
+
"learning_rate": 5.915842529992632e-06,
|
1828 |
+
"loss": 0.4084,
|
1829 |
+
"step": 260
|
1830 |
+
},
|
1831 |
+
{
|
1832 |
+
"epoch": 1.4992819530876016,
|
1833 |
+
"grad_norm": 0.24491065740585327,
|
1834 |
+
"learning_rate": 5.8828964115706925e-06,
|
1835 |
+
"loss": 0.3642,
|
1836 |
+
"step": 261
|
1837 |
+
},
|
1838 |
+
{
|
1839 |
+
"epoch": 1.505026328386788,
|
1840 |
+
"grad_norm": 0.2614350914955139,
|
1841 |
+
"learning_rate": 5.849910677918205e-06,
|
1842 |
+
"loss": 0.385,
|
1843 |
+
"step": 262
|
1844 |
+
},
|
1845 |
+
{
|
1846 |
+
"epoch": 1.510770703685974,
|
1847 |
+
"grad_norm": 0.263822466135025,
|
1848 |
+
"learning_rate": 5.816886809092651e-06,
|
1849 |
+
"loss": 0.3977,
|
1850 |
+
"step": 263
|
1851 |
+
},
|
1852 |
+
{
|
1853 |
+
"epoch": 1.5165150789851602,
|
1854 |
+
"grad_norm": 0.2711222469806671,
|
1855 |
+
"learning_rate": 5.783826286862631e-06,
|
1856 |
+
"loss": 0.4336,
|
1857 |
+
"step": 264
|
1858 |
+
},
|
1859 |
+
{
|
1860 |
+
"epoch": 1.5222594542843466,
|
1861 |
+
"grad_norm": 0.23413866758346558,
|
1862 |
+
"learning_rate": 5.750730594641367e-06,
|
1863 |
+
"loss": 0.391,
|
1864 |
+
"step": 265
|
1865 |
+
},
|
1866 |
+
{
|
1867 |
+
"epoch": 1.5280038295835328,
|
1868 |
+
"grad_norm": 0.26713666319847107,
|
1869 |
+
"learning_rate": 5.717601217420143e-06,
|
1870 |
+
"loss": 0.3995,
|
1871 |
+
"step": 266
|
1872 |
+
},
|
1873 |
+
{
|
1874 |
+
"epoch": 1.533748204882719,
|
1875 |
+
"grad_norm": 0.2639370262622833,
|
1876 |
+
"learning_rate": 5.68443964170168e-06,
|
1877 |
+
"loss": 0.4074,
|
1878 |
+
"step": 267
|
1879 |
+
},
|
1880 |
+
{
|
1881 |
+
"epoch": 1.5394925801819053,
|
1882 |
+
"grad_norm": 0.2865756154060364,
|
1883 |
+
"learning_rate": 5.6512473554334294e-06,
|
1884 |
+
"loss": 0.4146,
|
1885 |
+
"step": 268
|
1886 |
+
},
|
1887 |
+
{
|
1888 |
+
"epoch": 1.5452369554810914,
|
1889 |
+
"grad_norm": 0.256339430809021,
|
1890 |
+
"learning_rate": 5.618025847940817e-06,
|
1891 |
+
"loss": 0.4143,
|
1892 |
+
"step": 269
|
1893 |
+
},
|
1894 |
+
{
|
1895 |
+
"epoch": 1.5509813307802776,
|
1896 |
+
"grad_norm": 0.24432285130023956,
|
1897 |
+
"learning_rate": 5.584776609860414e-06,
|
1898 |
+
"loss": 0.3997,
|
1899 |
+
"step": 270
|
1900 |
+
},
|
1901 |
+
{
|
1902 |
+
"epoch": 1.556725706079464,
|
1903 |
+
"grad_norm": 0.2671893835067749,
|
1904 |
+
"learning_rate": 5.551501133073048e-06,
|
1905 |
+
"loss": 0.4625,
|
1906 |
+
"step": 271
|
1907 |
+
},
|
1908 |
+
{
|
1909 |
+
"epoch": 1.56247008137865,
|
1910 |
+
"grad_norm": 0.2614923417568207,
|
1911 |
+
"learning_rate": 5.518200910636875e-06,
|
1912 |
+
"loss": 0.3983,
|
1913 |
+
"step": 272
|
1914 |
+
},
|
1915 |
+
{
|
1916 |
+
"epoch": 1.5682144566778362,
|
1917 |
+
"grad_norm": 0.2648710608482361,
|
1918 |
+
"learning_rate": 5.4848774367203715e-06,
|
1919 |
+
"loss": 0.4264,
|
1920 |
+
"step": 273
|
1921 |
+
},
|
1922 |
+
{
|
1923 |
+
"epoch": 1.5739588319770226,
|
1924 |
+
"grad_norm": 0.25889015197753906,
|
1925 |
+
"learning_rate": 5.451532206535306e-06,
|
1926 |
+
"loss": 0.4327,
|
1927 |
+
"step": 274
|
1928 |
+
},
|
1929 |
+
{
|
1930 |
+
"epoch": 1.5797032072762087,
|
1931 |
+
"grad_norm": 0.28398048877716064,
|
1932 |
+
"learning_rate": 5.418166716269636e-06,
|
1933 |
+
"loss": 0.4216,
|
1934 |
+
"step": 275
|
1935 |
+
},
|
1936 |
+
{
|
1937 |
+
"epoch": 1.5854475825753949,
|
1938 |
+
"grad_norm": 0.2506558299064636,
|
1939 |
+
"learning_rate": 5.384782463020385e-06,
|
1940 |
+
"loss": 0.4183,
|
1941 |
+
"step": 276
|
1942 |
+
},
|
1943 |
+
{
|
1944 |
+
"epoch": 1.5911919578745812,
|
1945 |
+
"grad_norm": 0.2716221511363983,
|
1946 |
+
"learning_rate": 5.351380944726465e-06,
|
1947 |
+
"loss": 0.4131,
|
1948 |
+
"step": 277
|
1949 |
+
},
|
1950 |
+
{
|
1951 |
+
"epoch": 1.5969363331737674,
|
1952 |
+
"grad_norm": 0.2257940024137497,
|
1953 |
+
"learning_rate": 5.317963660101464e-06,
|
1954 |
+
"loss": 0.3597,
|
1955 |
+
"step": 278
|
1956 |
+
},
|
1957 |
+
{
|
1958 |
+
"epoch": 1.6026807084729535,
|
1959 |
+
"grad_norm": 0.28616511821746826,
|
1960 |
+
"learning_rate": 5.284532108566396e-06,
|
1961 |
+
"loss": 0.4147,
|
1962 |
+
"step": 279
|
1963 |
+
},
|
1964 |
+
{
|
1965 |
+
"epoch": 1.6084250837721399,
|
1966 |
+
"grad_norm": 0.26668059825897217,
|
1967 |
+
"learning_rate": 5.251087790182428e-06,
|
1968 |
+
"loss": 0.4133,
|
1969 |
+
"step": 280
|
1970 |
+
},
|
1971 |
+
{
|
1972 |
+
"epoch": 1.614169459071326,
|
1973 |
+
"grad_norm": 0.28087204694747925,
|
1974 |
+
"learning_rate": 5.217632205583574e-06,
|
1975 |
+
"loss": 0.4326,
|
1976 |
+
"step": 281
|
1977 |
+
},
|
1978 |
+
{
|
1979 |
+
"epoch": 1.6199138343705122,
|
1980 |
+
"grad_norm": 0.2532707452774048,
|
1981 |
+
"learning_rate": 5.184166855909355e-06,
|
1982 |
+
"loss": 0.4132,
|
1983 |
+
"step": 282
|
1984 |
+
},
|
1985 |
+
{
|
1986 |
+
"epoch": 1.6256582096696985,
|
1987 |
+
"grad_norm": 0.2916858196258545,
|
1988 |
+
"learning_rate": 5.150693242737444e-06,
|
1989 |
+
"loss": 0.4304,
|
1990 |
+
"step": 283
|
1991 |
+
},
|
1992 |
+
{
|
1993 |
+
"epoch": 1.6314025849688847,
|
1994 |
+
"grad_norm": 0.2610633969306946,
|
1995 |
+
"learning_rate": 5.117212868016303e-06,
|
1996 |
+
"loss": 0.4486,
|
1997 |
+
"step": 284
|
1998 |
+
},
|
1999 |
+
{
|
2000 |
+
"epoch": 1.6371469602680708,
|
2001 |
+
"grad_norm": 0.27046632766723633,
|
2002 |
+
"learning_rate": 5.083727233997775e-06,
|
2003 |
+
"loss": 0.3898,
|
2004 |
+
"step": 285
|
2005 |
+
},
|
2006 |
+
{
|
2007 |
+
"epoch": 1.6428913355672572,
|
2008 |
+
"grad_norm": 0.2789868116378784,
|
2009 |
+
"learning_rate": 5.05023784316969e-06,
|
2010 |
+
"loss": 0.4064,
|
2011 |
+
"step": 286
|
2012 |
+
},
|
2013 |
+
{
|
2014 |
+
"epoch": 1.6486357108664431,
|
2015 |
+
"grad_norm": 0.31009382009506226,
|
2016 |
+
"learning_rate": 5.016746198188439e-06,
|
2017 |
+
"loss": 0.44,
|
2018 |
+
"step": 287
|
2019 |
+
},
|
2020 |
+
{
|
2021 |
+
"epoch": 1.6543800861656295,
|
2022 |
+
"grad_norm": 0.24213601648807526,
|
2023 |
+
"learning_rate": 4.983253801811562e-06,
|
2024 |
+
"loss": 0.38,
|
2025 |
+
"step": 288
|
2026 |
+
},
|
2027 |
+
{
|
2028 |
+
"epoch": 1.6601244614648158,
|
2029 |
+
"grad_norm": 0.2574230432510376,
|
2030 |
+
"learning_rate": 4.949762156830312e-06,
|
2031 |
+
"loss": 0.4279,
|
2032 |
+
"step": 289
|
2033 |
+
},
|
2034 |
+
{
|
2035 |
+
"epoch": 1.6658688367640018,
|
2036 |
+
"grad_norm": 0.3645157814025879,
|
2037 |
+
"learning_rate": 4.916272766002227e-06,
|
2038 |
+
"loss": 0.4149,
|
2039 |
+
"step": 290
|
2040 |
+
},
|
2041 |
+
{
|
2042 |
+
"epoch": 1.6716132120631881,
|
2043 |
+
"grad_norm": 0.25819137692451477,
|
2044 |
+
"learning_rate": 4.882787131983698e-06,
|
2045 |
+
"loss": 0.3851,
|
2046 |
+
"step": 291
|
2047 |
+
},
|
2048 |
+
{
|
2049 |
+
"epoch": 1.6773575873623745,
|
2050 |
+
"grad_norm": 0.2552003264427185,
|
2051 |
+
"learning_rate": 4.849306757262558e-06,
|
2052 |
+
"loss": 0.3952,
|
2053 |
+
"step": 292
|
2054 |
+
},
|
2055 |
+
{
|
2056 |
+
"epoch": 1.6831019626615604,
|
2057 |
+      "grad_norm": 0.31992608308792114,
+      "learning_rate": 4.8158331440906466e-06,
+      "loss": 0.4331,
+      "step": 293
+    },
+    {"epoch": 1.6888463379607468, "grad_norm": 0.276024729013443, "learning_rate": 4.7823677944164285e-06, "loss": 0.4551, "step": 294},
+    {"epoch": 1.694590713259933, "grad_norm": 0.2532188892364502, "learning_rate": 4.748912209817572e-06, "loss": 0.4066, "step": 295},
+    {"epoch": 1.700335088559119, "grad_norm": 0.24477799236774445, "learning_rate": 4.715467891433607e-06, "loss": 0.3797, "step": 296},
+    {"epoch": 1.7060794638583054, "grad_norm": 0.2809993624687195, "learning_rate": 4.682036339898537e-06, "loss": 0.4005, "step": 297},
+    {"epoch": 1.7118238391574916, "grad_norm": 0.2791358530521393, "learning_rate": 4.6486190552735375e-06, "loss": 0.4353, "step": 298},
+    {"epoch": 1.7175682144566777, "grad_norm": 0.2509957253932953, "learning_rate": 4.615217536979616e-06, "loss": 0.4061, "step": 299},
+    {"epoch": 1.723312589755864, "grad_norm": 0.24833880364894867, "learning_rate": 4.581833283730367e-06, "loss": 0.3808, "step": 300},
+    {"epoch": 1.7290569650550502, "grad_norm": 0.30278247594833374, "learning_rate": 4.548467793464696e-06, "loss": 0.4447, "step": 301},
+    {"epoch": 1.7348013403542364, "grad_norm": 0.2316392958164215, "learning_rate": 4.515122563279631e-06, "loss": 0.3859, "step": 302},
+    {"epoch": 1.7405457156534228, "grad_norm": 0.25202158093452454, "learning_rate": 4.481799089363127e-06, "loss": 0.3879, "step": 303},
+    {"epoch": 1.746290090952609, "grad_norm": 0.2888227701187134, "learning_rate": 4.448498866926952e-06, "loss": 0.4187, "step": 304},
+    {"epoch": 1.752034466251795, "grad_norm": 0.2678024470806122, "learning_rate": 4.415223390139588e-06, "loss": 0.4348, "step": 305},
+    {"epoch": 1.7577788415509814, "grad_norm": 0.25489702820777893, "learning_rate": 4.381974152059184e-06, "loss": 0.4216, "step": 306},
+    {"epoch": 1.7635232168501676, "grad_norm": 0.2304321825504303, "learning_rate": 4.348752644566573e-06, "loss": 0.3878, "step": 307},
+    {"epoch": 1.7692675921493537, "grad_norm": 0.2631133198738098, "learning_rate": 4.315560358298321e-06, "loss": 0.4249, "step": 308},
+    {"epoch": 1.77501196744854, "grad_norm": 0.25828343629837036, "learning_rate": 4.2823987825798575e-06, "loss": 0.4005, "step": 309},
+    {"epoch": 1.7807563427477262, "grad_norm": 0.26033782958984375, "learning_rate": 4.249269405358634e-06, "loss": 0.3837, "step": 310},
+    {"epoch": 1.7865007180469124, "grad_norm": 0.26372194290161133, "learning_rate": 4.2161737131373695e-06, "loss": 0.4337, "step": 311},
+    {"epoch": 1.7922450933460987, "grad_norm": 0.24711427092552185, "learning_rate": 4.183113190907349e-06, "loss": 0.4053, "step": 312},
+    {"epoch": 1.7979894686452849, "grad_norm": 0.2691591680049896, "learning_rate": 4.150089322081797e-06, "loss": 0.3914, "step": 313},
+    {"epoch": 1.803733843944471, "grad_norm": 0.24242590367794037, "learning_rate": 4.1171035884293075e-06, "loss": 0.3837, "step": 314},
+    {"epoch": 1.8094782192436574, "grad_norm": 0.24888436496257782, "learning_rate": 4.084157470007371e-06, "loss": 0.411, "step": 315},
+    {"epoch": 1.8152225945428435, "grad_norm": 0.26371899247169495, "learning_rate": 4.051252445095946e-06, "loss": 0.4466, "step": 316},
+    {"epoch": 1.8209669698420297, "grad_norm": 0.2363133430480957, "learning_rate": 4.018389990131156e-06, "loss": 0.4095, "step": 317},
+    {"epoch": 1.826711345141216, "grad_norm": 0.23559501767158508, "learning_rate": 3.985571579639013e-06, "loss": 0.3997, "step": 318},
+    {"epoch": 1.832455720440402, "grad_norm": 0.23539544641971588, "learning_rate": 3.952798686169279e-06, "loss": 0.3972, "step": 319},
+    {"epoch": 1.8382000957395883, "grad_norm": 0.2795656621456146, "learning_rate": 3.920072780229378e-06, "loss": 0.3976, "step": 320},
+    {"epoch": 1.8439444710387747, "grad_norm": 0.2547081410884857, "learning_rate": 3.887395330218429e-06, "loss": 0.4259, "step": 321},
+    {"epoch": 1.8496888463379606, "grad_norm": 0.22198687493801117, "learning_rate": 3.854767802361342e-06, "loss": 0.3728, "step": 322},
+    {"epoch": 1.855433221637147, "grad_norm": 0.2664637267589569, "learning_rate": 3.822191660643047e-06, "loss": 0.4256, "step": 323},
+    {"epoch": 1.8611775969363333, "grad_norm": 0.22866730391979218, "learning_rate": 3.789668366742792e-06, "loss": 0.4283, "step": 324},
+    {"epoch": 1.8669219722355193, "grad_norm": 0.24711646139621735, "learning_rate": 3.7571993799685675e-06, "loss": 0.4124, "step": 325},
+    {"epoch": 1.8726663475347056, "grad_norm": 0.2538555860519409, "learning_rate": 3.7247861571916183e-06, "loss": 0.3934, "step": 326},
+    {"epoch": 1.8784107228338918, "grad_norm": 0.2567046582698822, "learning_rate": 3.6924301527810856e-06, "loss": 0.411, "step": 327},
+    {"epoch": 1.884155098133078, "grad_norm": 0.2725870907306671, "learning_rate": 3.6601328185387364e-06, "loss": 0.4195, "step": 328},
+    {"epoch": 1.8898994734322643, "grad_norm": 0.24136009812355042, "learning_rate": 3.6278956036338397e-06, "loss": 0.3904, "step": 329},
+    {"epoch": 1.8956438487314504, "grad_norm": 0.2512601315975189, "learning_rate": 3.5957199545381216e-06, "loss": 0.3869, "step": 330},
+    {"epoch": 1.9013882240306366, "grad_norm": 0.24334044754505157, "learning_rate": 3.5636073149608824e-06, "loss": 0.4019, "step": 331},
+    {"epoch": 1.907132599329823, "grad_norm": 0.24103941023349762, "learning_rate": 3.5315591257842e-06, "loss": 0.3609, "step": 332},
+    {"epoch": 1.912876974629009, "grad_norm": 0.24972982704639435, "learning_rate": 3.4995768249982975e-06, "loss": 0.3843, "step": 333},
+    {"epoch": 1.9186213499281952, "grad_norm": 0.24723373353481293, "learning_rate": 3.467661847637001e-06, "loss": 0.4227, "step": 334},
+    {"epoch": 1.9243657252273816, "grad_norm": 0.24389074742794037, "learning_rate": 3.4358156257133644e-06, "loss": 0.4296, "step": 335},
+    {"epoch": 1.9301101005265677, "grad_norm": 0.2541641294956207, "learning_rate": 3.404039588155413e-06, "loss": 0.3727, "step": 336},
+    {"epoch": 1.9358544758257539, "grad_norm": 0.22656212747097015, "learning_rate": 3.372335160742022e-06, "loss": 0.3666, "step": 337},
+    {"epoch": 1.9415988511249402, "grad_norm": 0.24361655116081238, "learning_rate": 3.3407037660389474e-06, "loss": 0.388, "step": 338},
+    {"epoch": 1.9473432264241264, "grad_norm": 0.24967317283153534, "learning_rate": 3.3091468233349934e-06, "loss": 0.4433, "step": 339},
+    {"epoch": 1.9530876017233125, "grad_norm": 0.23722046613693237, "learning_rate": 3.2776657485783357e-06, "loss": 0.4124, "step": 340},
+    {"epoch": 1.958831977022499, "grad_norm": 0.24369174242019653, "learning_rate": 3.246261954312979e-06, "loss": 0.4216, "step": 341},
+    {"epoch": 1.964576352321685, "grad_norm": 0.23776546120643616, "learning_rate": 3.2149368496153856e-06, "loss": 0.4256, "step": 342},
+    {"epoch": 1.9703207276208712, "grad_norm": 0.25640493631362915, "learning_rate": 3.1836918400312387e-06, "loss": 0.3812, "step": 343},
+    {"epoch": 1.9760651029200575, "grad_norm": 0.24129056930541992, "learning_rate": 3.152528327512395e-06, "loss": 0.4002, "step": 344},
+    {"epoch": 1.9818094782192437, "grad_norm": 0.24882204830646515, "learning_rate": 3.1214477103539585e-06, "loss": 0.414, "step": 345},
+    {"epoch": 1.9875538535184298, "grad_norm": 0.24418820440769196, "learning_rate": 3.0904513831315563e-06, "loss": 0.4261, "step": 346},
+    {"epoch": 1.9932982288176162, "grad_norm": 0.2543267011642456, "learning_rate": 3.059540736638751e-06, "loss": 0.4272, "step": 347},
+    {"epoch": 1.9990426041168023, "grad_norm": 0.23077604174613953, "learning_rate": 3.028717157824652e-06, "loss": 0.3639, "step": 348},
+    {"epoch": 2.0047869794159885, "grad_norm": 0.9153398871421814, "learning_rate": 2.9979820297316652e-06, "loss": 0.6275, "step": 349},
+    {"epoch": 2.010531354715175, "grad_norm": 0.31731805205345154, "learning_rate": 2.9673367314334533e-06, "loss": 0.4267, "step": 350},
+    {"epoch": 2.016275730014361, "grad_norm": 0.2978307008743286, "learning_rate": 2.936782637973044e-06, "loss": 0.3788, "step": 351},
+    {"epoch": 2.022020105313547, "grad_norm": 0.28210750222206116, "learning_rate": 2.9063211203011443e-06, "loss": 0.383, "step": 352},
+    {"epoch": 2.0277644806127335, "grad_norm": 0.28862592577934265, "learning_rate": 2.8759535452146128e-06, "loss": 0.3827, "step": 353},
+    {"epoch": 2.0335088559119194, "grad_norm": 0.3073420524597168, "learning_rate": 2.8456812752951483e-06, "loss": 0.3858, "step": 354},
+    {"epoch": 2.039253231211106, "grad_norm": 0.27528196573257446, "learning_rate": 2.815505668848136e-06, "loss": 0.3536, "step": 355},
+    {"epoch": 2.044997606510292, "grad_norm": 0.25948259234428406, "learning_rate": 2.785428079841709e-06, "loss": 0.3844, "step": 356},
+    {"epoch": 2.050741981809478, "grad_norm": 0.3084884285926819, "learning_rate": 2.755449857845992e-06, "loss": 0.3772, "step": 357},
+    {"epoch": 2.0564863571086645, "grad_norm": 0.2928623855113983, "learning_rate": 2.725572347972558e-06, "loss": 0.3562, "step": 358},
+    {"epoch": 2.062230732407851, "grad_norm": 0.27207672595977783, "learning_rate": 2.6957968908140546e-06, "loss": 0.362, "step": 359},
+    {"epoch": 2.0679751077070367, "grad_norm": 0.2691848576068878, "learning_rate": 2.666124822384071e-06, "loss": 0.3807, "step": 360},
+    {"epoch": 2.073719483006223, "grad_norm": 0.22607873380184174, "learning_rate": 2.636557474057173e-06, "loss": 0.3436, "step": 361},
+    {"epoch": 2.0794638583054095, "grad_norm": 0.25091448426246643, "learning_rate": 2.607096172509187e-06, "loss": 0.374, "step": 362},
+    {"epoch": 2.0852082336045954, "grad_norm": 0.306155800819397, "learning_rate": 2.5777422396576503e-06, "loss": 0.4164, "step": 363},
+    {"epoch": 2.0909526089037818, "grad_norm": 0.25360941886901855, "learning_rate": 2.5484969926025114e-06, "loss": 0.3706, "step": 364},
+    {"epoch": 2.096696984202968, "grad_norm": 0.2415456920862198, "learning_rate": 2.5193617435670244e-06, "loss": 0.4131, "step": 365},
+    {"epoch": 2.102441359502154, "grad_norm": 0.23606958985328674, "learning_rate": 2.4903377998388783e-06, "loss": 0.3781, "step": 366},
+    {"epoch": 2.1081857348013404, "grad_norm": 0.2365504652261734, "learning_rate": 2.461426463711535e-06, "loss": 0.3769, "step": 367},
+    {"epoch": 2.113930110100527, "grad_norm": 0.23365849256515503, "learning_rate": 2.4326290324257896e-06, "loss": 0.3573, "step": 368},
+    {"epoch": 2.1196744853997127, "grad_norm": 0.24357256293296814, "learning_rate": 2.403946798111576e-06, "loss": 0.3505, "step": 369},
+    {"epoch": 2.125418860698899, "grad_norm": 0.28438547253608704, "learning_rate": 2.37538104772998e-06, "loss": 0.4116, "step": 370},
+    {"epoch": 2.131163235998085, "grad_norm": 0.21200957894325256, "learning_rate": 2.3469330630154974e-06, "loss": 0.326, "step": 371},
+    {"epoch": 2.1369076112972714, "grad_norm": 0.2492566853761673, "learning_rate": 2.318604120418521e-06, "loss": 0.3999, "step": 372},
+    {"epoch": 2.1426519865964577, "grad_norm": 0.22796769440174103, "learning_rate": 2.2903954910480746e-06, "loss": 0.391, "step": 373},
+    {"epoch": 2.1483963618956436, "grad_norm": 0.23675256967544556, "learning_rate": 2.2623084406147643e-06, "loss": 0.362, "step": 374},
+    {"epoch": 2.15414073719483, "grad_norm": 0.22599707543849945, "learning_rate": 2.234344229374003e-06, "loss": 0.3744, "step": 375},
+    {"epoch": 2.1598851124940164, "grad_norm": 0.23200853168964386, "learning_rate": 2.2065041120694487e-06, "loss": 0.3748, "step": 376},
+    {"epoch": 2.1656294877932023, "grad_norm": 0.22807936370372772, "learning_rate": 2.178789337876716e-06, "loss": 0.3762, "step": 377},
+    {"epoch": 2.1713738630923887, "grad_norm": 0.21814514696598053, "learning_rate": 2.151201150347318e-06, "loss": 0.3669, "step": 378},
+    {"epoch": 2.177118238391575, "grad_norm": 0.22707633674144745, "learning_rate": 2.123740787352872e-06, "loss": 0.341, "step": 379},
+    {"epoch": 2.182862613690761, "grad_norm": 0.2529568672180176, "learning_rate": 2.096409481029556e-06, "loss": 0.3723, "step": 380},
+    {"epoch": 2.1886069889899473, "grad_norm": 0.231676384806633, "learning_rate": 2.069208457722828e-06, "loss": 0.3581, "step": 381},
+    {"epoch": 2.1943513642891337, "grad_norm": 0.22723394632339478, "learning_rate": 2.042138937932388e-06, "loss": 0.3621, "step": 382},
+    {"epoch": 2.2000957395883196, "grad_norm": 0.24381379783153534, "learning_rate": 2.015202136257432e-06, "loss": 0.411, "step": 383},
+    {"epoch": 2.205840114887506, "grad_norm": 0.22075577080249786, "learning_rate": 1.988399261342135e-06, "loss": 0.3448, "step": 384},
+    {"epoch": 2.2115844901866923, "grad_norm": 0.2534730136394501, "learning_rate": 1.9617315158214363e-06, "loss": 0.3949, "step": 385},
+    {"epoch": 2.2173288654858783, "grad_norm": 0.2261105328798294, "learning_rate": 1.935200096267064e-06, "loss": 0.3752, "step": 386},
+    {"epoch": 2.2230732407850646, "grad_norm": 0.22049082815647125, "learning_rate": 1.908806193133855e-06, "loss": 0.3693, "step": 387},
+    {"epoch": 2.228817616084251, "grad_norm": 0.21172842383384705, "learning_rate": 1.8825509907063328e-06, "loss": 0.395, "step": 388},
+    {"epoch": 2.234561991383437, "grad_norm": 0.24431262910366058, "learning_rate": 1.856435667045577e-06, "loss": 0.3871, "step": 389},
+    {"epoch": 2.2403063666826233, "grad_norm": 0.24871356785297394, "learning_rate": 1.8304613939363531e-06, "loss": 0.3852, "step": 390},
+    {"epoch": 2.2460507419818097, "grad_norm": 0.21080052852630615, "learning_rate": 1.8046293368345485e-06, "loss": 0.3687, "step": 391},
+    {"epoch": 2.2517951172809956, "grad_norm": 0.1993173360824585, "learning_rate": 1.7789406548148647e-06, "loss": 0.3554, "step": 392},
+    {"epoch": 2.257539492580182, "grad_norm": 0.23289141058921814, "learning_rate": 1.7533965005188242e-06, "loss": 0.4427, "step": 393},
+    {"epoch": 2.2632838678793683, "grad_norm": 0.20976853370666504, "learning_rate": 1.7279980201030382e-06, "loss": 0.3516, "step": 394},
+    {"epoch": 2.2690282431785542, "grad_norm": 0.23207247257232666, "learning_rate": 1.7027463531877897e-06, "loss": 0.3759, "step": 395},
+    {"epoch": 2.2747726184777406, "grad_norm": 0.22939598560333252, "learning_rate": 1.677642632805892e-06, "loss": 0.3825, "step": 396},
+    {"epoch": 2.280516993776927, "grad_norm": 0.22694642841815948, "learning_rate": 1.6526879853518558e-06, "loss": 0.3804, "step": 397},
+    {"epoch": 2.286261369076113, "grad_norm": 0.2267027348279953, "learning_rate": 1.6278835305313462e-06, "loss": 0.3835, "step": 398},
+    {"epoch": 2.2920057443752992, "grad_norm": 0.23273469507694244, "learning_rate": 1.6032303813109368e-06, "loss": 0.3769, "step": 399},
+    {"epoch": 2.297750119674485, "grad_norm": 0.22379297018051147, "learning_rate": 1.578729643868181e-06, "loss": 0.3861, "step": 400},
+    {"epoch": 2.3034944949736715, "grad_norm": 0.2582657039165497, "learning_rate": 1.5543824175419691e-06, "loss": 0.3953, "step": 401},
+    {"epoch": 2.309238870272858, "grad_norm": 0.20334425568580627, "learning_rate": 1.5301897947832063e-06, "loss": 0.3766, "step": 402},
+    {"epoch": 2.3149832455720443, "grad_norm": 0.20961356163024902, "learning_rate": 1.5061528611057917e-06, "loss": 0.3515, "step": 403},
+    {"epoch": 2.32072762087123, "grad_norm": 0.226267471909523, "learning_rate": 1.4822726950379207e-06, "loss": 0.4064, "step": 404},
+    {"epoch": 2.3264719961704166, "grad_norm": 0.2132333666086197, "learning_rate": 1.4585503680736756e-06, "loss": 0.3639, "step": 405},
+    {"epoch": 2.3322163714696025, "grad_norm": 0.22161203622817993, "learning_rate": 1.4349869446249664e-06, "loss": 0.3643, "step": 406},
+    {"epoch": 2.337960746768789, "grad_norm": 0.21189740300178528, "learning_rate": 1.4115834819737534e-06, "loss": 0.3316, "step": 407},
+    {"epoch": 2.343705122067975, "grad_norm": 0.2345123291015625, "learning_rate": 1.3883410302246237e-06, "loss": 0.3701, "step": 408},
+    {"epoch": 2.349449497367161, "grad_norm": 0.23276999592781067, "learning_rate": 1.3652606322576606e-06, "loss": 0.3434, "step": 409},
+    {"epoch": 2.3551938726663475, "grad_norm": 0.21485473215579987, "learning_rate": 1.3423433236816563e-06, "loss": 0.3821, "step": 410},
+    {"epoch": 2.360938247965534, "grad_norm": 0.21681098639965057, "learning_rate": 1.3195901327876426e-06, "loss": 0.3717, "step": 411},
+    {"epoch": 2.36668262326472, "grad_norm": 0.2142193466424942, "learning_rate": 1.2970020805027555e-06, "loss": 0.3555, "step": 412},
+    {"epoch": 2.372426998563906, "grad_norm": 0.2359546720981598, "learning_rate": 1.2745801803444192e-06, "loss": 0.3989, "step": 413},
+    {"epoch": 2.3781713738630925, "grad_norm": 0.25387322902679443, "learning_rate": 1.25232543837488e-06, "loss": 0.3943, "step": 414},
+    {"epoch": 2.3839157491622784, "grad_norm": 0.21573728322982788, "learning_rate": 1.2302388531560515e-06, "loss": 0.3693, "step": 415},
+    {"epoch": 2.389660124461465, "grad_norm": 0.2112240195274353, "learning_rate": 1.2083214157047257e-06, "loss": 0.3896, "step": 416},
+    {"epoch": 2.395404499760651, "grad_norm": 0.21627095341682434, "learning_rate": 1.186574109448091e-06, "loss": 0.3827, "step": 417},
+    {"epoch": 2.401148875059837, "grad_norm": 0.20708608627319336, "learning_rate": 1.164997910179615e-06, "loss": 0.3978, "step": 418},
+    {"epoch": 2.4068932503590235, "grad_norm": 0.21296700835227966, "learning_rate": 1.1435937860152579e-06, "loss": 0.384, "step": 419},
+    {"epoch": 2.41263762565821, "grad_norm": 0.21074937283992767, "learning_rate": 1.1223626973500395e-06, "loss": 0.3468, "step": 420},
+    {"epoch": 2.4183820009573957, "grad_norm": 0.22150783240795135, "learning_rate": 1.1013055968149343e-06, "loss": 0.3817, "step": 421},
+    {"epoch": 2.424126376256582, "grad_norm": 0.21905986964702606, "learning_rate": 1.0804234292341426e-06, "loss": 0.3984, "step": 422},
+    {"epoch": 2.4298707515557685, "grad_norm": 0.20688596367835999, "learning_rate": 1.0597171315826805e-06, "loss": 0.3382, "step": 423},
+    {"epoch": 2.4356151268549544, "grad_norm": 0.21128499507904053, "learning_rate": 1.0391876329443534e-06, "loss": 0.3582, "step": 424},
+    {"epoch": 2.4413595021541408, "grad_norm": 0.23447024822235107, "learning_rate": 1.0188358544700583e-06, "loss": 0.3776, "step": 425},
+    {"epoch": 2.447103877453327, "grad_norm": 0.21497882902622223, "learning_rate": 9.986627093364542e-07, "loss": 0.4066, "step": 426},
+    {"epoch": 2.452848252752513, "grad_norm": 0.20009803771972656, "learning_rate": 9.786691027049893e-07, "loss": 0.3365, "step": 427},
+    {"epoch": 2.4585926280516994, "grad_norm": 0.24452891945838928, "learning_rate": 9.588559316812906e-07, "loss": 0.4195, "step": 428},
+    {"epoch": 2.4643370033508853, "grad_norm": 0.20632074773311615, "learning_rate": 9.392240852749007e-07, "loss": 0.3858, "step": 429},
+    {"epoch": 2.4700813786500717, "grad_norm": 0.2089974731206894, "learning_rate": 9.197744443594003e-07, "loss": 0.3525, "step": 430},
+    {"epoch": 2.475825753949258, "grad_norm": 0.20910853147506714, "learning_rate": 9.005078816328772e-07, "loss": 0.4122, "step": 431},
+    {"epoch": 2.4815701292484444, "grad_norm": 0.2021329402923584, "learning_rate": 8.814252615787661e-07, "loss": 0.3664, "step": 432},
+    {"epoch": 2.4873145045476304, "grad_norm": 0.22205480933189392, "learning_rate": 8.625274404270662e-07, "loss": 0.4036, "step": 433},
+    {"epoch": 2.4930588798468167, "grad_norm": 0.21877549588680267, "learning_rate": 8.438152661159165e-07, "loss": 0.3557, "step": 434},
+    {"epoch": 2.4988032551460027, "grad_norm": 0.2282644808292389, "learning_rate": 8.252895782535569e-07, "loss": 0.3974, "step": 435},
+    {"epoch": 2.504547630445189, "grad_norm": 0.19086650013923645, "learning_rate": 8.069512080806441e-07, "loss": 0.3348, "step": 436},
+    {"epoch": 2.5102920057443754, "grad_norm": 0.22070568799972534, "learning_rate": 7.88800978432967e-07, "loss": 0.3948, "step": 437},
+    {"epoch": 2.5160363810435618, "grad_norm": 0.2190258651971817, "learning_rate": 7.708397037045129e-07, "loss": 0.4058, "step": 438},
+    {"epoch": 2.5217807563427477, "grad_norm": 0.19553068280220032, "learning_rate": 7.530681898109393e-07, "loss": 0.3539, "step": 439},
+    {"epoch": 2.527525131641934, "grad_norm": 0.22662606835365295, "learning_rate": 7.35487234153402e-07, "loss": 0.4109, "step": 440},
+    {"epoch": 2.53326950694112, "grad_norm": 0.229027658700943, "learning_rate": 7.180976255827809e-07, "loss": 0.4039, "step": 441},
+    {"epoch": 2.5390138822403063, "grad_norm": 0.19389323890209198, "learning_rate": 7.009001443642843e-07, "loss": 0.3364, "step": 442},
+    {"epoch": 2.5447582575394927, "grad_norm": 0.18867188692092896, "learning_rate": 6.838955621424404e-07, "loss": 0.3302, "step": 443},
+    {"epoch": 2.550502632838679, "grad_norm": 0.22040928900241852, "learning_rate": 6.67084641906468e-07, "loss": 0.4042, "step": 444},
+    {"epoch": 2.556247008137865, "grad_norm": 0.20983703434467316, "learning_rate": 6.50468137956049e-07, "loss": 0.3732, "step": 445},
+    {"epoch": 2.5619913834370514, "grad_norm": 0.1974366009235382, "learning_rate": 6.340467958674762e-07, "loss": 0.3608, "step": 446},
+    {"epoch": 2.5677357587362373, "grad_norm": 0.23682722449302673, "learning_rate": 6.178213524602061e-07, "loss": 0.396, "step": 447},
+    {"epoch": 2.5734801340354236, "grad_norm": 0.21820193529129028, "learning_rate": 6.017925357637932e-07, "loss": 0.3567, "step": 448},
+    {"epoch": 2.57922450933461, "grad_norm": 0.22981062531471252, "learning_rate": 5.859610649852249e-07, "loss": 0.3642, "step": 449},
+    {"epoch": 2.584968884633796, "grad_norm": 0.2145363688468933, "learning_rate": 5.703276504766514e-07, "loss": 0.3495, "step": 450},
+    {"epoch": 2.5907132599329823, "grad_norm": 0.19343282282352448, "learning_rate": 5.548929937035147e-07, "loss": 0.3339, "step": 451},
+    {"epoch": 2.5964576352321687, "grad_norm": 0.20028981566429138, "learning_rate": 5.396577872130676e-07, "loss": 0.3781, "step": 452},
+    {"epoch": 2.6022020105313546, "grad_norm": 0.20916354656219482, "learning_rate": 5.246227146033089e-07, "loss": 0.398, "step": 453},
+    {"epoch": 2.607946385830541, "grad_norm": 0.19891418516635895, "learning_rate": 5.097884504922996e-07, "loss": 0.3522, "step": 454},
+    {"epoch": 2.6136907611297273, "grad_norm": 0.22149665653705597, "learning_rate": 4.951556604879049e-07, "loss": 0.4384, "step": 455},
+    {"epoch": 2.6194351364289132, "grad_norm": 0.20566165447235107, "learning_rate": 4.807250011579168e-07, "loss": 0.3747, "step": 456},
+    {"epoch": 2.6251795117280996, "grad_norm": 0.20062044262886047, "learning_rate": 4.6649712000060297e-07, "loss": 0.3277, "step": 457},
+    {"epoch": 2.6309238870272855, "grad_norm": 0.20748507976531982, "learning_rate": 4.5247265541564836e-07, "loss": 0.3733, "step": 458},
+    {"epoch": 2.636668262326472, "grad_norm": 0.20769307017326355, "learning_rate": 4.386522366755169e-07, "loss": 0.3715, "step": 459},
+    {"epoch": 2.6424126376256583, "grad_norm": 0.21871528029441833, "learning_rate": 4.250364838972065e-07, "loss": 0.4001, "step": 460},
+    {"epoch": 2.6481570129248446, "grad_norm": 0.19270025193691254, "learning_rate": 4.116260080144352e-07, "loss": 0.3847, "step": 461},
+    {"epoch": 2.6539013882240305, "grad_norm": 0.22346258163452148, "learning_rate": 3.98421410750221e-07, "loss": 0.3865, "step": 462},
+    {"epoch": 2.659645763523217, "grad_norm": 0.2189069241285324, "learning_rate": 3.854232845898859e-07, "loss": 0.3777, "step": 463},
+    {"epoch": 2.665390138822403, "grad_norm": 0.19334660470485687, "learning_rate": 3.7263221275447125e-07, "loss": 0.3738, "step": 464},
+    {"epoch": 2.671134514121589, "grad_norm": 0.1997268944978714, "learning_rate": 3.60048769174568e-07, "loss": 0.3713, "step": 465},
+    {"epoch": 2.6768788894207756, "grad_norm": 0.21596620976924896, "learning_rate": 3.4767351846456744e-07, "loss": 0.3963, "step": 466},
+    {"epoch": 2.682623264719962, "grad_norm": 0.2167111337184906, "learning_rate": 3.355070158973212e-07, "loss": 0.3719, "step": 467},
+    {"epoch": 2.688367640019148, "grad_norm": 0.22667430341243744, "learning_rate": 3.235498073792342e-07, "loss": 0.3777, "step": 468},
+    {"epoch": 2.694112015318334, "grad_norm": 0.22425274550914764, "learning_rate": 3.118024294257621e-07, "loss": 0.3934, "step": 469},
+    {"epoch": 2.69985639061752, "grad_norm": 0.21314410865306854, "learning_rate": 3.002654091373453e-07, "loss": 0.3514, "step": 470},
+    {"epoch": 2.7056007659167065, "grad_norm": 0.20535998046398163, "learning_rate": 2.889392641757527e-07, "loss": 0.4027, "step": 471},
+    {"epoch": 2.711345141215893, "grad_norm": 0.19453689455986023, "learning_rate": 2.778245027408566e-07, "loss": 0.3619, "step": 472},
+    {"epoch": 2.7170895165150792, "grad_norm": 0.2257540374994278, "learning_rate": 2.669216235478295e-07, "loss": 0.4111, "step": 473},
+    {"epoch": 2.722833891814265, "grad_norm": 0.22148896753787994, "learning_rate": 2.562311158047692e-07, "loss": 0.3853, "step": 474},
+    {"epoch": 2.7285782671134515, "grad_norm": 0.23967334628105164, "learning_rate": 2.45753459190744e-07, "loss": 0.3789, "step": 475},
+    {"epoch": 2.7343226424126374, "grad_norm": 0.21828240156173706, "learning_rate": 2.354891238342738e-07, "loss": 0.3635, "step": 476},
+    {"epoch": 2.740067017711824, "grad_norm": 0.22273699939250946, "learning_rate": 2.254385702922318e-07, "loss": 0.3866, "step": 477},
+    {"epoch": 2.74581139301101, "grad_norm": 0.21624407172203064, "learning_rate": 2.1560224952918373e-07, "loss": 0.3334, "step": 478},
+    {"epoch": 2.751555768310196, "grad_norm": 0.20374171435832977, "learning_rate": 2.0598060289714893e-07, "loss": 0.333, "step": 479},
+    {"epoch": 2.7573001436093825, "grad_norm": 0.21629025042057037, "learning_rate": 1.9657406211579966e-07, "loss": 0.4038, "step": 480},
+    {"epoch": 2.763044518908569, "grad_norm": 0.19769959151744843, "learning_rate": 1.8738304925308926e-07, "loss": 0.3912, "step": 481},
+    {"epoch": 2.7687888942077548, "grad_norm": 0.20424911379814148, "learning_rate": 1.7840797670631572e-07, "loss": 0.3857, "step": 482},
+    {"epoch": 2.774533269506941, "grad_norm": 0.22209753096103668, "learning_rate": 1.6964924718361364e-07, "loss": 0.377, "step": 483},
+    {"epoch": 2.7802776448061275, "grad_norm": 0.18916495144367218, "learning_rate": 1.6110725368589041e-07, "loss": 0.3567, "step": 484},
+    {"epoch": 2.7860220201053134, "grad_norm": 0.21668356657028198, "learning_rate": 1.5278237948918585e-07, "loss": 0.4076, "step": 485},
+    {"epoch": 2.7917663954045, "grad_norm": 0.20810151100158691, "learning_rate": 1.4467499812748143e-07, "loss": 0.3784, "step": 486},
+    {"epoch": 2.7975107707036857, "grad_norm": 0.2041124403476715, "learning_rate": 1.3678547337593494e-07, "loss": 0.3887, "step": 487},
+    {"epoch": 2.803255146002872, "grad_norm": 0.24317069351673126, "learning_rate": 1.2911415923456017e-07, "loss": 0.3808, "step": 488},
+    {"epoch": 2.8089995213020584, "grad_norm": 0.19764302670955658, "learning_rate": 1.2166139991234227e-07, "loss": 0.3556, "step": 489},
+    {"epoch": 2.814743896601245, "grad_norm": 0.22858253121376038, "learning_rate": 1.1442752981179527e-07, "loss": 0.3944, "step": 490},
+    {"epoch": 2.8204882719004307, "grad_norm": 0.20770110189914703, "learning_rate": 1.0741287351395402e-07, "loss": 0.4109, "step": 491},
+    {"epoch": 2.826232647199617, "grad_norm": 0.20658861100673676, "learning_rate": 1.0061774576381411e-07, "loss": 0.375, "step": 492},
+    {"epoch": 2.831977022498803, "grad_norm": 0.21842309832572937, "learning_rate": 9.404245145620717e-08, "loss": 0.4091, "step": 493},
+    {"epoch": 2.8377213977979894, "grad_norm": 0.20370720326900482, "learning_rate": 8.768728562211948e-08, "loss": 0.3252, "step": 494},
+    {"epoch": 2.8434657730971757, "grad_norm": 0.20084549486637115, "learning_rate": 8.155253341545655e-08, "loss": 0.4111, "step": 495},
+    {"epoch": 2.849210148396362, "grad_norm": 0.23278358578681946, "learning_rate": 7.563847010024716e-08, "loss": 0.3762, "step": 496},
+    {"epoch": 2.854954523695548, "grad_norm": 0.20328493416309357, "learning_rate": 6.994536103829164e-08, "loss": 0.3765, "step": 497},
+    {"epoch": 2.8606988989947344, "grad_norm": 0.20402538776397705, "learning_rate": 6.447346167725688e-08, "loss": 0.3449, "step": 498},
+    {"epoch": 2.8664432742939203, "grad_norm": 0.22473885118961334, "learning_rate": 5.9223017539213335e-08, "loss": 0.4115, "step": 499},
+    {"epoch": 2.8721876495931067, "grad_norm": 0.18734395503997803, "learning_rate": 5.4194264209617705e-08, "loss": 0.3474, "step": 500},
+    {"epoch": 2.877932024892293, "grad_norm": 0.2044266164302826, "learning_rate": 4.9387427326745287e-08, "loss": 0.3939, "step": 501},
+    {"epoch": 2.8836764001914794, "grad_norm": 0.21485216915607452, "learning_rate": 4.4802722571561374e-08, "loss": 0.4016, "step": 502},
+    {"epoch": 2.8894207754906653, "grad_norm": 0.22615912556648254, "learning_rate": 4.044035565804793e-08, "loss": 0.3599, "step": 503},
+    {"epoch": 2.8951651507898517, "grad_norm": 0.2036847621202469, "learning_rate": 3.6300522323969855e-08, "loss": 0.3266, "step": 504},
+    {"epoch": 2.9009095260890376, "grad_norm": 0.2017732411623001, "learning_rate": 3.2383408322095856e-08, "loss": 0.4101, "step": 505},
+    {"epoch": 2.906653901388224, "grad_norm": 0.19980326294898987, "learning_rate": 2.8689189411859607e-08, "loss": 0.3482, "step": 506},
+    {"epoch": 2.9123982766874104, "grad_norm": 0.221945121884346, "learning_rate": 2.5218031351478268e-08, "loss": 0.4437, "step": 507},
+    {"epoch": 2.9181426519865967, "grad_norm": 0.2403242588043213, "learning_rate": 2.1970089890509527e-08, "loss": 0.3528, "step": 508},
+    {"epoch": 2.9238870272857826, "grad_norm": 0.22222495079040527, "learning_rate": 1.8945510762868325e-08, "loss": 0.3765, "step": 509},
+    {"epoch": 2.929631402584969, "grad_norm": 0.20567429065704346, "learning_rate": 1.614442968028429e-08, "loss": 0.3844, "step": 510},
+    {"epoch": 2.935375777884155, "grad_norm": 0.1993645876646042, "learning_rate": 1.3566972326214956e-08, "loss": 0.3712, "step": 511},
+    {"epoch": 2.9411201531833413, "grad_norm": 0.21140974760055542, "learning_rate": 1.1213254350202486e-08, "loss": 0.357, "step": 512},
+    {"epoch": 2.9468645284825277, "grad_norm": 0.20113182067871094, "learning_rate": 9.083381362690603e-09, "loss": 0.3692, "step": 513},
+    {"epoch": 2.9526089037817136, "grad_norm": 0.2018483579158783, "learning_rate": 7.177448930279496e-09, "loss": 0.3817, "step": 514},
+    {"epoch": 2.9583532790809, "grad_norm": 0.21870052814483643, "learning_rate": 5.495542571443135e-09, "loss": 0.3505, "step": 515},
+    {"epoch": 2.9640976543800863, "grad_norm": 0.2133331298828125, "learning_rate": 4.037737752686788e-09, "loss": 0.3767, "step": 516},
+    {"epoch": 2.9698420296792722, "grad_norm": 0.19916494190692902, "learning_rate": 2.8040998851674996e-09, "loss": 0.374, "step": 517},
+    {"epoch": 2.9755864049784586, "grad_norm": 0.2075948715209961, "learning_rate": 1.7946843217514498e-09, "loss": 0.3572, "step": 518},
+    {"epoch": 2.981330780277645, "grad_norm": 0.20564815402030945, "learning_rate": 1.009536354537044e-09, "loss": 0.3296, "step": 519},
+    {"epoch": 2.987075155576831, "grad_norm": 0.22070202231407166, "learning_rate": 4.486912128182086e-10, "loss": 0.4276, "step": 520},
+    {"epoch": 2.9928195308760173, "grad_norm": 0.19630590081214905, "learning_rate": 1.1217406150676457e-10, "loss": 0.3737, "step": 521},
+    {"epoch": 2.998563906175203, "grad_norm": 0.20596085488796234, "learning_rate": 0.0, "loss": 0.3589, "step": 522},
+    {"epoch": 2.998563906175203, "step": 522, "total_flos": 1806937112969216.0, "train_loss": 0.43391252791516166, "train_runtime": 96882.8659, "train_samples_per_second": 0.517, "train_steps_per_second": 0.005}
+  ],
+  "logging_steps": 1,
+  "max_steps": 522,
+  "num_input_tokens_seen": 0,
+  "num_train_epochs": 3,
+  "save_steps": 100,
+  "stateful_callbacks": {
+    "TrainerControl": {
+      "args": {
+        "should_epoch_stop": false,
+        "should_evaluate": false,
+        "should_log": false,
+        "should_save": true,
+        "should_training_stop": true
+      },
+      "attributes": {}
+    }
+  },
+  "total_flos": 1806937112969216.0,
+  "train_batch_size": 1,
+  "trial_name": null,
+  "trial_params": null
+}
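Aside: `log_history` above is the machine-readable training curve; a plot like `training_loss.png` can presumably be regenerated from it. A minimal sketch (assuming `trainer_state.json` sits in the working directory and matplotlib is installed; neither is guaranteed by this repo):

```python
import json
import matplotlib.pyplot as plt

# Load the state file written by the HF Trainer at the end of training.
with open("trainer_state.json") as f:
    state = json.load(f)

# log_history holds one record per logged step (logging_steps = 1 here),
# plus a final summary record that has no "loss" key, so filter on it.
records = [r for r in state["log_history"] if "loss" in r]
steps = [r["step"] for r in records]
losses = [r["loss"] for r in records]

plt.plot(steps, losses)
plt.xlabel("step")
plt.ylabel("training loss")
plt.savefig("training_loss.png")
```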
training_args.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c81c8ae17ee10b5fc27984fbb7ac6e98fa48f542abd934c57f93e87f13113668
+size 7416
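Aside: this is a Git LFS pointer; the real `training_args.bin` is a pickled `TrainingArguments` object. A minimal sketch of inspecting it after downloading the actual file (assuming compatible `torch` and `transformers` versions; `weights_only=False` is needed on recent torch because the file is not a plain tensor checkpoint):

```python
import torch
from transformers import TrainingArguments  # class must be importable to unpickle

# Load the serialized training arguments and inspect a few fields.
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs, args.lr_scheduler_type)
```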
training_loss.png
ADDED
vocab.json
ADDED
The diff for this file is too large to render. See raw diff.