Commit ca19e04 · verified · 1 Parent(s): 16b7989
1231czx committed: Upload LlamaForCausalLM
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "/home/wexiong_google_com/sft/RLHF-Reward-Modeling/pair-pm/llama31_packed_8192_math_bz32_lr2e6_no_self_correct/checkpoint-2238",
+  "_name_or_path": "/home/wexiong_google_com/sft/RLHF-Reward-Modeling/pair-pm/llama3_packed_8192_math_bz32_lr2e6_no_self_correct/checkpoint-1053",
   "architectures": [
     "LlamaForCausalLM"
   ],
@@ -11,7 +11,7 @@
   "hidden_size": 4096,
   "initializer_range": 0.02,
   "intermediate_size": 14336,
-  "max_position_embeddings": 131072,
+  "max_position_embeddings": 8192,
   "mlp_bias": false,
   "model_type": "llama",
   "num_attention_heads": 32,
@@ -19,13 +19,7 @@
   "num_key_value_heads": 8,
   "pretraining_tp": 1,
   "rms_norm_eps": 1e-05,
-  "rope_scaling": {
-    "factor": 8.0,
-    "high_freq_factor": 4.0,
-    "low_freq_factor": 1.0,
-    "original_max_position_embeddings": 8192,
-    "rope_type": "llama3"
-  },
+  "rope_scaling": null,
   "rope_theta": 500000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
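
Taken together, these edits move the checkpoint from Llama-3.1-style long-context settings (131072 positions with llama3 RoPE scaling) back to plain 8192-position Llama 3 settings, matching the new checkpoint path in _name_or_path. A minimal sketch of inspecting the changed fields through the standard transformers AutoConfig API; "path/to/checkpoint" is a placeholder for a local clone of this repo, not something the commit defines:

# A minimal sketch, assuming a local clone of this repo at "path/to/checkpoint"
# (placeholder). Prints the config fields this commit changes.
from transformers import AutoConfig

cfg = AutoConfig.from_pretrained("path/to/checkpoint")
print(cfg.max_position_embeddings)  # 8192 after this commit (was 131072)
print(cfg.rope_scaling)             # None after this commit (was the llama3 scaling dict)
print(cfg.rope_theta)               # 500000.0, unchanged
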
generation_config.json CHANGED
@@ -3,9 +3,9 @@
   "do_sample": true,
   "eos_token_id": [
     128001,
-    128008,
     128009
   ],
+  "max_length": 4096,
   "temperature": 0.6,
   "top_p": 0.9,
   "transformers_version": "4.44.1"
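
The generation defaults drop 128008 from the end-of-sequence list and add an explicit max_length of 4096, which in transformers caps prompt plus completion unless a caller overrides it (for example with max_new_tokens). A sketch of loading these defaults with transformers' GenerationConfig; the path is again a placeholder:

# A minimal sketch: load the sampling defaults this commit sets.
# "path/to/checkpoint" is a placeholder for a local clone of this repo.
from transformers import GenerationConfig

gen = GenerationConfig.from_pretrained("path/to/checkpoint")
print(gen.eos_token_id)                           # [128001, 128009]; 128008 removed
print(gen.max_length)                             # 4096, newly added
print(gen.do_sample, gen.temperature, gen.top_p)  # True 0.6 0.9
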
model-00001-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f980a7f3709b61c9e130d0127ab1b859cbcc5b15e65a6091bd1da33d22b980e2
+oid sha256:f3e56ae776f0c1609d28e43e913adbec972768d5d2aad462cf3d060d21574433
 size 4976698672
model-00002-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:44b800f7921dabc29f0fe25d0c1a0ff02cc017f32a1428e4e624db77a0c7975c
+oid sha256:acf235cbac5e0e7cb4a6c63b5b3944e701319551a94b9de3fb3ab12fca982e5d
 size 4999802720
model-00003-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:332450580c2b4e546068802ea15e10d6d2f12a9b9ea4b28fd2ae175d88e22503
+oid sha256:b9374f3e82fd2ff83dece2a140d34bc00906ecfed532a4580002da5be16a8c9f
 size 4915916176
model-00004-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:8f21187ff2448194b77293f837bc8b3833f8cd2d82f98dd1ad0396c111ca6499
+oid sha256:c2bf98081cbd0f673b384127233f54a0f3a0a2ba755f67dbb4fccc45acbf4983
 size 1168138808
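
All four shard pointers change only their oid (the SHA-256 of the shard contents) while the sizes stay byte-for-byte identical, consistent with retrained weights of the same tensor shapes. A sketch of checking a downloaded shard against its git-lfs pointer using only the standard library; the expected hash below is the new model-00001 oid from this commit:

# A minimal sketch: hash a downloaded shard and compare it to the sha256
# recorded in its git-lfs pointer file (values taken from this commit).
import hashlib

expected = "f3e56ae776f0c1609d28e43e913adbec972768d5d2aad462cf3d060d21574433"
sha = hashlib.sha256()
with open("model-00001-of-00004.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # read 1 MiB at a time
        sha.update(chunk)
assert sha.hexdigest() == expected, "shard does not match its LFS pointer"
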