NanQiangHF committed
Commit 4fafb6f
1 Parent(s): 04a72b5

Upload LlamaForSequenceClassification

config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "_name_or_path": "meta-llama/Meta-Llama-3.1-8B-Instruct",
   "architectures": [
-    "LlamaForCausalLM"
+    "LlamaForSequenceClassification"
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
model-00001-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f296ae49eff50c35e71e0dd3a3990f81d4a7d6dd9de419846d06f221e270acd5
+oid sha256:d2fa5ba230621270dfb486579f8269c0f3cdd13467169bad521ae111ba26efee
 size 4976698672
model-00002-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:58ee830c15b61b7eacb08a1dab095cd64766d80c2390d53145edba731ced337a
+oid sha256:818ebeaa1bfeed8447cd03276c0018b208affb66a4293ce097aa7bef4958857b
 size 4999802720
model-00003-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ec364267610e959d370ef5924b2e0789ecbbbc1fa5535a75006ac247588e882f
+oid sha256:c9e604b3bddcb5ac8c41c019097b69186b135a6cbeef70208585ba2ab39cc50d
 size 4915916176
model-00004-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:92ecfe1a2414458b4821ac8c13cf8cb70aed66b5eea8dc5ad9eeb4ff309d6d7b
+oid sha256:1356647d29bde74b63e4942899c7f16179610fdbd4e188fdd1d47fe7c73e95c3
-size 1168138808
+size 117482016
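
Each *.safetensors entry above is a Git LFS pointer, not the weights themselves: the oid is the SHA-256 of the shard and size is its length in bytes, so every re-uploaded shard shows up as a one- or two-line pointer change. A small sketch of my own (not part of the commit) that recomputes the pointer for a locally downloaded shard:

    # Recompute a Git LFS pointer (spec v1) for a local file, streaming
    # in 1 MiB chunks so a ~5 GB shard is never held in memory at once.
    import hashlib
    from pathlib import Path

    def lfs_pointer(path: Path) -> str:
        digest, size = hashlib.sha256(), 0
        with path.open("rb") as f:
            for chunk in iter(lambda: f.read(1 << 20), b""):
                digest.update(chunk)
                size += len(chunk)
        return (
            "version https://git-lfs.github.com/spec/v1\n"
            f"oid sha256:{digest.hexdigest()}\n"
            f"size {size}\n"
        )

    # After this commit, this should reproduce the "+" side of the hunk above:
    # print(lfs_pointer(Path("model-00004-of-00004.safetensors")))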
model.safetensors.index.json CHANGED
@@ -1,9 +1,8 @@
 {
   "metadata": {
-    "total_size": 16060522496
+    "total_size": 15009865728
   },
   "weight_map": {
-    "lm_head.weight": "model-00004-of-00004.safetensors",
     "model.embed_tokens.weight": "model-00001-of-00004.safetensors",
     "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
     "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
@@ -293,6 +292,7 @@
     "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
     "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
     "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
-    "model.norm.weight": "model-00004-of-00004.safetensors"
+    "model.norm.weight": "model-00004-of-00004.safetensors",
+    "score.weight": "model-00004-of-00004.safetensors"
   }
 }
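
The index change mirrors the head swap: lm_head.weight leaves the weight map and score.weight enters it. The size arithmetic is consistent with that: 16060522496 − 15009865728 = 1050656768 bytes, which matches dropping a 128256 × 4096 bf16 lm_head (1050673152 bytes) and adding a 16384-byte score head (2 × 4096 in bf16, suggesting a two-label classifier, though the config shown here does not confirm that). A sketch (not part of the commit, assuming the safetensors and torch packages and a local checkout of this repo) that recomputes total_size from the shards:

    # Recompute total_size by summing tensor byte sizes across all shards.
    import json
    from pathlib import Path
    from safetensors import safe_open

    ckpt = Path(".")  # hypothetical: a local checkout of this repository
    index = json.loads((ckpt / "model.safetensors.index.json").read_text())

    total = 0
    for shard in sorted(set(index["weight_map"].values())):
        with safe_open(str(ckpt / shard), framework="pt") as f:
            for name in f.keys():
                tensor = f.get_tensor(name)
                total += tensor.numel() * tensor.element_size()

    assert total == index["metadata"]["total_size"]  # 15009865728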