Upload folder using huggingface_hub
This view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +8 -0
- checkpoint/config.json +28 -0
- checkpoint/generation_config.json +10 -0
- checkpoint/pytorch_model.bin/key_to_filename.json +3 -0
- checkpoint/pytorch_model.bin/p0.model.embed_tokens.weight +3 -0
- checkpoint/pytorch_model.bin/p1.model.layers.0.self_attn.q_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p10.model.layers.1.self_attn.q_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p100.model.layers.11.self_attn.q_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p101.model.layers.11.self_attn.k_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p102.model.layers.11.self_attn.v_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p103.model.layers.11.self_attn.o_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p104.model.layers.11.mlp.gate_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p105.model.layers.11.mlp.up_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p106.model.layers.11.mlp.down_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p107.model.layers.11.input_layernorm.weight +3 -0
- checkpoint/pytorch_model.bin/p108.model.layers.11.post_attention_layernorm.weight +3 -0
- checkpoint/pytorch_model.bin/p109.model.layers.12.self_attn.q_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p11.model.layers.1.self_attn.k_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p110.model.layers.12.self_attn.k_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p111.model.layers.12.self_attn.v_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p112.model.layers.12.self_attn.o_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p113.model.layers.12.mlp.gate_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p114.model.layers.12.mlp.up_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p115.model.layers.12.mlp.down_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p116.model.layers.12.input_layernorm.weight +3 -0
- checkpoint/pytorch_model.bin/p117.model.layers.12.post_attention_layernorm.weight +3 -0
- checkpoint/pytorch_model.bin/p118.model.layers.13.self_attn.q_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p119.model.layers.13.self_attn.k_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p12.model.layers.1.self_attn.v_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p120.model.layers.13.self_attn.v_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p121.model.layers.13.self_attn.o_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p122.model.layers.13.mlp.gate_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p123.model.layers.13.mlp.up_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p124.model.layers.13.mlp.down_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p125.model.layers.13.input_layernorm.weight +3 -0
- checkpoint/pytorch_model.bin/p126.model.layers.13.post_attention_layernorm.weight +3 -0
- checkpoint/pytorch_model.bin/p127.model.layers.14.self_attn.q_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p128.model.layers.14.self_attn.k_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p129.model.layers.14.self_attn.v_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p13.model.layers.1.self_attn.o_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p130.model.layers.14.self_attn.o_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p131.model.layers.14.mlp.gate_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p132.model.layers.14.mlp.up_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p133.model.layers.14.mlp.down_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p134.model.layers.14.input_layernorm.weight +3 -0
- checkpoint/pytorch_model.bin/p135.model.layers.14.post_attention_layernorm.weight +3 -0
- checkpoint/pytorch_model.bin/p136.model.layers.15.self_attn.q_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p137.model.layers.15.self_attn.k_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p138.model.layers.15.self_attn.v_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p139.model.layers.15.self_attn.o_proj.weight +3 -0
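All of the files listed above were added in a single commit. For reference, a commit like this is typically produced with `huggingface_hub`'s `upload_folder`; the sketch below is not taken from this repo and uses a hypothetical repo id, assuming a local directory that holds the `checkpoint/` and `compiled/` folders.

```python
# Minimal sketch: how a commit like this is typically produced with huggingface_hub.
# The repo id and local path are hypothetical placeholders; only the commit
# message matches the one shown above.
from huggingface_hub import HfApi

api = HfApi()
api.upload_folder(
    folder_path="path/to/local/export",   # local dir holding checkpoint/ and compiled/
    repo_id="your-org/your-model",        # hypothetical placeholder
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)
```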
.gitattributes
CHANGED
@@ -33,3 +33,11 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+compiled/0c7d8405434c674e9fd7.neff filter=lfs diff=lfs merge=lfs -text
+compiled/1d026593a87f2bed7c2f.neff filter=lfs diff=lfs merge=lfs -text
+compiled/5779985e1c4daa2f8399.neff filter=lfs diff=lfs merge=lfs -text
+compiled/76d41855347b7535d906.neff filter=lfs diff=lfs merge=lfs -text
+compiled/930b8ce92d83199ad197.neff filter=lfs diff=lfs merge=lfs -text
+compiled/97f45013811c66c53e2c.neff filter=lfs diff=lfs merge=lfs -text
+compiled/98091d94917df3056904.neff filter=lfs diff=lfs merge=lfs -text
+compiled/e4ca6aaa2141a23ee0cc.neff filter=lfs diff=lfs merge=lfs -text
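The added patterns route the compiled Neuron artifacts (`*.neff`) through Git LFS, so they are stored as pointers and resolved to real bytes on download. A minimal sketch of fetching one of them with `huggingface_hub`; the repo id is a hypothetical placeholder, not taken from this diff.

```python
# Sketch: download one LFS-tracked compiled artifact listed above.
# "your-org/your-model" is a hypothetical placeholder for this repository's id.
from huggingface_hub import hf_hub_download

neff_path = hf_hub_download(
    repo_id="your-org/your-model",                  # hypothetical placeholder
    filename="compiled/0c7d8405434c674e9fd7.neff",  # one of the files tracked above
)
print(neff_path)  # local cache path; the LFS pointer is resolved transparently
```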
checkpoint/config.json
ADDED
@@ -0,0 +1,28 @@
+{
+  "_name_or_path": "Watashiha-Llama-2-13B-Ogiri-sft",
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "hidden_act": "silu",
+  "hidden_size": 5120,
+  "initializer_range": 0.02,
+  "intermediate_size": 13824,
+  "max_position_embeddings": 4096,
+  "model_type": "llama",
+  "num_attention_heads": 40,
+  "num_hidden_layers": 40,
+  "num_key_value_heads": 40,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": null,
+  "rope_theta": 10000.0,
+  "tie_word_embeddings": false,
+  "torch_dtype": "float32",
+  "transformers_version": "4.36.2",
+  "use_cache": true,
+  "vocab_size": 45046
+}
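The configuration above describes a standard 40-layer Llama architecture (hidden size 5120, vocab size 45046). A minimal sketch of inspecting it with `transformers`, assuming the `checkpoint/` directory is available locally:

```python
# Sketch: read the config added above without loading any weights.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("checkpoint")  # reads checkpoint/config.json
print(config.model_type, config.hidden_size, config.num_hidden_layers, config.vocab_size)
# expected from the values above: llama 5120 40 45046
```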
checkpoint/generation_config.json
ADDED
@@ -0,0 +1,10 @@
+{
+  "bos_token_id": 1,
+  "do_sample": true,
+  "eos_token_id": 2,
+  "max_length": 4096,
+  "pad_token_id": 0,
+  "temperature": 0.6,
+  "top_p": 0.9,
+  "transformers_version": "4.36.2"
+}
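These defaults enable sampling (temperature 0.6, top_p 0.9) with a 4096-token limit. A minimal sketch of loading them with `transformers`, again assuming a local `checkpoint/` directory:

```python
# Sketch: load the decoding defaults added above.
from transformers import GenerationConfig

gen = GenerationConfig.from_pretrained("checkpoint")  # reads generation_config.json
print(gen.do_sample, gen.temperature, gen.top_p, gen.max_length)
# expected from the values above: True 0.6 0.9 4096
```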
checkpoint/pytorch_model.bin/key_to_filename.json
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:accc6d0b7e8ac18844510c952c638af18d0928c1f979af789d331fe83a76804b
+size 33029
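`checkpoint/pytorch_model.bin/` is a directory of per-parameter files plus this `key_to_filename.json` index, a split layout commonly used for Neuron-targeted checkpoints. A hedged sketch of rebuilding an in-memory state dict from it, assuming the index maps each parameter name to its per-parameter file and each file torch-loads to a tensor:

```python
# Hedged sketch: rebuild a state_dict from the split checkpoint layout above.
# Assumptions (not confirmed by this diff): key_to_filename.json maps each
# parameter name to the file holding it, and each file torch-loads to a tensor.
import json
import os
import torch

ckpt_dir = "checkpoint/pytorch_model.bin"
with open(os.path.join(ckpt_dir, "key_to_filename.json")) as f:
    key_to_filename = json.load(f)

state_dict = {
    # basename() keeps this working whether the index stores bare names or paths
    key: torch.load(os.path.join(ckpt_dir, os.path.basename(fname)), map_location="cpu")
    for key, fname in key_to_filename.items()
}
print(f"{len(state_dict)} parameters loaded")
```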
checkpoint/pytorch_model.bin/p0.model.embed_tokens.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:64ecfad9c41ef2bfe4c9d887b979d56a9cfbc9b6c02b8bc2abe87cd2b0cb0b61
+size 922542869
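Every weight entry in this diff is a Git LFS pointer: a spec version line, the sha256 of the real content (`oid`), and the content size in bytes. A small sketch of checking a downloaded file against those two fields, using the p0 embedding pointer above as the example:

```python
# Sketch: verify a downloaded file against its LFS pointer (oid = sha256, size = bytes).
import hashlib
import os

def verify_lfs_object(local_path: str, expected_oid: str, expected_size: int) -> bool:
    if os.path.getsize(local_path) != expected_size:
        return False
    digest = hashlib.sha256()
    with open(local_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_oid

# Using the p0.model.embed_tokens.weight pointer shown above:
print(verify_lfs_object(
    "checkpoint/pytorch_model.bin/p0.model.embed_tokens.weight",
    expected_oid="64ecfad9c41ef2bfe4c9d887b979d56a9cfbc9b6c02b8bc2abe87cd2b0cb0b61",
    expected_size=922542869,
))
```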
checkpoint/pytorch_model.bin/p1.model.layers.0.self_attn.q_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:726a9e260ce1b9017ff82c2b72a81e282db382cad12b140968b974e1f5bf45df
+size 104858492
checkpoint/pytorch_model.bin/p10.model.layers.1.self_attn.q_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ecff5ed375808af96dcf17e9a9583ff835f3cef454806268a85923548f7d4901
+size 104858495
checkpoint/pytorch_model.bin/p100.model.layers.11.self_attn.q_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:da10ef998ccc7e6d6fe47c0d994d922b80161d36f52b10e7eb5e6091e155dc28
+size 104858501
checkpoint/pytorch_model.bin/p101.model.layers.11.self_attn.k_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a0afbfa2a5a95cbfb74487c7e0fe116b98e2751059db2546a6a9918763222d61
+size 104858501
checkpoint/pytorch_model.bin/p102.model.layers.11.self_attn.v_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:942f13d8919e5e0157a83f79107bdb415a296a896205e6257923381300af54c1
+size 104858501
checkpoint/pytorch_model.bin/p103.model.layers.11.self_attn.o_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1c5c158684cc185f9cc9d7495aceca7d87d6193e35d8c82d850d2244bfe88a47
+size 104858501
checkpoint/pytorch_model.bin/p104.model.layers.11.mlp.gate_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2dbbad5e52284512dcad74bf1f226decc8f560969d22bbf70f7e1229fdbb85cc
+size 283116412
checkpoint/pytorch_model.bin/p105.model.layers.11.mlp.up_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4fd4fc916b286b08f482b2a2ee0583da9e0daf0e65dd4fe3fe2de27a4ab6cb6f
+size 283116406
checkpoint/pytorch_model.bin/p106.model.layers.11.mlp.down_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:362f86620b009bf5f61060a455fde89cfbbefe9cbb6842eb262668b05effe81a
+size 283116412
checkpoint/pytorch_model.bin/p107.model.layers.11.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fc2dd17c07362f315b3d17b6c4621f7bfc82ed3d191782b3b86c0be38267d33f
+size 21378
checkpoint/pytorch_model.bin/p108.model.layers.11.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1643220ae9187669248332c247ebf5c6722628479d2174d645b8c4b31f2bf897
+size 21405
checkpoint/pytorch_model.bin/p109.model.layers.12.self_attn.q_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:50921e04916b753ff2710fc12dbe2b342aac5b8400fa60f0eb60b00aba54b8c3
+size 104858501
checkpoint/pytorch_model.bin/p11.model.layers.1.self_attn.k_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9ef804d7913f0cddb84d673d7aeff954fba43dcb3bfaca879f9a5e2fdcacb4eb
+size 104858495
checkpoint/pytorch_model.bin/p110.model.layers.12.self_attn.k_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ebc9d40b2b389dd06005053188fcc3c0ff5af336b5e23a55c614f99400ca53f5
+size 104858501
checkpoint/pytorch_model.bin/p111.model.layers.12.self_attn.v_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ebe62c74d75dbb67fe8fff82236e033ad53cc47bc49247bd41c5bca271d061c5
+size 104858501
checkpoint/pytorch_model.bin/p112.model.layers.12.self_attn.o_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5e9ffaad624a1333af9a2aad726a0780526ff0f91975acaec1daf988d8408fb5
+size 104858501
checkpoint/pytorch_model.bin/p113.model.layers.12.mlp.gate_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d2a708c8420c427fa9214485dce35087487dcfde8b5d7c87b7c7540f41d164f6
+size 283116412
checkpoint/pytorch_model.bin/p114.model.layers.12.mlp.up_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:aa4b37c2f1a275aff6e7e49e3be5236493d509a385a43b0168ba93894c98578c
+size 283116406
checkpoint/pytorch_model.bin/p115.model.layers.12.mlp.down_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2823c1ef98bda43c87003fe6bcea0f402f43c8890a4f7f0cf3b60e6197296e22
+size 283116412
checkpoint/pytorch_model.bin/p116.model.layers.12.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b48795a9fa89f6733e1f194bdff5bf1fef1d5cbe934ae34905f7b82e3a09480e
+size 21378
checkpoint/pytorch_model.bin/p117.model.layers.12.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:add6d53deab966b42e81b98b97cc47f267f38c5b27c828a9abd5304a2642cfec
+size 21405
checkpoint/pytorch_model.bin/p118.model.layers.13.self_attn.q_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:41f408bb0972bb8978ce985eb9195dba3d6a03bcb307fccb2716fe9fe132342d
+size 104858501
checkpoint/pytorch_model.bin/p119.model.layers.13.self_attn.k_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:90bd88a148e441c3552673df2b3ba7a7c3f7308a44233fdbeedd004eaad6a2f8
+size 104858501
checkpoint/pytorch_model.bin/p12.model.layers.1.self_attn.v_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:52f7ad9288b0c9179df4673bca58c98c97c72ab67d6c264fee1e366ea0baf686
+size 104858495
checkpoint/pytorch_model.bin/p120.model.layers.13.self_attn.v_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6eae706a4f8795f3a1622791a48b0a6e330e0d1593c663e5c89d14f86b4bec91
+size 104858501
checkpoint/pytorch_model.bin/p121.model.layers.13.self_attn.o_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b528413d10ffc3aaaaa4a8ae9210895b9c32ca50e9849e0617db0d1dc38c6a74
+size 104858501
checkpoint/pytorch_model.bin/p122.model.layers.13.mlp.gate_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bc03d38ff73a98c240507e5fd95fb56436653d566d7143e89c44d3ecbed58a33
+size 283116412
checkpoint/pytorch_model.bin/p123.model.layers.13.mlp.up_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:38602bc56ba619d5dd985d3efe8ee5da5e1e6749acdeece3a8257e14f1c127d8
+size 283116406
checkpoint/pytorch_model.bin/p124.model.layers.13.mlp.down_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e7f878d8924063e564cfee1dc06d27b04d263f8f8edc03f741137375d184ca4a
+size 283116412
checkpoint/pytorch_model.bin/p125.model.layers.13.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:44cacd9b36ac3ae1fed4d46e95a532b435655354607bb80637673733bc8ad779
+size 21378
checkpoint/pytorch_model.bin/p126.model.layers.13.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:eccc77f731a3198c0370fbc4c90e2edbd95c8264bb26773ae4fb72e706a615be
+size 21405
checkpoint/pytorch_model.bin/p127.model.layers.14.self_attn.q_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:521bba3acdd824655d6b76cde8a393f4c62e8dac3b79835ce99aafc3d41c0086
+size 104858501
checkpoint/pytorch_model.bin/p128.model.layers.14.self_attn.k_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ab3449c176b27612bffc8531e21aa18f24802cea7a4179a14d61387b54190471
+size 104858501
checkpoint/pytorch_model.bin/p129.model.layers.14.self_attn.v_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9f809787ad86169b5553dbfb05160f5e830bad4c97a4144503740561653c54c2
+size 104858501
checkpoint/pytorch_model.bin/p13.model.layers.1.self_attn.o_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6438fd5ee2d80ac173e86d5ba0f7e39a2216d3e436bda3df46730079110e4d30
+size 104858495
checkpoint/pytorch_model.bin/p130.model.layers.14.self_attn.o_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6f072620a0c2e90637d98dcf18a2991bfc83eb2f2f4fa737af6fc97f236a4526
+size 104858501
checkpoint/pytorch_model.bin/p131.model.layers.14.mlp.gate_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e9955b67d3f128c85c9a6f29ab269ec5a4c1588f1c7807fd8a6ddc55e3ca851a
+size 283116412
checkpoint/pytorch_model.bin/p132.model.layers.14.mlp.up_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:30965e54a57ca36889a2706f820820ac6ce89b0bf77ecb004f09e119d8809a89
+size 283116406
checkpoint/pytorch_model.bin/p133.model.layers.14.mlp.down_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:03f8413d6cd8f28f2e3d9b1b32b8605b335c7637290eaf97faf5ae49cdcc0e61
+size 283116412
checkpoint/pytorch_model.bin/p134.model.layers.14.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f8ed6f3ddbb5db43ec924c784dafaab86f7a554ce31b1ad1b282915e8da44ad5
+size 21378
checkpoint/pytorch_model.bin/p135.model.layers.14.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:64298a317449d6198113490bc63753f0ffa8e20b90595bc78fa9e2425b0af932
+size 21405
checkpoint/pytorch_model.bin/p136.model.layers.15.self_attn.q_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3356e82ff2ce6dc736ea9ce045920ebee7e32a9f79b3351b12d798b865947691
+size 104858501
checkpoint/pytorch_model.bin/p137.model.layers.15.self_attn.k_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5a86e551e12d4abe3d604c1adcd6a7e5140fab4e8f3edabe5fd55559d6c73cef
+size 104858501
checkpoint/pytorch_model.bin/p138.model.layers.15.self_attn.v_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3485a83f2f296db118c68ecb92691b114f7bdbedc31c1fdb7b5b55e3da4725f1
+size 104858501
checkpoint/pytorch_model.bin/p139.model.layers.15.self_attn.o_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8f46b9d130c75d6271b6beb19bfbde20a1ac3f5b56999afb05463787f5035f59
+size 104858501