jburtoft committed on
Commit 02a0504
1 Parent(s): 850aa19

0d4e15de2643ebc3b092474042ef1be9db02ef6ff8e538bb53babce74518efb7

Files changed (26)
  1. pytorch_model.bin/p258.model.layers.28.mlp.up_proj.weight +3 -0
  2. pytorch_model.bin/p259.model.layers.28.mlp.down_proj.weight +3 -0
  3. pytorch_model.bin/p26.model.layers.2.input_layernorm.weight +3 -0
  4. pytorch_model.bin/p260.model.layers.28.input_layernorm.weight +3 -0
  5. pytorch_model.bin/p261.model.layers.28.post_attention_layernorm.weight +3 -0
  6. pytorch_model.bin/p262.model.layers.29.self_attn.q_proj.weight +3 -0
  7. pytorch_model.bin/p263.model.layers.29.self_attn.k_proj.weight +3 -0
  8. pytorch_model.bin/p264.model.layers.29.self_attn.v_proj.weight +3 -0
  9. pytorch_model.bin/p265.model.layers.29.self_attn.o_proj.weight +3 -0
  10. pytorch_model.bin/p266.model.layers.29.mlp.gate_proj.weight +3 -0
  11. pytorch_model.bin/p267.model.layers.29.mlp.up_proj.weight +3 -0
  12. pytorch_model.bin/p268.model.layers.29.mlp.down_proj.weight +3 -0
  13. pytorch_model.bin/p269.model.layers.29.input_layernorm.weight +3 -0
  14. pytorch_model.bin/p27.model.layers.2.post_attention_layernorm.weight +3 -0
  15. pytorch_model.bin/p270.model.layers.29.post_attention_layernorm.weight +3 -0
  16. pytorch_model.bin/p271.model.layers.30.self_attn.q_proj.weight +3 -0
  17. pytorch_model.bin/p272.model.layers.30.self_attn.k_proj.weight +3 -0
  18. pytorch_model.bin/p273.model.layers.30.self_attn.v_proj.weight +3 -0
  19. pytorch_model.bin/p274.model.layers.30.self_attn.o_proj.weight +3 -0
  20. pytorch_model.bin/p275.model.layers.30.mlp.gate_proj.weight +3 -0
  21. pytorch_model.bin/p276.model.layers.30.mlp.up_proj.weight +3 -0
  22. pytorch_model.bin/p277.model.layers.30.mlp.down_proj.weight +3 -0
  23. pytorch_model.bin/p278.model.layers.30.input_layernorm.weight +3 -0
  24. pytorch_model.bin/p279.model.layers.30.post_attention_layernorm.weight +3 -0
  25. pytorch_model.bin/p28.model.layers.3.self_attn.q_proj.weight +3 -0
  26. pytorch_model.bin/p280.model.layers.31.self_attn.q_proj.weight +3 -0
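Note: each entry above is a single parameter tensor stored as its own file under the pytorch_model.bin/ directory, named pNNN.<parameter name>. Below is a minimal sketch of how such a split checkpoint could be reassembled into one state dict; it assumes each part file is an individually torch.load-able tensor and that the parameter name is everything after the first dot in the filename. The directory path and loading scheme are assumptions for illustration, not something this commit specifies.

# Hypothetical sketch: rebuild a state dict from the per-tensor split layout.
# Assumption: every file under pytorch_model.bin/ is a torch.load-able tensor
# whose parameter name follows the "pNNN." prefix, as the listing above suggests.
import os
import torch

def load_split_state_dict(split_dir="pytorch_model.bin"):
    state_dict = {}
    for fname in sorted(os.listdir(split_dir)):
        # e.g. "p258.model.layers.28.mlp.up_proj.weight" -> "model.layers.28.mlp.up_proj.weight"
        param_name = fname.split(".", 1)[1]
        state_dict[param_name] = torch.load(os.path.join(split_dir, fname), map_location="cpu")
    return state_dict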
pytorch_model.bin/p258.model.layers.28.mlp.up_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cc5157c018247f2411893be51cb33bb2f5c255167dfef844fbf9f70750efe356
+ size 180355958
pytorch_model.bin/p259.model.layers.28.mlp.down_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aacdfcaf1740e3c4e410454d713bc9501df6ad478e111b4e32c9d43786349d12
+ size 180355964
pytorch_model.bin/p26.model.layers.2.input_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:20e2a4925653483301b0d960e6df53e4c4f7406dc4f5713e0d989e1a7013850a
+ size 17276
pytorch_model.bin/p260.model.layers.28.input_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9518105dcb30f99ca6d670b3f81a3e714a154e32fad3a5745045d9bb72d360bb
+ size 17282
pytorch_model.bin/p261.model.layers.28.post_attention_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:414086477c4af42a80931e32d448536a23de52429550c6262b9b4a3b90af19d4
+ size 17309
pytorch_model.bin/p262.model.layers.29.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8da47bff7b7b27a9ba92133052de00c5ca91f8594876e5bd8e7b9a5d0ee3a4a7
+ size 67109765
pytorch_model.bin/p263.model.layers.29.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:53cd6fcf94240639a10378bc4e737cbbc1f29414e12e5df72691821ad7b24d9f
+ size 67109765
pytorch_model.bin/p264.model.layers.29.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f3280c23092849154657f5913bcfd4c969803d22bce425d9a094651bcb5f4e51
+ size 67109765
pytorch_model.bin/p265.model.layers.29.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:55ffe1fcfda9d5ea2a62b105ecea8ff218106f6a009f96bb316eadc69297f580
+ size 67109765
pytorch_model.bin/p266.model.layers.29.mlp.gate_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7ae23aeb3feadf2e012bde691c0810b5f40806faf3500f6075714844a748cf52
+ size 180355964
pytorch_model.bin/p267.model.layers.29.mlp.up_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9d56722d90904b1d3dcecaa635d983c477e0b0c4f6ce49b2697519862ee8d484
+ size 180355958
pytorch_model.bin/p268.model.layers.29.mlp.down_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c052f98323bfb9075e194063e772363810f9c6cbf8d7c1cdea9f289f0d33a669
+ size 180355964
pytorch_model.bin/p269.model.layers.29.input_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bc8fb2f3893c0985a672e9072df02d9f1e88f84a12478fd9c263b86105abd32d
+ size 17282
pytorch_model.bin/p27.model.layers.2.post_attention_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:93a78202db221fa5cb35f790ab68661a15e9fcbcb1ef8369790a2cb19aa936f3
+ size 17303
pytorch_model.bin/p270.model.layers.29.post_attention_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:013941bed2fa4ee23be10bab1e916b7fb9fec840c997b8deee5a2b22b67802eb
+ size 17309
pytorch_model.bin/p271.model.layers.30.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1b5a5eaa7cdebf8ac2c744387c60c82c5ef9cb0118578effa1677ec56d690665
+ size 67109765
pytorch_model.bin/p272.model.layers.30.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f60b7255bc2b1b893119b724c527b3383c1fccc857241487c49ba0dba9188be2
+ size 67109765
pytorch_model.bin/p273.model.layers.30.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:006b442a5d2996f1d06a792c04acfbc70132ef95d52446c1b9c7234a15b05b6f
+ size 67109765
pytorch_model.bin/p274.model.layers.30.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3df44502c5f4a62da0ddbb69b33077cc4ed6eb3103d19b7aa578c0fa90bfce7d
+ size 67109765
pytorch_model.bin/p275.model.layers.30.mlp.gate_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2021e5aa69f2eb231eb37c0c9fef627d3938c3c8051f8c7b6fb4e4623eb364b2
+ size 180355964
pytorch_model.bin/p276.model.layers.30.mlp.up_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:121e86fc4a87c73661dd25a4d9c75159024c340cac4e4cfe5169bc82f428d994
+ size 180355958
pytorch_model.bin/p277.model.layers.30.mlp.down_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e5eac72b7162bb1a3dd4050e2b97bc45fd230a3ce8dd17a13b9713242752b5de
+ size 180355964
pytorch_model.bin/p278.model.layers.30.input_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3afac8c9ad42a3c4e3d0594f71362cc304e0daec159ace3fe88ec0a160dcf813
+ size 17282
pytorch_model.bin/p279.model.layers.30.post_attention_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:79c63680f2dd10446c7ce4b29ba0c860ff3474345b4269be31fbaee665744fd0
+ size 17309
pytorch_model.bin/p28.model.layers.3.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:91990c3f123482e53b7e01d7aea12bd7abf8fd4748c6afaab954019d6b751a82
+ size 67109759
pytorch_model.bin/p280.model.layers.31.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a4d3df586572ff08a40d0c0e9ddbb97f775a112580b184f5c35603a383d1226e
+ size 67109765
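Note: every file added in this commit is a Git LFS pointer rather than the weight data itself: three lines giving the pointer spec version, the sha256 oid of the actual blob, and its size in bytes (see https://git-lfs.github.com/spec/v1). Below is a minimal sketch for parsing such a pointer and checking a downloaded blob against it; the file paths are placeholders and the helper names are hypothetical.

# Minimal sketch: parse a Git LFS pointer file ("version / oid / size" records,
# as in the hunks above) and verify a locally downloaded blob against it.
import hashlib

def parse_lfs_pointer(path):
    fields = {}
    with open(path, "r", encoding="utf-8") as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            if key:
                fields[key] = value
    # oid is recorded as "sha256:<hex digest>"
    algo, _, digest = fields["oid"].partition(":")
    return {"version": fields["version"], "algo": algo, "digest": digest, "size": int(fields["size"])}

def verify_blob(pointer_path, blob_path):
    meta = parse_lfs_pointer(pointer_path)
    h = hashlib.sha256()
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest() == meta["digest"]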