at676 committed
Commit eb57800
1 Parent(s): 07ec3af

Upload folder using huggingface_hub (#1)


- bf7128598f17e87a8a550f9f1d61522c7bf3c4592b45fce5f4793c1b1c2695fa (499b894ad8f17cd62dcb9454c5be676590aa654e)
- 32364238436f6f05c2be94d3024c0d719f894ea939cfac38480a31b4dc83e34d (b44a8e46a4dc8f38031af52dab533361ea8e1126)
- e9effe82338d835856655810c84ce933453f32953aab4626e4615c54b031e5df (084392161bcba9916363c57c0ea81b6c6bc8e47e)
- af45f83bddd771c4ae30f222a1e2625832805b64fc2bf806c4f4f1f839e5453e (586ccaf3842a876fb0fc858a25f9e09a2a5ad1fd)
- cdb6530a184a7a29cbabac1208abd382b598d8a501ee23d9a6a86142e2807cd2 (4bedd193dcd1f8f70489a313dd8372efd7f3bf8a)

config.json ADDED
@@ -0,0 +1,40 @@
+{
+  "_name_or_path": "meta-llama/Llama-2-70b-chat-hf",
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "hidden_act": "silu",
+  "hidden_size": 8192,
+  "initializer_range": 0.02,
+  "intermediate_size": 28672,
+  "max_position_embeddings": 4096,
+  "model_type": "llama",
+  "num_attention_heads": 64,
+  "num_hidden_layers": 80,
+  "num_key_value_heads": 8,
+  "pretraining_tp": 1,
+  "quip_params": {
+    "codebook": "E8P12RVQ4B",
+    "codebook_version": 1,
+    "codesz": 8,
+    "fused": true,
+    "idx_dtype": "torch.int64",
+    "lora_rank": 0,
+    "model_version": 1,
+    "outlier_channel_split": false,
+    "packsz": 2,
+    "rescale_WH": false,
+    "resid_scale_override": 3.6
+  },
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": null,
+  "rope_theta": 10000.0,
+  "tie_word_embeddings": false,
+  "torch_dtype": "float16",
+  "transformers_version": "4.34.0",
+  "use_cache": true,
+  "vocab_size": 32000
+}
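Aside from the quip_params block, this is the stock meta-llama/Llama-2-70b-chat-hf configuration: 80 decoder layers, hidden size 8192, 64 attention heads with 8 key/value heads (grouped-query attention), and a 4096-token context. quip_params is a non-standard field recording the QuIP# quantization settings (E8P12RVQ4B codebook, codesz 8, packsz 2) that the quip-sharp inference code reads alongside the usual Llama fields. A minimal sketch of inspecting the config with transformers, using a placeholder path rather than the actual repository id:

from transformers import AutoConfig

# Placeholder path; substitute the actual repository id or a local download.
cfg = AutoConfig.from_pretrained("path/to/this/repo")

# Standard Llama-2-70B geometry from config.json above.
assert cfg.num_hidden_layers == 80 and cfg.hidden_size == 8192
assert cfg.num_attention_heads == 64 and cfg.num_key_value_heads == 8
head_dim = cfg.hidden_size // cfg.num_attention_heads  # 8192 / 64 = 128

# Non-standard keys such as "quip_params" survive as plain attributes on the config object.
print(getattr(cfg, "quip_params", None))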
generation_config.json ADDED
@@ -0,0 +1,10 @@
+{
+  "bos_token_id": 1,
+  "do_sample": true,
+  "eos_token_id": 2,
+  "max_length": 4096,
+  "pad_token_id": 0,
+  "temperature": 0.6,
+  "top_p": 0.9,
+  "transformers_version": "4.34.0"
+}
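generation_config.json carries the default sampling settings that model.generate() picks up automatically: nucleus sampling (top_p 0.9) at temperature 0.6, pad token 0, and a 4096-token maximum length, matching the defaults shipped with the upstream Llama-2 chat checkpoints. A short sketch, again with a placeholder path:

from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained("path/to/this/repo")
assert gen_cfg.do_sample and gen_cfg.temperature == 0.6 and gen_cfg.top_p == 0.9

# Applied implicitly by model.generate(**inputs), or passed explicitly:
# outputs = model.generate(**inputs, generation_config=gen_cfg)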
model-00001-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:aafe55f9ca40da2e57969084230c7c37861352e1722671b18afa2ae3e6b624ac
+size 9985104748
model-00002-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:06a037a631ce3dc325e6173b9f72fcb60c83dca65a0523926785edf596cc323d
+size 9880552968
model-00003-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cf5cca85c0285b3da27e7971eb684ac15e26c26e4634f6064db87e6ab1f77ca4
+size 9846965214
model-00004-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:57b08f129f5d0c95414aef96f6454aa9d0a347d6f8ad3655c8cacc86c66ab1b1
+size 5586283278
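Each *.safetensors shard is stored through Git LFS, so the diff shows only the three-line pointer file (spec version, sha256 oid, byte size) rather than the weights themselves. The four sizes above sum to about 35.3 GB, consistent with a 70B-parameter model stored at roughly 4 bits per weight. A hedged sketch, not part of the repository, for checking a downloaded shard against its pointer:

import hashlib
from pathlib import Path

def verify_lfs_pointer(pointer_text: str, local_file: Path) -> bool:
    """Check a downloaded shard against the oid/size recorded in its LFS pointer."""
    fields = dict(line.split(" ", 1) for line in pointer_text.strip().splitlines())
    expected_oid = fields["oid"].split(":", 1)[1]
    expected_size = int(fields["size"])

    h = hashlib.sha256()
    with open(local_file, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest() == expected_oid and local_file.stat().st_size == expected_size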
model.safetensors.index.json ADDED
The diff for this file is too large to render. See raw diff
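model.safetensors.index.json follows the standard Hugging Face sharded-checkpoint layout: a "metadata" block with the total checkpoint size and a "weight_map" from each tensor name to the shard file that contains it, which is how loaders decide which of the four files to open for a given parameter. A small sketch for inspecting it after download (assuming the standard layout, since the diff is not rendered here):

import json
from collections import Counter

with open("model.safetensors.index.json") as f:
    index = json.load(f)

# Total checkpoint size in bytes, and how many tensors live in each shard.
print(index["metadata"]["total_size"])
print(Counter(index["weight_map"].values()))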