jburtoft committed on
Commit
5065db7
1 Parent(s): 5859300

f7a574324212988ab1253e410c391c52fc263936f123cf6cfba02c2e7b5d3f42

.gitattributes CHANGED
@@ -33,3 +33,15 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ compiled/0b2352566796e812d0d8.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/1aed84373d84de16c05b.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/395425895c871a0d8f5e.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/3d4c056411577db6cd3b.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/80dc12485d570fe089f2.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/84087cfe60b043deda3c.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/96b8b9213ccdf2b20d74.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/9c744de6b7316a2025b1.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/a0a00cf0c124a9a6699e.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/b1df3d608830a52bdeaa.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/c641fdd442f297442c76.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/d8c34a10f3e1a8067bc2.neff filter=lfs diff=lfs merge=lfs -text
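The new .gitattributes entries mark the compiled NEFF binaries as Git LFS objects, so the repository history only stores lightweight pointer stubs (the version/oid/size entries added below) while the multi-megabyte artifacts live in LFS storage. A minimal sketch of how a commit like this could be produced with huggingface_hub, which handles LFS tracking of large files automatically; the local folder path and target repo id below are assumptions for illustration, not taken from this commit:

    # Sketch only: push a locally exported Neuron model folder to the Hub.
    # Large binaries such as compiled/*.neff are stored via Git LFS and appear
    # in .gitattributes as filter=lfs entries like the ones added above.
    from huggingface_hub import HfApi

    api = HfApi()
    api.upload_folder(
        folder_path="SOLAR-10.7B-v1.0-neuron-24xlarge-2.16-8core-4096",  # assumed local export directory
        repo_id="jburtoft/SOLAR-10.7B-v1.0-neuron",                      # assumed target repo
        commit_message="Add compiled Neuron artifacts",
    )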
checkpoint/pytorch_model.bin/p96.model.layers.10.mlp.up_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:74683202298c1169204db85d4ed1ec4a3a290842514f9d0f08712a2847cff978
+ size 234881907
checkpoint/pytorch_model.bin/p97.model.layers.10.mlp.down_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:33b18e40da9aa716cc43ad956c29e5803432bfa92e3e2ff8faab1954d7328689
+ size 234881913
checkpoint/pytorch_model.bin/p98.model.layers.10.input_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6bea41eed83a51615160744f6eb14caefc5be5dc2b34fc1545f984e8ce61a2c3
+ size 17279
checkpoint/pytorch_model.bin/p99.model.layers.10.post_attention_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:76d6dba5a09d7bee70951d68ce54cf1f362cf3ccfacda246b10d9bd3055123e9
+ size 17306
compiled/0b2352566796e812d0d8.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:87f073d29ed7d394644c4fe96365069b9d0a8accd27efd5e088ba0f29de991d5
+ size 3503104
compiled/1aed84373d84de16c05b.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:da552e435e1e217a5236e080486d7d571449d57b12cb96b18b457e2aaf9a5b9e
+ size 3779584
compiled/395425895c871a0d8f5e.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:377ffa0b9a6091b4121986338508684675319c33a7bbf0982cf943c1ef168b32
+ size 3892224
compiled/3d4c056411577db6cd3b.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bf4106dd7c7cd9bba68e60a8bd468f1e4dbbe5f6ed793bb24edb9ad4ce10224a
+ size 5336064
compiled/80dc12485d570fe089f2.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4bb5da3b5c5c8b39dfef99feff27f410c8e19fbcf0bd06dd4678cac3a08fff1f
+ size 4086784
compiled/84087cfe60b043deda3c.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9cc2f23aea5b48e3c1b4fbc9f954edc7dea5d08956a5e1cdc8de8496750045c7
+ size 3492864
compiled/96b8b9213ccdf2b20d74.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:68a37aecfdb89c350b83223dc3391bac9a75f68444c2c733b315b3ce01a9f2d8
+ size 3625984
compiled/9c744de6b7316a2025b1.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:efbdae86934a0df4476b4ba5ddd0eb6cdeee2adb214b1e1d7cda199d0f7f20ee
+ size 4281344
compiled/a0a00cf0c124a9a6699e.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4bc89bedbb5047096ad047013332ba5f1978c86b0d3b704b27f7eb2e8753a4d4
+ size 20532224
compiled/b1df3d608830a52bdeaa.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3e8df2c067ad2ca384f63cbcda58ccdb81d357ed50c1ca45e47a50bb401268cd
+ size 3789824
compiled/c641fdd442f297442c76.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:015a870532764687da684277591f62e009833ffdec04a6a1397f808d145b6439
+ size 9513984
compiled/d8c34a10f3e1a8067bc2.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f0c1a3a1ac36fab4bf9d8716e6409f26049b89c12dce7aa9e0436c12c81c3fb0
+ size 3544064
config.json ADDED
@@ -0,0 +1,36 @@
+ {
+   "_name_or_path": "SOLAR-10.7B-v1.0-neuron-24xlarge-2.16-8core-4096/config.json",
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "hidden_act": "silu",
+   "hidden_size": 4096,
+   "initializer_range": 0.02,
+   "intermediate_size": 14336,
+   "max_position_embeddings": 4096,
+   "model_type": "llama",
+   "neuron": {
+     "auto_cast_type": "fp16",
+     "batch_size": 1,
+     "compiler_type": "neuronx-cc",
+     "compiler_version": "2.12.54.0+f631c2365",
+     "num_cores": 8,
+     "sequence_length": 4096,
+     "task": "text-generation"
+   },
+   "num_attention_heads": 32,
+   "num_hidden_layers": 48,
+   "num_key_value_heads": 8,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": null,
+   "rope_theta": 10000.0,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float16",
+   "transformers_version": "4.35.0",
+   "use_cache": false,
+   "vocab_size": 32000
+ }
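The "neuron" block in config.json records how the checkpoint was compiled (neuronx-cc 2.12, fp16 auto-cast, batch size 1, sequence length 4096, 8 NeuronCores), so a model loaded from this repository has to be used with matching settings. One plausible way to run such a pre-compiled checkpoint is with optimum-neuron on an Inferentia2/Trainium instance; this is a hedged sketch and the repo id below is an assumption:

    # Hedged sketch: load a pre-compiled Neuron causal LM and generate text.
    # The compiled/*.neff artifacts are reused (no recompilation) as long as
    # batch size and sequence length match the "neuron" block above.
    from optimum.neuron import NeuronModelForCausalLM
    from transformers import AutoTokenizer

    repo_id = "jburtoft/SOLAR-10.7B-v1.0-neuron"  # assumed repo id, for illustration
    model = NeuronModelForCausalLM.from_pretrained(repo_id)
    tokenizer = AutoTokenizer.from_pretrained(repo_id)

    inputs = tokenizer("Hello, my name is", return_tensors="pt")
    outputs = model.generate(**inputs, max_new_tokens=32)
    print(tokenizer.decode(outputs[0], skip_special_tokens=True))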
generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "transformers_version": "4.35.0",
+   "use_cache": false
+ }
special_tokens_map.json ADDED
@@ -0,0 +1,23 @@
+ {
+   "bos_token": {
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dadfd56d766715c61d2ef780a525ab43b8e6da4de6865bda3d95fdef5e134055
+ size 493443
tokenizer_config.json ADDED
@@ -0,0 +1,40 @@
+ {
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<unk>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "<s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "</s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "additional_special_tokens": [],
+   "bos_token": "<s>",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "</s>",
+   "legacy": true,
+   "model_max_length": 1000000000000000019884624838656,
+   "pad_token": null,
+   "sp_model_kwargs": {},
+   "spaces_between_special_tokens": false,
+   "tokenizer_class": "LlamaTokenizer",
+   "unk_token": "<unk>",
+   "use_default_system_prompt": true
+ }