nithiyn committed
Commit 4588ed0
1 Parent(s): 748107f

Upload folder using huggingface_hub

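The commit message indicates the files were pushed with huggingface_hub's folder upload, which also writes the Git LFS tracking entries shown in the .gitattributes diff below. A minimal sketch of that workflow, assuming a hypothetical local folder and target repo id (neither appears in this commit view):

from huggingface_hub import HfApi

api = HfApi()

# Hypothetical paths/ids for illustration; the actual repo id is not shown in this commit view.
api.upload_folder(
    folder_path="./mixtral-8x22b-neuron",   # local export containing compiled/*.neff, config.json, tokenizer files
    repo_id="my-org/mixtral-8x22b-neuron",  # target model repo on the Hub
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)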
.gitattributes CHANGED
@@ -35,3 +35,15 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
  compiled/57352becff514d422065.neff filter=lfs diff=lfs merge=lfs -text
  compiled/6b86315eff70f70cdb82.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/1490848b523a12b5041b.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/25fbc38e110240e4c644.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/6989e8293a1c2f7216d4.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/75b4177e54d3da05957e.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/7b9943b66d65f23ee419.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/7ec5a7d199f27379925d.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/b63b81656983d63aa86d.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/cff52905f4da4034b124.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/d03e92620d47e92267a4.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/d6407b57c2e615c6f238.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/f85290f055dde229197d.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/fc80db7af59e26d7c940.neff filter=lfs diff=lfs merge=lfs -text
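Each added entry routes one compiled/*.neff artifact through Git LFS (filter, diff, and merge set to lfs, with -text). A minimal sketch, assuming a hypothetical local clone, that checks every file under compiled/ is covered by an LFS pattern in .gitattributes (fnmatch only approximates gitattributes pattern matching):

import fnmatch
from pathlib import Path

repo = Path(".")  # hypothetical local clone

# Collect the patterns that are routed through LFS.
lfs_patterns = []
for line in (repo / ".gitattributes").read_text().splitlines():
    parts = line.split()
    if parts and "filter=lfs" in parts[1:]:
        lfs_patterns.append(parts[0])

# Warn about any compiled artifact that would not go through LFS.
for path in (repo / "compiled").glob("*.neff"):
    rel = path.relative_to(repo).as_posix()
    if not any(fnmatch.fnmatch(rel, pat) for pat in lfs_patterns):
        print(f"not LFS-tracked: {rel}")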
compiled/1490848b523a12b5041b.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1fc105bafe24cd7ca1971f3622295ccd4b7c8b3ae8e831d54fcfc0bc4ace78a7
+ size 11869184
compiled/25fbc38e110240e4c644.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3740a541d12f593935f6d63b2e6566ab777d23d719325ac526320b9f641456da
+ size 4506624
compiled/6989e8293a1c2f7216d4.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a16afe7d7035a48d9c34ad9c38544af43b4027161b5a3ecdb0c967a6d9a5293f
+ size 11899904
compiled/75b4177e54d3da05957e.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:601625d9f74dba9ad4c20a206c965fa1b42a87d64d8352371f1350ff8c20f587
+ size 2171904
compiled/7b9943b66d65f23ee419.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a3a85b0e6a6d27b34819e995264c5b93c2e4ee484c725df7e3693341e8acadc9
+ size 1311744
compiled/7ec5a7d199f27379925d.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:891e4d6bfbf51518fd2337238240c2fbf3225819b0a528da1599c3e98268b642
+ size 12248064
compiled/b63b81656983d63aa86d.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6cbefaccd65b2d5848ae75c6fb20ac3ede9d02da261d9e973f689fde798cb596
+ size 12606464
compiled/cff52905f4da4034b124.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:02c9ef72bb162066ac465b184caf340e648023d85a275c76f9247cc8316e8873
+ size 18740224
compiled/d03e92620d47e92267a4.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2f310a73a7ca60597ceea1e0c5acd87d7ee44a8e057c73550707335a8b3165d1
+ size 11951104
compiled/d6407b57c2e615c6f238.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:66ab83b66b34cfe01b383952b8193045e3a1b66419f055962b3db4218b373516
+ size 8449024
compiled/f85290f055dde229197d.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8ac2968fdb2b294f368eaadd6b8771fb8bbffff182c78494e9ffe2eb0a69628b
+ size 15739904
compiled/fc80db7af59e26d7c940.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f99c8a745526b15d7281a715bedb6f3ada7802a6113b8836ab5ad04a77ff2270
+ size 12043264
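Each ADDED .neff file above is committed as a Git LFS pointer: the spec version, a sha256 object id, and the byte size, while the actual binary lives in LFS storage. A minimal sketch, assuming hypothetical local paths, that parses such a pointer and verifies a downloaded artifact against it:

import hashlib
from pathlib import Path

def parse_lfs_pointer(pointer_path: str) -> dict:
    """Parse the key/value lines of a Git LFS pointer file."""
    fields = {}
    for line in Path(pointer_path).read_text().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

def verify(pointer_path: str, blob_path: str) -> bool:
    """Check a downloaded blob against the oid and size recorded in its pointer."""
    fields = parse_lfs_pointer(pointer_path)
    expected_oid = fields["oid"].removeprefix("sha256:")
    expected_size = int(fields["size"])
    data = Path(blob_path).read_bytes()
    return len(data) == expected_size and hashlib.sha256(data).hexdigest() == expected_oid

# Hypothetical paths for illustration.
print(verify("pointers/1490848b523a12b5041b.neff", "compiled/1490848b523a12b5041b.neff"))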
config.json CHANGED
@@ -1,34 +1,38 @@
  {
- "_name_or_path": "mistralai/Mistral-Large-Instruct-2407",
+ "_name_or_path": "mistralai/Mixtral-8x22B-Instruct-v0.1",
  "architectures": [
- "MistralForCausalLM"
+ "MixtralForCausalLM"
  ],
  "attention_dropout": 0.0,
  "bos_token_id": 1,
  "eos_token_id": 2,
- "head_dim": 128,
  "hidden_act": "silu",
- "hidden_size": 12288,
+ "hidden_size": 6144,
  "initializer_range": 0.02,
- "intermediate_size": 28672,
- "max_position_embeddings": 131072,
- "model_type": "mistral",
+ "intermediate_size": 16384,
+ "max_position_embeddings": 65536,
+ "model_type": "mixtral",
  "neuron": {
  "auto_cast_type": "bf16",
  "batch_size": 4,
- "checkpoint_id": "mistralai/Mistral-Large-Instruct-2407",
- "checkpoint_revision": "566a5a1fa869af019a1d937c231a76dbcb4b0d24",
+ "checkpoint_id": "mistralai/Mixtral-8x22B-Instruct-v0.1",
+ "checkpoint_revision": "a46959a1a02a9247294f5e141a4f3270059c6b32",
  "compiler_type": "neuronx-cc",
  "compiler_version": "2.15.128.0+56dc5a86",
  "num_cores": 24,
  "sequence_length": 4096,
  "task": "text-generation"
  },
- "num_attention_heads": 96,
- "num_hidden_layers": 88,
+ "num_attention_heads": 48,
+ "num_experts_per_tok": 2,
+ "num_hidden_layers": 56,
  "num_key_value_heads": 8,
+ "num_local_experts": 8,
+ "output_router_logits": false,
  "rms_norm_eps": 1e-05,
  "rope_theta": 1000000.0,
+ "router_aux_loss_coef": 0.001,
+ "router_jitter_noise": 0.0,
  "sliding_window": null,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer.model CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:59f95e28944c062244741268596badc900df86c7f5ded05088d2da22a7379e06
- size 587583
+ oid sha256:37f00374dea48658ee8f5d0f21895b9bc55cb0103939607c8185bfd1c6ca1f89
+ size 587404
tokenizer_config.json CHANGED
The diff for this file is too large to render. See raw diff