michaelfeil committed
Commit
6774b0d
1 Parent(s): 0e6f0b4

Update config.json

Files changed (1): config.json (+12 -1)
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "/tmp/model",
+  "_name_or_path": "gradientai/v-alpha-tross",
   "architectures": [
     "LlamaForCausalLM"
   ],
@@ -13,6 +13,17 @@
   "intermediate_size": 28672,
   "max_position_embeddings": 4096,
   "model_type": "llama",
+  "neuron": {
+    "auto_cast_type": "bf16",
+    "batch_size": 1,
+    "checkpoint_id": "gradientai/v-alpha-tross",
+    "checkpoint_revision": "6d51ed47fe16e37d819dc52c8c55fd7a7150d2b3",
+    "compiler_type": "neuronx-cc",
+    "compiler_version": "2.12.68.0+4480452af",
+    "num_cores": 24,
+    "sequence_length": 2048,
+    "task": "text-generation"
+  },
   "num_attention_heads": 64,
   "num_hidden_layers": 80,
   "num_key_value_heads": 8,