config.json (441 Bytes):
{
  "base_model_name_or_path": "meta-llama/Meta-Llama-3-70B-Instruct",
  "architectures": [
    "MLPSpeculatorPreTrainedModel"
  ],
  "emb_dim": 8192,
  "inner_dim": 8192,
  "model_type": "mlp_speculator",
  "n_candidates": 4,
  "n_predict": 4,
  "scale_input": true,
  "tie_weights": true,
  "top_k_tokens_per_head": [
    4,
    3,
    2,
    2
  ],
  "torch_dtype": "float16",
  "transformers_version": "4.41.2",
  "vocab_size": 128256
}
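
Read as a standard Transformers config, this appears to describe an MLP-speculator draft model attached to meta-llama/Meta-Llama-3-70B-Instruct: n_predict = 4 MLP heads each draft one lookahead token per decoding step, emb_dim / inner_dim = 8192 match the 70B base model's hidden size, and top_k_tokens_per_head lists the per-head top-k used when expanding candidate tokens. A minimal sketch of inspecting these fields, assuming only the Python standard library and that the file is saved under its conventional name config.json (an assumption, not stated on this page):

import json

# Load the speculator config shown above ("config.json" is the conventional
# Hugging Face filename; an assumption, not confirmed by this listing).
with open("config.json") as f:
    cfg = json.load(f)

assert cfg["model_type"] == "mlp_speculator"

print("Base model:              ", cfg["base_model_name_or_path"])
print("Tokens drafted per step: ", cfg["n_predict"])
print("Hidden width:            ", cfg["emb_dim"])  # matches Llama-3-70B's hidden size
print("Per-head top-k:          ", cfg["top_k_tokens_per_head"])
print("Candidate sequences kept:", cfg["n_candidates"])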