mariordoniez committed
Commit 9cbb778
1 Parent(s): 270412b

Upload config.json

Files changed (1)
  1. config.json +3 -5
config.json CHANGED
@@ -1,12 +1,12 @@
 {
-  "_name_or_path": "microsoft/phi-1_5",
+  "_name_or_path": "mariordoniez/phi",
   "activation_function": "gelu_new",
   "architectures": [
     "MixFormerSequentialForCausalLM"
   ],
   "auto_map": {
-    "AutoConfig": "microsoft/phi-1_5--configuration_mixformer_sequential.MixFormerSequentialConfig",
-    "AutoModelForCausalLM": "microsoft/phi-1_5--modeling_mixformer_sequential.MixFormerSequentialForCausalLM"
+    "AutoConfig": "mariordoniez/phi--configuration_mixformer_sequential.MixFormerSequentialConfig",
+    "AutoModelForCausalLM": "mariordoniez/phi--modeling_mixformer_sequential.MixFormerSequentialForCausalLM"
   },
   "embd_pdrop": 0.0,
   "initializer_range": 0.02,
@@ -14,12 +14,10 @@
   "model_type": "mixformer-sequential",
   "n_embd": 2048,
   "n_head": 32,
-  "revision": "271c3397ab4e1f8f4e49868b1e8ba0be95363c88",
   "n_inner": null,
   "n_layer": 24,
   "n_positions": 2048,
   "resid_pdrop": 0.0,
-  "revision":"d38e6f954ec29b96fe2cf033937dad64e279b5d9",
   "rotary_dim": 32,
   "tie_word_embeddings": false,
   "torch_dtype": "float32",