neeleshg23 committed on
Commit
86c271c
1 Parent(s): 4850dd4

Fixing vocab size

Files changed (1): config.json +2 -2
config.json CHANGED
@@ -1,6 +1,6 @@
 {
   "architectures": [
-    "PeftModel"
+    "LlamaForCausalLM"
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
@@ -34,5 +34,5 @@
   "torch_dtype": "bfloat16",
   "transformers_version": "4.42.3",
   "use_cache": true,
-  "vocab_size": 128256
+  "vocab_size": 128258
 }
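
The bump from 128256 to 128258 is consistent with two special tokens added on top of Llama 3's base vocabulary, and the architecture fix ("PeftModel" is not a class AutoModelForCausalLM can instantiate) makes the checkpoint loadable directly. A minimal sketch of how such a vocab mismatch is usually reconciled in transformers, assuming a hypothetical checkpoint path and hypothetical token names:

# Minimal sketch (hypothetical checkpoint path and token names): after adding
# special tokens, the embedding matrix must be resized so that
# config.vocab_size matches the tokenizer length.
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("path/to/checkpoint")  # hypothetical
model = AutoModelForCausalLM.from_pretrained("path/to/checkpoint")

# Suppose two special tokens were added on top of the base 128256:
# len(tokenizer) becomes 128258, and the model must match it.
tokenizer.add_special_tokens(
    {"additional_special_tokens": ["<custom_1>", "<custom_2>"]}  # hypothetical
)
model.resize_token_embeddings(len(tokenizer))  # also updates config.vocab_size

assert model.config.vocab_size == len(tokenizer)

Without the resize (or with a stale vocab_size in config.json), loading the saved weights fails with a shape mismatch on the embedding and lm_head tensors, which is what this commit corrects.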