Commit ceff40a
qubvel-hf (HF staff) committed
1 Parent(s): 26019d1

Update parameters (#7)


- Update parameters (a46f9887813bd734de136d51f3220213e4004a87)

Files changed (1):
  config.json +4 -3
config.json CHANGED

@@ -55,7 +55,7 @@
   },
   "length_penalty": 1.0,
   "max_length": 20,
-  "max_position_embeddings": 16384,
+  "max_position_embeddings": 131072,
   "min_length": 0,
   "model_type": "mllama_text_model",
   "no_repeat_ngram_size": 0,
@@ -95,7 +95,7 @@
   "tokenizer_class": null,
   "top_k": 50,
   "top_p": 1.0,
-  "torch_dtype": null,
+  "torch_dtype": "bfloat16",
   "torchscript": false,
   "typical_p": 1.0,
   "use_bfloat16": false,
@@ -103,6 +103,7 @@
   "use_scaled_rope": true,
   "vocab_size": 128256
   },
+  "torch_dtype": "bfloat16",
   "transformers_version": "4.45.0.dev0",
   "vision_config": {
     "_name_or_path": "",
@@ -217,7 +218,7 @@
   "tokenizer_class": null,
   "top_k": 50,
   "top_p": 1.0,
-  "torch_dtype": null,
+  "torch_dtype": "bfloat16",
   "torchscript": false,
   "typical_p": 1.0,
   "use_bfloat16": false,