fix fp16 bug
config.json CHANGED (+1 -1)
@@ -8,7 +8,7 @@
     "AutoConfig": "configuration_qwen.QWenConfig",
     "AutoModelForCausalLM": "modeling_qwen.QWenLMHeadModel"
   },
-  "bf16": true,
+  "bf16": false,
   "emb_dropout_prob": 0.0,
   "fp16": false,
   "fp32": false,
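Why the one-line change matters: QWenConfig exposes bf16, fp16, and fp32 as explicit precision switches, and with "bf16" pinned to true in the shipped config, the custom QWenLMHeadModel would select bf16 regardless of what the caller asked for, which is the apparent "fp16 bug" on GPUs without bf16 support. With all three flags set to false, the precision can be chosen at load time. A minimal sketch of loading in fp16 after this fix; the repo ID "Qwen/Qwen-7B" is an assumption inferred from the configuration_qwen.QWenConfig identifiers in the diff:

from transformers import AutoModelForCausalLM, AutoTokenizer

# Repo ID is an assumption; any checkpoint shipping this config.json applies.
MODEL_ID = "Qwen/Qwen-7B"

tokenizer = AutoTokenizer.from_pretrained(MODEL_ID, trust_remote_code=True)

# With "bf16": false in config.json, the fp16=True kwarg is forwarded to
# QWenConfig as an override and is no longer shadowed by a hard-coded bf16.
model = AutoModelForCausalLM.from_pretrained(
    MODEL_ID,
    device_map="auto",
    trust_remote_code=True,  # required for the custom QWenLMHeadModel code
    fp16=True,               # request half precision explicitly
).eval()

Passing fp16=True (or bf16=True) through from_pretrained this way follows the usage documented in the Qwen model card; extra kwargs that match config attributes are consumed as config overrides by transformers.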