danielhanchen
committed on
Upload Qwen2ForCausalLM
- config.json +1 -1
- generation_config.json +1 -1
config.json
CHANGED
@@ -43,7 +43,7 @@
   "sliding_window": null,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.48.
+  "transformers_version": "4.48.1",
   "unsloth_fixed": true,
   "use_cache": true,
   "use_sliding_window": false,
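
For reference, a minimal sketch (not part of this commit) of how transformers consumes these config.json fields; the repo id "your-org/your-qwen2-model" is a placeholder, not the actual repository name:

from transformers import AutoConfig

# Placeholder repo id; substitute the real model repository.
config = AutoConfig.from_pretrained("your-org/your-qwen2-model")
print(config.transformers_version)  # "4.48.1" after this change
print(config.torch_dtype)           # bfloat16, as pinned above
print(config.use_sliding_window)    # False; sliding_window stays null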
generation_config.json
CHANGED
@@ -4,5 +4,5 @@
   "eos_token_id": 151643,
   "max_length": 131072,
   "pad_token_id": 151654,
-  "transformers_version": "4.48.
+  "transformers_version": "4.48.1"
 }
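
Similarly, a hedged sketch (again with a placeholder repo id) showing how the generation_config.json values surface through GenerationConfig:

from transformers import GenerationConfig

# Placeholder repo id; substitute the real model repository.
gen_config = GenerationConfig.from_pretrained("your-org/your-qwen2-model")
print(gen_config.eos_token_id)  # 151643
print(gen_config.max_length)    # 131072
print(gen_config.pad_token_id)  # 151654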