danielhanchen committed on
Commit
1068448
·
verified ·
1 Parent(s): 5560285

Upload Qwen2ForCausalLM

Browse files
Files changed (2) hide show
  1. config.json +1 -1
  2. generation_config.json +1 -1
config.json CHANGED
@@ -43,7 +43,7 @@
43
  "sliding_window": null,
44
  "tie_word_embeddings": false,
45
  "torch_dtype": "bfloat16",
46
- "transformers_version": "4.48.0",
47
  "unsloth_fixed": true,
48
  "use_cache": true,
49
  "use_sliding_window": false,
 
43
  "sliding_window": null,
44
  "tie_word_embeddings": false,
45
  "torch_dtype": "bfloat16",
46
+ "transformers_version": "4.48.1",
47
  "unsloth_fixed": true,
48
  "use_cache": true,
49
  "use_sliding_window": false,
generation_config.json CHANGED
@@ -4,5 +4,5 @@
4
  "eos_token_id": 151643,
5
  "max_length": 131072,
6
  "pad_token_id": 151654,
7
- "transformers_version": "4.48.0"
8
  }
 
4
  "eos_token_id": 151643,
5
  "max_length": 131072,
6
  "pad_token_id": 151654,
7
+ "transformers_version": "4.48.1"
8
  }