e-palmisano committed
Commit b5a0825
1 Parent(s): 6d9d7dc

(Trained with Unsloth)

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "model_3",
+  "_name_or_path": "model",
   "architectures": [
     "Qwen2ForCausalLM"
   ],
@@ -21,7 +21,7 @@
   "sliding_window": 131072,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.42.3",
+  "transformers_version": "4.43.0.dev0",
   "unsloth_version": "2024.7",
   "use_cache": true,
   "use_sliding_window": false,
generation_config.json CHANGED
@@ -10,5 +10,5 @@
   "temperature": 0.7,
   "top_k": 20,
   "top_p": 0.8,
-  "transformers_version": "4.42.3"
+  "transformers_version": "4.43.0.dev0"
 }
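
generation_config.json keeps the same sampling defaults (temperature 0.7, top_k 20, top_p 0.8); only the recorded transformers_version changes. generate() picks these defaults up from the repo automatically; passing them explicitly is equivalent, as in this sketch reusing the model and tokenizer from the previous example:

# Sketch of generation with the defaults from generation_config.json.
# `model` and `tokenizer` come from the loading sketch above.
inputs = tokenizer("Hello, how are you?", return_tensors="pt").to(model.device)
outputs = model.generate(
    **inputs,
    do_sample=True,
    temperature=0.7,  # "temperature": 0.7
    top_k=20,         # "top_k": 20
    top_p=0.8,        # "top_p": 0.8
    max_new_tokens=128,
)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))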
model-00001-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6ac88ea3b5902d330f6dfb397954bf28c56d8c604a337723d2395fd02a23785d
+oid sha256:98217e8ea245c38fb224072bd56253629a3bd69bd05f342c8432ffdfb6662afd
 size 4877660776
model-00002-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:1bc91ab9841e5d547e7e992fee499e5d43ef46781a94fb2b4bd2a3c702455aff
+oid sha256:8ad1af11ce43511e0bb74954b2e6bbb4337ec5304a37d39291629fffc805d484
 size 4932751008
model-00003-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:71954c0589653fb565f14ed6e15fbc63e231a4463455611a0e57d5616f7958f8
+oid sha256:59678eabe690b3c81695689dad1d9475b591369f7ad073a280db710fbf666b04
 size 4330865200
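
The three *.safetensors entries are Git LFS pointer files (version, oid, size): the sha256 oids change while the byte sizes stay identical, so the shard contents were re-exported. A small sketch, assuming a shard has been downloaded locally, for checking a file against the oid in its pointer:

# Verify a downloaded shard against the sha256 oid from its LFS pointer.
# The file name comes from the diff above; the local path is an assumption.
import hashlib

def sha256_of(path, chunk_size=1 << 20):
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "59678eabe690b3c81695689dad1d9475b591369f7ad073a280db710fbf666b04"
print(sha256_of("model-00003-of-00004.safetensors") == expected)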
tokenizer_config.json CHANGED
@@ -31,7 +31,7 @@
     "<|im_end|>"
   ],
   "bos_token": null,
-  "chat_template": "{% for message in messages %}{% if loop.first and messages[0]['from'] != 'system' %}{{ '<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n' }}{% endif %}{{'<|im_start|>' + message['from'] + '\n' + message['value'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
+  "chat_template": "{% for message in messages %}{% if loop.first and messages[0]['role'] != 'system' %}{{ '<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n' }}{% endif %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
   "clean_up_tokenization_spaces": false,
   "eos_token": "<|im_end|>",
   "errors": "replace",