sadkins65 committed
Commit: 87660d6
1 Parent(s): 95a9d73

Upload folder using huggingface_hub

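The commit message indicates the files were pushed with the huggingface_hub upload_folder API. A minimal sketch of how such a commit could be reproduced; the local path and repo_id below are placeholders, not taken from this commit:

from huggingface_hub import HfApi

api = HfApi()
# Uploads every file in the local folder to the Hub as a single commit.
api.upload_folder(
    folder_path="./tinyllama-compressed",   # hypothetical local output directory
    repo_id="sadkins65/tinyllama-example",  # hypothetical target repo
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)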
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "/root/.cache/huggingface/hub/models--TinyLlama--TinyLlama-1.1B-intermediate-step-1431k-3T/snapshots/036fa4651240b9a1487f709833b9e4b96b4c1574",
+  "_name_or_path": "/root/.cache/huggingface/hub/models--TinyLlama--TinyLlama-1.1B-Chat-v1.0/snapshots/fe8a4ea1ffedaf415f4da2f062534de366a451e6",
   "architectures": [
     "LlamaForCausalLM"
   ],
@@ -46,7 +46,7 @@
   "quantization_status": "frozen",
   "sparsity_config": {
     "format": "dense",
-    "global_sparsity": 7.8259900429979625,
+    "global_sparsity": 7.817371603902107,
     "registry_requires_subclass": false,
     "sparsity_structure": "unstructured"
   }
@@ -66,8 +66,8 @@
   "rope_scaling": null,
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
-  "torch_dtype": "float16",
-  "transformers_version": "4.40.0",
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.40.2",
   "use_cache": true,
   "vocab_size": 32000
 }
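The updated config records torch_dtype as bfloat16 under transformers 4.40.2. A minimal sketch of loading the checkpoint so that the dtype stored in config.json is honored; the repo id is a placeholder:

from transformers import AutoModelForCausalLM

# torch_dtype="auto" tells transformers to use the "torch_dtype" field from
# config.json, which this commit changes from float16 to bfloat16.
model = AutoModelForCausalLM.from_pretrained(
    "sadkins65/tinyllama-example",  # hypothetical repo id
    torch_dtype="auto",
)
print(model.dtype)  # expected: torch.bfloat16 after this change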
generation_config.json CHANGED
@@ -3,5 +3,5 @@
   "eos_token_id": 2,
   "max_length": 2048,
   "pad_token_id": 0,
-  "transformers_version": "4.40.0"
+  "transformers_version": "4.40.2"
 }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:85e1306ddfc0991bfe20892458b4caecd4605f2f02faabee21a71e211c177a28
-size 1231269448
+oid sha256:4e74d502e7ef41e8aff763927769f95f703bc446a576ce6ef6b396e4940bf375
+size 1231269800
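The model.safetensors entry is a Git LFS pointer, so only the sha256 and byte size change here; the weights themselves live in LFS storage. A minimal sketch of verifying a downloaded copy against the new pointer values (the local path is a placeholder):

import hashlib
import os

path = "model.safetensors"  # local copy of the LFS object
expected_sha = "4e74d502e7ef41e8aff763927769f95f703bc446a576ce6ef6b396e4940bf375"
expected_size = 1231269800

sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

assert os.path.getsize(path) == expected_size
assert sha.hexdigest() == expected_sha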
special_tokens_map.json CHANGED
@@ -13,7 +13,13 @@
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": "</s>",
+  "pad_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
   "unk_token": {
     "content": "<unk>",
     "lstrip": false,
tokenizer_config.json CHANGED
@@ -32,7 +32,7 @@
   "clean_up_tokenization_spaces": false,
   "eos_token": "</s>",
   "legacy": false,
-  "model_max_length": 1000000000000000019884624838656,
+  "model_max_length": 2048,
   "pad_token": "</s>",
   "padding_side": "right",
   "sp_model_kwargs": {},