MaxJeblick committed
Commit a664900
Parent: 925de87

Upload LlamaForCausalLM

Files changed (4)
  1. README.md +3 -0
  2. config.json +3 -2
  3. generation_config.json +1 -1
  4. model.safetensors +3 -0
README.md CHANGED
@@ -1,3 +1,6 @@
+ ---
+ {}
+ ---
  Small dummy LLama2-type Model useable for Unit/Integration tests. Suitable for CPU only machines, see [H2O LLM Studio](https://github.com/h2oai/h2o-llmstudio/blob/main/tests/integration/test_integration.py) for an example integration test.

  Model was created as follows:
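The creation code referenced by the README is outside this diff hunk. A minimal sketch of how such a tiny, randomly initialized Llama model could be built and saved with transformers, using the hyperparameters visible in config.json below (the output directory name is a placeholder, not part of this commit):

```python
# Sketch only: build a tiny random-weight Llama model for unit/integration tests.
# Hyperparameters mirror config.json from this commit; the output path is a placeholder.
from transformers import LlamaConfig, LlamaForCausalLM

config = LlamaConfig(
    hidden_size=12,
    intermediate_size=24,
    num_attention_heads=2,
    num_hidden_layers=2,
    max_position_embeddings=1024,
    vocab_size=32000,
    bos_token_id=1,
    eos_token_id=2,
)

model = LlamaForCausalLM(config)           # randomly initialized weights
model.save_pretrained("tiny-llama-for-tests")  # ~3 MB in float32, matching model.safetensors
```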
config.json CHANGED
@@ -4,13 +4,14 @@
  "LlamaForCausalLM"
  ],
  "attention_bias": false,
+ "attention_dropout": 0.0,
  "bos_token_id": 1,
  "eos_token_id": 2,
  "hidden_act": "silu",
  "hidden_size": 12,
  "initializer_range": 0.02,
  "intermediate_size": 24,
- "max_position_embeddings": 32,
+ "max_position_embeddings": 1024,
  "model_type": "llama",
  "num_attention_heads": 2,
  "num_hidden_layers": 2,
@@ -21,7 +22,7 @@
  "rope_theta": 10000.0,
  "tie_word_embeddings": false,
  "torch_dtype": "float32",
- "transformers_version": "4.34.0",
+ "transformers_version": "4.38.1",
  "use_cache": true,
  "vocab_size": 32000
  }
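With max_position_embeddings raised to 1024 and only a few megabytes of float32 weights, the model loads quickly on CPU-only CI runners. A hypothetical smoke test could look like the sketch below; the repository path is a placeholder, and plain token ids are used since no tokenizer files are touched by this commit:

```python
# Sketch only: load the tiny model on CPU and check the forward pass.
import torch
from transformers import AutoModelForCausalLM

model = AutoModelForCausalLM.from_pretrained("path/to/this-repo")  # placeholder repo id
model.eval()

input_ids = torch.tensor([[1, 42, 7, 99]])  # bos_token_id = 1, then arbitrary ids < 32000
with torch.no_grad():
    out = model(input_ids)

assert out.logits.shape == (1, 4, 32000)  # (batch, seq_len, vocab_size)
```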
generation_config.json CHANGED
@@ -2,5 +2,5 @@
  "_from_model_config": true,
  "bos_token_id": 1,
  "eos_token_id": 2,
- "transformers_version": "4.34.0"
+ "transformers_version": "4.38.1"
  }
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5108f9b61c4c32b2ae72fd11c85535054ea4ffef80fa0fb8a2cd7c5d0e7de717
+ size 3085952