ebsmothers committed on
Commit 448411e · verified · 1 parent: a6339ff

Upload folder using huggingface_hub

adapter_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1d16722945a8b8cfbbd2f5cec7732c0c30b1250aa3c5fb19899d43639712b378
+ size 7902138
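Each .pt/.bin entry in this commit is a Git LFS pointer file rather than the weights themselves: the repo tracks only a sha256 object id and a byte size, while the payload lives in LFS storage. A minimal sketch of checking a downloaded file against its pointer follows; the helper name verify_lfs_object is illustrative, not part of huggingface_hub or git-lfs.

```python
# Illustrative sketch: stream-hash a downloaded file and compare it to the
# oid/size recorded in its Git LFS pointer. verify_lfs_object is our own
# helper name, not a library API.
import hashlib

def verify_lfs_object(path: str, expected_oid: str, expected_size: int) -> bool:
    h = hashlib.sha256()
    size = 0
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # 1 MiB chunks
            h.update(chunk)
            size += len(chunk)
    return size == expected_size and h.hexdigest() == expected_oid

# Values copied from the adapter_0.pt pointer above.
print(verify_lfs_object(
    "adapter_0.pt",
    "1d16722945a8b8cfbbd2f5cec7732c0c30b1250aa3c5fb19899d43639712b378",
    7902138,
))
```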
adapter_config.json ADDED
@@ -0,0 +1 @@
+ {"r": 8, "lora_alpha": 16, "target_modules": ["q_proj", "v_proj"], "peft_type": "LORA"}
adapter_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f5bb268de8825cfb077a8891612fa31c05fbd85f0b8fe395bc5d7039bef68aad
+ size 7905962
config.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["LlamaForCausalLM"], "bos_token_id": 100000, "eos_token_id": 100001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 11008, "max_position_embeddings": 4096, "model_type": "llama", "num_attention_heads": 32, "num_hidden_layers": 30, "num_key_value_heads": 32, "pretraining_tp": 1, "rms_norm_eps": 1e-06, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "transformers_version": "4.33.1", "use_cache": true, "vocab_size": 102400}
hf_model_0001_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e05d9529e994fe7374c670c98fdcf2b5cae0c7f2305d2dca1a6b9097ae212098
+ size 9968192046
hf_model_0002_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4e6c4721dc41cc34b1f0c474a066724314c5b08d0a97973c09dafa29ef1490b5
+ size 3852630031
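As a rough sanity check, the two hf_model shards above total about 13.8 GB; with bfloat16 weights (2 bytes per parameter, per config.json) that corresponds to roughly a 7B-parameter model:

```python
# Back-of-envelope check (our arithmetic, not stated in the commit):
shard_bytes = 9_968_192_046 + 3_852_630_031  # sizes from the two pointers above
params = shard_bytes / 2                     # bfloat16 = 2 bytes per parameter
print(f"~{params / 1e9:.1f}B parameters")    # -> ~6.9B, i.e. a ~7B model
```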