nenkoru committed
Commit
8b690b6
1 Parent(s): d556cf5

Upload with huggingface_hub

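A commit like this is what huggingface_hub's upload helpers produce: large binaries are routed through Git LFS and committed together with the pointer and config files shown below. A minimal sketch of such an upload (the repo_id and folder_path are hypothetical placeholders, not taken from this commit):

    # Sketch only: reproduces the shape of this commit, not the exact call used.
    from huggingface_hub import HfApi

    api = HfApi()
    api.upload_folder(
        folder_path="./onnx_export",           # hypothetical local directory with the exported files
        repo_id="nenkoru/alpaca-lora-onnx",    # hypothetical target repository
        commit_message="Upload with huggingface_hub",
    )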
.gitattributes CHANGED
@@ -32,3 +32,6 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ Constant_161_attr__value filter=lfs diff=lfs merge=lfs -text
+ Constant_169_attr__value filter=lfs diff=lfs merge=lfs -text
+ decoder_model.onnx_data filter=lfs diff=lfs merge=lfs -text
Constant_161_attr__value ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6edec9e4b8ef2a104c6df52da29fdde53fdda740e89f909133154cbcbd5f7be3
+ size 1048576
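Constant_161_attr__value (and Constant_169_attr__value below) are Git LFS pointer files: the repository itself stores only the spec version, the SHA-256 object id, and the byte size, while the 1 MiB payload lives in LFS storage. A resolved file can be checked against its pointer with a short script:

    # Sketch: verify a downloaded blob against the oid/size recorded in its LFS pointer.
    import hashlib
    import os

    def sha256_of(path, chunk_size=1 << 20):
        digest = hashlib.sha256()
        with open(path, "rb") as f:
            for block in iter(lambda: f.read(chunk_size), b""):
                digest.update(block)
        return digest.hexdigest()

    path = "Constant_161_attr__value"   # the resolved file, not the pointer
    print(sha256_of(path))              # should match the oid above
    print(os.path.getsize(path))        # should match size 1048576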
Constant_169_attr__value ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d8b2c3afb641a83b5f366889f4cfd3d6fef51b67e5557d3d4805ae6723eaaad3
+ size 1048576
config.json ADDED
@@ -0,0 +1,22 @@
+ {
+   "_name_or_path": "alpaca-lora/hf_ckpt/",
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "bos_token_id": 0,
+   "eos_token_id": 1,
+   "hidden_act": "silu",
+   "hidden_size": 4096,
+   "initializer_range": 0.02,
+   "intermediate_size": 11008,
+   "max_sequence_length": 2048,
+   "model_type": "llama",
+   "num_attention_heads": 32,
+   "num_hidden_layers": 32,
+   "pad_token_id": -1,
+   "rms_norm_eps": 1e-06,
+   "tie_word_embeddings": false,
+   "transformers_version": "4.28.0.dev0",
+   "use_cache": true,
+   "vocab_size": 32000
+ }
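The config.json matches the 7B LLaMA layout (32 layers, 32 attention heads, hidden size 4096, vocab size 32000) and was written by a 4.28.0.dev0 transformers build. With the repository checked out locally, it can be read back via AutoConfig; the path below is a placeholder:

    # Sketch: load the configuration shipped in this commit.
    from transformers import AutoConfig

    config = AutoConfig.from_pretrained("./")   # directory containing config.json
    print(config.model_type, config.num_hidden_layers, config.vocab_size)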
decoder_model.onnx ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a6678c0e1486e70694e214bc6187828ad199d12e1a1356edc157edf910686c52
+ size 3396934
decoder_model.onnx_data ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:06892ee7815ee604e175b07793c24f23600019c477a6a8e327a53e7fc4cfa441
+ size 26953662464
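decoder_model.onnx holds the graph, while decoder_model.onnx_data (roughly 27 GB) carries the weights as ONNX external data, so both files must sit in the same directory when the model is opened. A minimal check with onnxruntime, assuming both files have been pulled from LFS:

    # Sketch: onnxruntime resolves decoder_model.onnx_data relative to the model file.
    import onnxruntime as ort

    session = ort.InferenceSession("decoder_model.onnx", providers=["CPUExecutionProvider"])
    print([i.name for i in session.get_inputs()])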