atabeyunlu committed
Commit 77cf97c · verified · 1 Parent(s): 47ac0b8

Upload model

Files changed (3)
  1. config.json +5 -5
  2. generation_config.json +2 -2
  3. model.safetensors +2 -2
config.json CHANGED
@@ -1,14 +1,14 @@
 {
-  "_name_or_path": "/home/atabey/Prot2Mol_au/saved_models/lr_1e-05_bs_64_ep_50_wd_0.0005_nlayer_4_nhead_16_prot_prot_t5_dataset_prot_comp_set_pchembl_6_protlen_1000_human_False/checkpoint-294600",
+  "_name_or_path": "/home/atabey/Prot2Mol_au/saved_models/good_model_kinda",
   "activation_function": "gelu_new",
   "add_cross_attention": true,
   "architectures": [
     "GPT2LMHeadModel"
   ],
   "attn_pdrop": 0.1,
-  "bos_token_id": 0,
+  "bos_token_id": 50256,
   "embd_pdrop": 0.1,
-  "eos_token_id": 2,
+  "eos_token_id": 50256,
   "initializer_range": 0.02,
   "is_decoder": true,
   "layer_norm_epsilon": 1e-05,
@@ -17,7 +17,7 @@
   "n_head": 16,
   "n_inner": null,
   "n_layer": 4,
-  "n_positions": 256,
+  "n_positions": 512,
   "reorder_and_upcast_attn": false,
   "resid_pdrop": 0.1,
   "scale_attn_by_inverse_layer_idx": false,
@@ -30,5 +30,5 @@
   "torch_dtype": "float32",
   "transformers_version": "4.44.0",
   "use_cache": true,
-  "vocab_size": 201
+  "vocab_size": 228
 }
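In summary, this commit points `_name_or_path` at a new checkpoint directory, switches the BOS/EOS ids from 0/2 to GPT-2's default 50256, doubles the context window (`n_positions`: 256 to 512), and grows the vocabulary from 201 to 228 tokens. A minimal sketch for checking the committed values, assuming `config.json` has been downloaded into the working directory; the usage below is illustrative and not part of the commit:

```python
from transformers import GPT2Config

# Assumes the committed config.json was downloaded to the working directory.
config = GPT2Config.from_json_file("config.json")

# Values introduced by this commit.
assert config.bos_token_id == 50256   # previously 0
assert config.eos_token_id == 50256   # previously 2
assert config.n_positions == 512      # previously 256
assert config.vocab_size == 228       # previously 201

# Unchanged but notable: the decoder keeps cross-attention enabled.
print(config.add_cross_attention)     # True
```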
generation_config.json CHANGED
@@ -1,6 +1,6 @@
 {
   "_from_model_config": true,
-  "bos_token_id": 0,
-  "eos_token_id": 2,
+  "bos_token_id": 50256,
+  "eos_token_id": 50256,
   "transformers_version": "4.44.0"
 }
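The generation defaults track the same token-id change, so `generate()` will start from and stop on id 50256 rather than 0 and 2. A short sketch, assuming the committed file is available in the current directory (the path is an assumption, not part of the diff):

```python
from transformers import GenerationConfig

# Loads ./generation_config.json from the current directory (assumed location).
gen_config = GenerationConfig.from_pretrained(".")

print(gen_config.bos_token_id)  # 50256
print(gen_config.eos_token_id)  # 50256
```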
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:fe3e694489f58519238b00fef3a482bc54eafceefa63060460a8e14af0e42acf
-size 270635736
+oid sha256:833b70a7a1d2153e84f5e22e4ca412f77223669af753bad2d9b1430d1b1a7f8a
+size 271794904
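The weights are stored through Git LFS, so the lines above are the pointer file: `oid` is the SHA-256 of the actual binary and `size` is its byte count (the roughly 1.2 MB growth is consistent with the larger vocabulary and position-embedding matrices, though the commit does not state this). A small sketch for verifying a downloaded copy against the new pointer, assuming the file sits in the working directory:

```python
import hashlib
import os

path = "model.safetensors"  # assumed local copy of the new weights

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

# Expected values taken from the new LFS pointer in this commit.
print(digest.hexdigest() == "833b70a7a1d2153e84f5e22e4ca412f77223669af753bad2d9b1430d1b1a7f8a")
print(os.path.getsize(path) == 271794904)
```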