SMILY-BPE-tox21 / config.json
{
"_name_or_path": "../models/smiles/bpe_hp_tuned_1M/checkpoint-625000",
"architectures": [
"RobertaForSequenceClassification"
],
"attention_probs_dropout_prob": 0.12334099184282044,
"bos_token_id": 0,
"classifier_dropout": null,
"eos_token_id": 2,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.15123214440812988,
"hidden_size": 768,
"id2label": {
"0": "NR - AR",
"1": "NR - AR - LBD",
"2": "NR - AhR",
"3": "NR - Aromatase",
"4": "NR - ER",
"5": "NR - ER - LBD",
"6": "NR - PPAR - gamma",
"7": "SR - ARE",
"8": "SR - ATAD5",
"9": "SR - HSE",
"10": "SR - MMP",
"11": "SR - p53"
},
"initializer_range": 0.02,
"intermediate_size": 1536,
"label2id": {
"NR - AR": 0,
"NR - AR - LBD": 1,
"NR - AhR": 2,
"NR - Aromatase": 3,
"NR - ER": 4,
"NR - ER - LBD": 5,
"NR - PPAR - gamma": 6,
"SR - ARE": 7,
"SR - ATAD5": 8,
"SR - HSE": 9,
"SR - MMP": 10,
"SR - p53": 11
},
"layer_norm_eps": 1e-12,
"max_position_embeddings": 512,
"model_type": "roberta",
"num_attention_heads": 12,
"num_hidden_layers": 6,
"pad_token_id": 1,
"position_embedding_type": "absolute",
"problem_type": "multi_label_classification",
"torch_dtype": "float32",
"transformers_version": "4.44.0",
"type_vocab_size": 2,
"use_cache": true,
"vocab_size": 40000
}
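
For reference, a minimal inference sketch using this config. The repo id `mikemayuare/SMILY-BPE-tox21` is an assumption inferred from the page title; adjust it to the actual Hub path. Because `problem_type` is `multi_label_classification`, each of the 12 Tox21 endpoints gets an independent sigmoid probability rather than a softmax over labels.

```python
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

# Assumed repo id (from the page title); replace with the actual Hub path.
repo_id = "mikemayuare/SMILY-BPE-tox21"

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForSequenceClassification.from_pretrained(repo_id)
model.eval()

# Example SMILES string (aspirin); max_length matches max_position_embeddings.
smiles = "CC(=O)Oc1ccccc1C(=O)O"
inputs = tokenizer(smiles, return_tensors="pt", truncation=True, max_length=512)

with torch.no_grad():
    logits = model(**inputs).logits

# Multi-label head: sigmoid per endpoint, not softmax across endpoints.
probs = torch.sigmoid(logits).squeeze(0)
for idx, p in enumerate(probs):
    print(f"{model.config.id2label[idx]}: {p.item():.3f}")
```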