{ "_name_or_path": "GerMedBERT/medbert-512", "architectures": [ "BertForSequenceClassification" ], "attention_probs_dropout_prob": 0.1, "classifier_dropout": null, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 768, "id2label": { "0": "1-442.0", "1": "1-632.y", "2": "3-03", "3": "3-030", "4": "3-20", "5": "3-202", "6": "3-206", "7": "3-207", "8": "3-80", "9": "3-804", "10": "5-501.5x", "11": "5-501.y", "12": "5-504.y", "13": "6-001.1", "14": "6-003.b", "15": "6-008.m", "16": "8-52", "17": "8-52a", "18": "8-530.a", "19": "8-54", "20": "8-541.6", "21": "8-542", "22": "8-547", "23": "A02BC02", "24": "B18.2", "25": "C22.0", "26": "C43.9", "27": "C69.3", "28": "C77.2", "29": "C77.9", "30": "C78.0", "31": "C78.6", "32": "C78.7", "33": "C79.5", "34": "C79.88", "35": "C79.9", "36": "D37.6", "37": "D48.9", "38": "I81", "39": "I85.9", "40": "K70.3", "41": "K74.6", "42": "L01AB02", "43": "L01BC05", "44": "L01XA01", "45": "L01XC17", "46": "L01XE05", "47": "R18", "48": "R52.9", "49": "R53" }, "initializer_range": 0.02, "intermediate_size": 3072, "label2id": { "1-442.0": 0, "1-632.y": 1, "3-03": 2, "3-030": 3, "3-20": 4, "3-202": 5, "3-206": 6, "3-207": 7, "3-80": 8, "3-804": 9, "5-501.5x": 10, "5-501.y": 11, "5-504.y": 12, "6-001.1": 13, "6-003.b": 14, "6-008.m": 15, "8-52": 16, "8-52a": 17, "8-530.a": 18, "8-54": 19, "8-541.6": 20, "8-542": 21, "8-547": 22, "A02BC02": 23, "B18.2": 24, "C22.0": 25, "C43.9": 26, "C69.3": 27, "C77.2": 28, "C77.9": 29, "C78.0": 30, "C78.6": 31, "C78.7": 32, "C79.5": 33, "C79.88": 34, "C79.9": 35, "D37.6": 36, "D48.9": 37, "I81": 38, "I85.9": 39, "K70.3": 40, "K74.6": 41, "L01AB02": 42, "L01BC05": 43, "L01XA01": 44, "L01XC17": 45, "L01XE05": 46, "R18": 47, "R52.9": 48, "R53": 49 }, "layer_norm_eps": 1e-12, "max_position_embeddings": 512, "model_type": "bert", "num_attention_heads": 8, "num_hidden_layers": 12, "pad_token_id": 0, "position_embedding_type": "absolute", "problem_type": "multi_label_classification", "torch_dtype": "float32", "transformers_version": "4.40.1", "type_vocab_size": 2, "use_cache": true, "vocab_size": 30000 }