{ "_name_or_path": "bert-base-cased", "architectures": [ "BertForTokenClassification" ], "attention_probs_dropout_prob": 0.1, "classifier_dropout": null, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 768, "id2label": { "0": "\"", "1": "''", "2": "#", "3": "$", "4": "(", "5": ")", "6": ",", "7": ".", "8": ":", "9": "``", "10": "CC", "11": "CD", "12": "DT", "13": "EX", "14": "FW", "15": "IN", "16": "JJ", "17": "JJR", "18": "JJS", "19": "LS", "20": "MD", "21": "NN", "22": "NNP", "23": "NNPS", "24": "NNS", "25": "NN|SYM", "26": "PDT", "27": "POS", "28": "PRP", "29": "PRP$", "30": "RB", "31": "RBR", "32": "RBS", "33": "RP", "34": "SYM", "35": "TO", "36": "UH", "37": "VB", "38": "VBD", "39": "VBG", "40": "VBN", "41": "VBP", "42": "VBZ", "43": "WDT", "44": "WP", "45": "WP$", "46": "WRB" }, "initializer_range": 0.02, "intermediate_size": 3072, "label2id": { "\"": 0, "#": 2, "$": 3, "''": 1, "(": 4, ")": 5, ",": 6, ".": 7, ":": 8, "CC": 10, "CD": 11, "DT": 12, "EX": 13, "FW": 14, "IN": 15, "JJ": 16, "JJR": 17, "JJS": 18, "LS": 19, "MD": 20, "NN": 21, "NNP": 22, "NNPS": 23, "NNS": 24, "NN|SYM": 25, "PDT": 26, "POS": 27, "PRP": 28, "PRP$": 29, "RB": 30, "RBR": 31, "RBS": 32, "RP": 33, "SYM": 34, "TO": 35, "UH": 36, "VB": 37, "VBD": 38, "VBG": 39, "VBN": 40, "VBP": 41, "VBZ": 42, "WDT": 43, "WP": 44, "WP$": 45, "WRB": 46, "``": 9 }, "layer_norm_eps": 1e-12, "max_position_embeddings": 512, "model_type": "bert", "num_attention_heads": 12, "num_hidden_layers": 12, "pad_token_id": 0, "position_embedding_type": "absolute", "transformers_version": "4.41.2", "type_vocab_size": 2, "use_cache": true, "vocab_size": 28996 }