{
  "architectures": [
    "LlamaForSequenceClassification"
  ],
  "bos_token_id": 1,
  "eos_token_id": 2,
  "hidden_act": "silu",
  "hidden_size": 4096,
  "id2label": {
    "0": "animal_abuse",
    "1": "child_abuse",
    "2": "controversial_topics,politics",
    "3": "discrimination,stereotype,injustice",
    "4": "drug_abuse,weapons,banned_substance",
    "5": "financial_crime,property_crime,theft",
    "6": "hate_speech,offensive_language",
    "7": "misinformation_regarding_ethics,laws_and_safety",
    "8": "non_violent_unethical_behavior",
    "9": "privacy_violation",
    "10": "self_harm",
    "11": "sexually_explicit,adult_content",
    "12": "terrorism,organized_crime",
    "13": "violence,aiding_and_abetting,incitement"
  },
  "initializer_range": 0.02,
  "intermediate_size": 11008,
  "label2id": null,
  "max_position_embeddings": 2048,
  "model_type": "llama",
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "pad_token_id": 32000,
  "problem_type": "multi_label_classification",
  "rms_norm_eps": 1e-06,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.28.1",
  "use_cache": true,
  "vocab_size": 32001
}
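
This is a transformers config for a LLaMA-7B-scale model (hidden_size 4096, 32 layers, 32 heads) fitted with a 14-way sequence-classification head. Because "problem_type" is "multi_label_classification", the head is trained with a per-label sigmoid (binary cross-entropy) rather than a softmax, so multiple harm categories can fire on the same input. Below is a minimal sketch of loading a checkpoint that ships this config and scoring text with it; the checkpoint path is hypothetical, since this file does not name the repository, and the 0.5 threshold is a common default, not something specified here.

import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

# Hypothetical path: replace with the actual repo or local directory
# containing this config.json and its weights.
MODEL_PATH = "path/to/checkpoint"

tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH)
model = AutoModelForSequenceClassification.from_pretrained(
    MODEL_PATH,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype": "bfloat16" above
)
model.eval()

text = "Example input to classify."
# max_length mirrors "max_position_embeddings": 2048
inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=2048)

with torch.no_grad():
    logits = model(**inputs).logits  # shape (1, 14): one logit per id2label entry

# Multi-label classification: independent sigmoid per label, not a softmax,
# so the 14 probabilities need not sum to 1.
probs = torch.sigmoid(logits)[0]
flagged = {
    model.config.id2label[i]: round(p.item(), 3)
    for i, p in enumerate(probs)
    if p > 0.5  # assumed threshold; tune per label in practice
}
print(flagged)

Note that transformers derives num_labels (14) from the id2label map, and converts its string keys to integers at load time, which is why model.config.id2label[i] is indexed with an int here.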