BLOOM_1B1_PROMPT_TUNING_CAUSAL_LM / adapter_config.json
{
"auto_mapping": null,
"base_model_name_or_path": "bigscience/bloom-1b1",
"inference_mode": true,
"num_attention_heads": 16,
"num_layers": 24,
"num_transformer_submodules": 1,
"num_virtual_tokens": 20,
"peft_type": "PROMPT_TUNING",
"prompt_tuning_init": "TEXT",
"prompt_tuning_init_text": "Given the review, returning one or more from ['price#neutral', 'service#positive',\n'service#conflict', 'ambience#positive', 'price#positive', 'service#neutral', 'ambience#conflict',\n'food#neutral', 'service#negative', 'ambience#negative', 'anecdotes/miscellaneous#conflict',\n'food#conflict', 'price#conflict', 'anecdotes/miscellaneous#negative', 'food#positive',\n'anecdotes/miscellaneous#neutral', 'food#negative', 'ambience#neutral', 'anecdotes/miscellaneous#positive', 'price#negative'].",
"revision": null,
"task_type": "CAUSAL_LM",
"token_dim": 1536,
"tokenizer_kwargs": null,
"tokenizer_name_or_path": "bigscience/bloom-1b1"
}
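
For context, a minimal sketch of how this adapter config maps onto the PEFT API. The repo id `scholl99/BLOOM_1B1_PROMPT_TUNING_CAUSAL_LM` used below is inferred from the page title and is an assumption; the derived fields (`token_dim`, `num_attention_heads`, `num_layers`, `num_transformer_submodules`) are filled in by PEFT from the base model and do not need to be passed explicitly.

```python
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel, PromptTuningConfig, PromptTuningInit, TaskType, get_peft_model

BASE = "bigscience/bloom-1b1"

# Recreating the config above for training; token_dim, num_layers, etc.
# are derived from the base model by PEFT.
peft_config = PromptTuningConfig(
    task_type=TaskType.CAUSAL_LM,
    prompt_tuning_init=PromptTuningInit.TEXT,
    num_virtual_tokens=20,
    prompt_tuning_init_text=(
        "Given the review, returning one or more from ['price#neutral', "
        "'service#positive', ...]."  # full label list as in the JSON above
    ),
    tokenizer_name_or_path=BASE,
)

base_model = AutoModelForCausalLM.from_pretrained(BASE)
model = get_peft_model(base_model, peft_config)
model.print_trainable_parameters()  # only the 20 virtual-token embeddings are trainable

# Loading the uploaded adapter for inference (repo id assumed from the page title).
tokenizer = AutoTokenizer.from_pretrained(BASE)
inference_model = PeftModel.from_pretrained(
    AutoModelForCausalLM.from_pretrained(BASE),
    "scholl99/BLOOM_1B1_PROMPT_TUNING_CAUSAL_LM",
)
```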