Agent_Daedalus / config.json
{
"model_type": "auto",
"architectures": ["Agent"],
"attention_probs_dropout_prob": 0.1,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"initializer_range": 0.02,
"layer_norm_eps": 1e-12,
"max_position_embeddings": 512,
"num_attention_heads": 12,
"num_hidden_layers": 12,
"pad_token_id": 0,
"vocab_size": 50257,
"type_vocab_size": 2,
"use_cache": True,
"is_decoder": True,
"is_encoder_decoder": False,
"task_specific_params": {
"text-generation": {
"do_sample": True,
"max_length": 200,
"num_return_sequences": 1,
"top_k": 50,
"top_p": 0.95,
"temperature": 1.0
}
}
}
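
A minimal sketch of how a config like this might be loaded and inspected, assuming the file is saved locally as Agent_Daedalus/config.json and that the transformers library is installed; since "model_type": "auto" is not a registered architecture name, the file is read here as a generic PretrainedConfig rather than through AutoConfig. The path and printed fields are illustrative assumptions, not part of the original repository.

# Hypothetical loading sketch; the local path below is an assumption.
from transformers import PretrainedConfig

# Read the raw JSON file into a generic PretrainedConfig object.
config = PretrainedConfig.from_json_file("Agent_Daedalus/config.json")

# Inspect a few of the hyperparameters defined above.
print(config.hidden_size)           # 768
print(config.num_attention_heads)   # 12
print(config.task_specific_params["text-generation"]["top_p"])  # 0.95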