{
  "_name_or_path": "robingeibel/reformer-big_patent-16384",
  "architectures": [
    "ReformerForMaskedLM"
  ],
  "attention_head_size": 64,
  "attention_probs_dropout_prob": 0.1,
  "attn_layers": [
    "local",
    "local",
    "lsh",
    "local",
    "local",
    "local",
    "lsh",
    "local",
    "local",
    "local",
    "lsh",
    "local"
  ],
  "axial_norm_std": 1.0,
  "axial_pos_embds": false,
  "axial_pos_embds_dim": [
    128,
    128
  ],
  "axial_pos_shape": [
    512,
    1024
  ],
  "chunk_size_lm_head": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "feed_forward_size": 1024,
  "hash_seed": null,
  "hidden_act": "relu",
  "hidden_dropout_prob": 0.05,
  "hidden_size": 768,
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "layer_norm_eps": 1e-12,
  "local_attention_probs_dropout_prob": 0.05,
  "local_attn_chunk_length": 64,
  "local_num_chunks_after": 0,
  "local_num_chunks_before": 1,
  "lsh_attention_probs_dropout_prob": 0.0,
  "lsh_attn_chunk_length": 64,
  "lsh_num_chunks_after": 0,
  "lsh_num_chunks_before": 1,
  "max_position_embeddings": 16384,
  "model_type": "reformer",
  "num_attention_heads": 12,
  "num_buckets": [
    64,
    128
  ],
  "num_chunks_after": 0,
  "num_chunks_before": 1,
  "num_hashes": 1,
  "num_hidden_layers": 12,
  "output_attentions": true,
  "output_hidden_states": true,
  "output_past": true,
  "pad_token_id": 0,
  "task_specific_params": {
    "text-generation": {
      "do_sample": true,
      "max_length": 100
    }
  },
  "tie_word_embeddings": false,
  "torch_dtype": "float32",
  "transformers_version": "4.20.1",
  "use_cache": true,
  "vocab_size": 52000
}
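
For reference, a minimal sketch of how this configuration might be consumed with the Hugging Face `transformers` library (assuming the library is installed and the Hub is reachable); none of the Python below comes from the repository itself, only the model id from `_name_or_path` above.

```python
# A minimal sketch, assuming `transformers` is installed and the
# Hugging Face Hub is reachable; only the model id comes from the
# config above.
from transformers import ReformerConfig, ReformerForMaskedLM

model_id = "robingeibel/reformer-big_patent-16384"

# Fetch the config.json shown above and inspect a few of its fields.
config = ReformerConfig.from_pretrained(model_id)
print(config.attn_layers)              # ['local', 'local', 'lsh', ...]
print(config.max_position_embeddings)  # 16384

# Instantiate the masked-LM Reformer with its trained weights.
model = ReformerForMaskedLM.from_pretrained(model_id)
```

The `attn_layers` list drives the layer stack: each entry selects local windowed attention or LSH bucketed attention for the corresponding hidden layer, which is how the Reformer keeps a 16384-token context tractable.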