{
  "_from_model_config": true,
  "bos_token_id": 50256,
  "early_stopping": true,
  "eos_token_id": 50256,
  "max_length": 512,
  "min_length": 50,
  "no_repeat_ngram_size": 2,
  "num_beams": 5,
  "output_attentions": true,
  "output_hidden_states": true,
  "pad_token_id": 50256,
  "transformers_version": "4.44.2"
}