{"torch_dtype": "float32", "is_decoder": true, "architectures": ["OPTForCausalLM"], "bos_token_id": 2, "pad_token_id": 1, "eos_token_id": 2, "embed_dim": 16, "model_type": "opt", "neuron": {"auto_cast_type": "fp32", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-OPTForCausalLM", "checkpoint_revision": "190d1f4fc0011d2eaeaa05282e0fbd2445e4b11f", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "vocab_size": 50265, "max_position_embeddings": 100, "num_attention_heads": 4, "word_embed_proj_dim": 16, "ffn_dim": 4, "hidden_size": 16, "num_hidden_layers": 5, "dropout": 0.1, "attention_dropout": 0.1, "activation_function": "relu", "init_std": 0.02, "layerdrop": 0.0, "use_cache": true, "do_layer_norm_before": true, "enable_bias": true, "layer_norm_elementwise_affine": true, "_remove_final_layer_norm": false}