{"vocab_size": 1024, "hidden_size": 32, "n_layer": 5, "n_head": 4, "layer_norm_epsilon": 1e-05, "initializer_range": 0.02, "use_cache": true, "pretraining_tp": 1, "apply_residual_connection_post_layernorm": false, "hidden_dropout": 0.1, "attention_dropout": 0.1, "bos_token_id": 1, "eos_token_id": 2, "slow_but_exact": true, "torch_dtype": "float32", "is_decoder": true, "architectures": ["BloomForCausalLM"], "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "pad_token_id": 3, "dtype": "float32", "gradient_checkpointing": false, "model_type": "bloom", "n_positions": 512, "seq_length": 7, "type_vocab_size": 16, "neuron": {"task": "text-generation", "batch_size": 1, "num_cores": 2, "auto_cast_type": "fp32", "sequence_length": 100, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "0f4f06f162cd67d34d03ee156484e4001d468500"}}