Synchronizing local compiler cache.
This view is limited to 50 files because it contains too many changes.
- .gitattributes +12 -0
- neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27.dev0/inference/qwen2/Qwen/Qwen2.5-Coder-32B-Instruct/39b69bbd54bed005e329.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27.dev0/inference/qwen2/Qwen/Qwen2.5-Coder-7B-Instruct/c19875bdff42cd555d4c.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/4538babd01182da34f41.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/b40a794eff5ea470980f.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/cdbdbf737272e98ae974.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/gpt2/gpt2/aadac442fccfdd730bb5.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/gpt2/hf-internal-testing/tiny-random-gpt2/1f8668afaa87da7a4bc8.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/gpt2/hf-internal-testing/tiny-random-gpt2/5e4f40c305b67f8eb70c.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/gpt2/hf-internal-testing/tiny-random-gpt2/af0ef68e4ec57e23cab5.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/gpt2/hf-internal-testing/tiny-random-gpt2/e0c5534b610fbfb22b6e.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/llama/NousResearch/Hermes-2-Theta-Llama-3-8B/ff84bf8571014fc7a409.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/llama/llamafactory/tiny-random-Llama-3/1ef3929e28ba91cc3cc6.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/llama/llamafactory/tiny-random-Llama-3/71ec0a49c3535c618d03.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/llama/llamafactory/tiny-random-Llama-3/eb3f26aa23141bff2292.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/7d2df9c19098ee676fa8.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/ac0287246ef4575f8b78.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/af0f1b79454574c744eb.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/mistral/optimum/mistral-1.1b-testing/37aed4e6734aa8542ee8.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/mixtral/dacorvo/Mixtral-tiny/8dde518ab8e435c4c830.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/mixtral/dacorvo/Mixtral-tiny/9ac32cae9a1e9b72b9ed.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/mixtral/dacorvo/Mixtral-tiny/d777c911e955212a0132.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/mixtral/dacorvo/Mixtral-tiny/fc0751b75e6a6a09570f.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/93d7843b7abe79348924.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/b392ca0eec3368da72ec.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/f829b04255e2c75cd7c8.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/qwen2/Qwen/Qwen2.5-0.5B/f404b6bcd5e4c946681b.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/granite/ibm-granite/granite-3.0-8b-instruct/1438e8ced52a00df5a5e.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/qwen2/Qwen/Qwen2.5-7B-Instruct/febd5daac4e51196a9ec.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_0e92acc32b557ad5547d+39f12043/compile_flags.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_0e92acc32b557ad5547d+39f12043/model.done +0 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_0e92acc32b557ad5547d+39f12043/model.hlo_module.pb +3 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_0e92acc32b557ad5547d+39f12043/model.neff +3 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_39a9ccce9adcabc94675+39f12043/compile_flags.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_39a9ccce9adcabc94675+39f12043/model.done +0 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_39a9ccce9adcabc94675+39f12043/model.hlo_module.pb +3 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_39a9ccce9adcabc94675+39f12043/model.neff +3 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_3c33fc8c5b8139f55998+39f12043/compile_flags.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_3c33fc8c5b8139f55998+39f12043/model.done +0 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_3c33fc8c5b8139f55998+39f12043/model.hlo_module.pb +3 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_3c33fc8c5b8139f55998+39f12043/model.neff +3 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_402e15278f41a32c430e+39f12043/compile_flags.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_402e15278f41a32c430e+39f12043/model.done +0 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_402e15278f41a32c430e+39f12043/model.hlo_module.pb +3 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_402e15278f41a32c430e+39f12043/model.neff +3 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_6550f3db82ff02b1c1ff+39f12043/compile_flags.json +1 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_6550f3db82ff02b1c1ff+39f12043/model.done +0 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_6550f3db82ff02b1c1ff+39f12043/model.hlo_module.pb +3 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_6550f3db82ff02b1c1ff+39f12043/model.neff +3 -0
- neuronxcc-2.15.143.0+e39249ad/MODULE_67a9118947e020080e96+39f12043/compile_flags.json +1 -0
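For reference, a single compiled module from a cache repository laid out like this can be pulled with huggingface_hub roughly as sketched below; the repo_id is a placeholder for whichever Hub repository actually hosts this cache, and the path pattern is taken from the MODULE_0e92acc32b557ad5547d+39f12043 entry in the listing above.

```python
from huggingface_hub import snapshot_download

# Placeholder repo_id: substitute the Hub repository that actually hosts this cache.
CACHE_REPO = "my-org/neuron-compiler-cache"

# Fetch just one compiled module (compile flags, HLO module, NEFF) rather than the
# whole cache, following the directory layout shown in the listing above.
local_dir = snapshot_download(
    repo_id=CACHE_REPO,
    allow_patterns=["neuronxcc-2.15.143.0+e39249ad/MODULE_0e92acc32b557ad5547d+39f12043/*"],
)
print(local_dir)
```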
.gitattributes
CHANGED
@@ -6488,3 +6488,15 @@ neuronxcc-2.16.303.0+d9f03cda/MODULE_efc674ceb57a1f7010a5+7ac4dbae/model.neff fi
 neuronxcc-2.16.303.0+d9f03cda/MODULE_ff0a9145bffbede97cde+7ac4dbae/model.neff filter=lfs diff=lfs merge=lfs -text
 neuronxcc-2.16.303.0+d9f03cda/MODULE_5e72d9266a7fb87f15cb+7ac4dbae/model.neff filter=lfs diff=lfs merge=lfs -text
 neuronxcc-2.16.303.0+d9f03cda/MODULE_97b24281cd74002e4729+7ac4dbae/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.15.143.0+e39249ad/MODULE_0e92acc32b557ad5547d+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.15.143.0+e39249ad/MODULE_39a9ccce9adcabc94675+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.15.143.0+e39249ad/MODULE_3c33fc8c5b8139f55998+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.15.143.0+e39249ad/MODULE_402e15278f41a32c430e+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.15.143.0+e39249ad/MODULE_6550f3db82ff02b1c1ff+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.15.143.0+e39249ad/MODULE_67a9118947e020080e96+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.15.143.0+e39249ad/MODULE_7dbd99330688237c55ff+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.15.143.0+e39249ad/MODULE_aa4954701eb403167936+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.15.143.0+e39249ad/MODULE_aefce72d1e0612585993+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.15.143.0+e39249ad/MODULE_b0acdacec2caf4887452+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.15.143.0+e39249ad/MODULE_c7dd940fbcfda75b0522+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.15.143.0+e39249ad/MODULE_e1bacfe9616dab23b889+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
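The twelve added rules route the newly synced model.neff binaries through Git LFS, matching the pattern already used for the neuronxcc-2.16.303.0 modules (the same kind of rule that `git lfs track` writes). As an illustrative sketch only, not the actual sync tooling, rules of this shape could be generated for every NEFF under a compiler prefix:

```python
from pathlib import Path

# Illustrative sketch: append LFS tracking rules, in the same format as the hunk above,
# for every model.neff under one compiler-version prefix of a hypothetical local clone
# (run from the repository root).
prefix = Path("neuronxcc-2.15.143.0+e39249ad")

rules = [
    f"{neff.as_posix()} filter=lfs diff=lfs merge=lfs -text"
    for neff in sorted(prefix.glob("MODULE_*/model.neff"))
]
with Path(".gitattributes").open("a") as fh:
    fh.writelines(rule + "\n" for rule in rules)
```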
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27.dev0/inference/qwen2/Qwen/Qwen2.5-Coder-32B-Instruct/39b69bbd54bed005e329.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["Qwen2ForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 151643, "eos_token_id": 151645, "hidden_act": "silu", "hidden_size": 5120, "initializer_range": 0.02, "intermediate_size": 27648, "max_position_embeddings": 32768, "max_window_layers": 64, "model_type": "qwen2", "neuron": {"auto_cast_type": "bf16", "batch_size": 8, "checkpoint_id": "Qwen/Qwen2.5-Coder-32B-Instruct", "checkpoint_revision": "b47205940b83b5b484577359f71ee7b88472df67", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 40, "num_hidden_layers": 64, "num_key_value_heads": 8, "rms_norm_eps": 1e-06, "rope_theta": 1000000.0, "sliding_window": null, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "use_sliding_window": false, "vocab_size": 152064}
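Each registry entry like the one above stores the model's original config plus a "neuron" block recording how it was compiled (batch size, sequence length, number of cores, cast type, compiler version). As a hedged sketch of how those fields map onto an export call, assuming the documented optimum-neuron NeuronModelForCausalLM API:

```python
from optimum.neuron import NeuronModelForCausalLM

# Shapes and compiler settings mirror the "neuron" block of the registry entry above
# (Qwen/Qwen2.5-Coder-32B-Instruct compiled with neuronx-cc 2.15.143.0+e39249ad).
model = NeuronModelForCausalLM.from_pretrained(
    "Qwen/Qwen2.5-Coder-32B-Instruct",
    export=True,              # compile for Neuron (or reuse a cached compilation)
    batch_size=8,
    sequence_length=4096,
    num_cores=8,
    auto_cast_type="bf16",
)
```

When a cache repository containing a matching registry entry is configured, the precompiled NEFF artifacts can be reused instead of recompiling from scratch.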
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27.dev0/inference/qwen2/Qwen/Qwen2.5-Coder-7B-Instruct/c19875bdff42cd555d4c.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["Qwen2ForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 151643, "eos_token_id": 151645, "hidden_act": "silu", "hidden_size": 3584, "initializer_range": 0.02, "intermediate_size": 18944, "max_position_embeddings": 32768, "max_window_layers": 28, "model_type": "qwen2", "neuron": {"auto_cast_type": "bf16", "batch_size": 1, "checkpoint_id": "Qwen/Qwen2.5-Coder-7B-Instruct", "checkpoint_revision": "0eb6b1ed2d0c4306bc637d09ecef51e59d3dfe05", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 28, "num_hidden_layers": 28, "num_key_value_heads": 4, "rms_norm_eps": 1e-06, "rope_theta": 1000000.0, "sliding_window": null, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "use_sliding_window": false, "vocab_size": 152064}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/4538babd01182da34f41.json
ADDED
@@ -0,0 +1 @@
{"apply_residual_connection_post_layernorm": false, "architectures": ["BloomForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 1, "dtype": "float32", "eos_token_id": 2, "gradient_checkpointing": false, "hidden_dropout": 0.1, "hidden_size": 32, "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "initializer_range": 0.02, "is_decoder": true, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "layer_norm_epsilon": 1e-05, "model_type": "bloom", "n_head": 4, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "92b07e9b7b4f986fa7c54e2ac3b9201b4ba5212e", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 3, "pretraining_tp": 1, "seq_length": 7, "slow_but_exact": true, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 1024}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/b40a794eff5ea470980f.json
ADDED
@@ -0,0 +1 @@
{"apply_residual_connection_post_layernorm": false, "architectures": ["BloomForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 1, "dtype": "float32", "eos_token_id": 2, "gradient_checkpointing": false, "hidden_dropout": 0.1, "hidden_size": 32, "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "initializer_range": 0.02, "is_decoder": true, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "layer_norm_epsilon": 1e-05, "model_type": "bloom", "n_head": 4, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "92b07e9b7b4f986fa7c54e2ac3b9201b4ba5212e", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 3, "pretraining_tp": 1, "seq_length": 7, "slow_but_exact": true, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 1024}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/cdbdbf737272e98ae974.json
ADDED
@@ -0,0 +1 @@
{"apply_residual_connection_post_layernorm": false, "architectures": ["BloomForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 1, "dtype": "float32", "eos_token_id": 2, "gradient_checkpointing": false, "hidden_dropout": 0.1, "hidden_size": 32, "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "initializer_range": 0.02, "is_decoder": true, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "layer_norm_epsilon": 1e-05, "model_type": "bloom", "n_head": 4, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "92b07e9b7b4f986fa7c54e2ac3b9201b4ba5212e", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 3, "pretraining_tp": 1, "seq_length": 7, "slow_but_exact": true, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 1024}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/gpt2/gpt2/aadac442fccfdd730bb5.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "architectures": ["GPT2LMHeadModel"], "attn_pdrop": 0.1, "bos_token_id": 50256, "embd_pdrop": 0.1, "eos_token_id": 50256, "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 1024, "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "n_positions": 1024, "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "gpt2", "checkpoint_revision": "607a30d783dfa663caf39e06633721c8d4cfcd7e", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 1024, "task": "text-generation"}, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "task_specific_params": {"text-generation": {"do_sample": true, "max_length": 50}}, "use_cache": true, "vocab_size": 50257}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/gpt2/hf-internal-testing/tiny-random-gpt2/1f8668afaa87da7a4bc8.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/gpt2/hf-internal-testing/tiny-random-gpt2/5e4f40c305b67f8eb70c.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 24, "sequence_length": 512, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/gpt2/hf-internal-testing/tiny-random-gpt2/af0ef68e4ec57e23cab5.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/gpt2/hf-internal-testing/tiny-random-gpt2/e0c5534b610fbfb22b6e.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/llama/NousResearch/Hermes-2-Theta-Llama-3-8B/ff84bf8571014fc7a409.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128003, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "NousResearch/Hermes-2-Theta-Llama-3-8B", "checkpoint_revision": "57a73110702e7b05ba3f39fef36297454c680725", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/llama/llamafactory/tiny-random-Llama-3/1ef3929e28ba91cc3cc6.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128009, "hidden_act": "silu", "hidden_size": 16, "initializer_range": 0.02, "intermediate_size": 64, "max_position_embeddings": 4096, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "llamafactory/tiny-random-Llama-3", "checkpoint_revision": "2d9d8c1112e9cd4b0d66bb612e09be7da7997b18", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 4, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/llama/llamafactory/tiny-random-Llama-3/71ec0a49c3535c618d03.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128009, "hidden_act": "silu", "hidden_size": 16, "initializer_range": 0.02, "intermediate_size": 64, "max_position_embeddings": 4096, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "llamafactory/tiny-random-Llama-3", "checkpoint_revision": "2d9d8c1112e9cd4b0d66bb612e09be7da7997b18", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 4, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/llama/llamafactory/tiny-random-Llama-3/eb3f26aa23141bff2292.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128009, "hidden_act": "silu", "hidden_size": 16, "initializer_range": 0.02, "intermediate_size": 64, "max_position_embeddings": 4096, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "llamafactory/tiny-random-Llama-3", "checkpoint_revision": "2d9d8c1112e9cd4b0d66bb612e09be7da7997b18", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 4, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/7d2df9c19098ee676fa8.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "attention_probs_dropout_prob": 0.1, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 8, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 32, "initializer_range": 0.02, "intermediate_size": 37, "is_decoder": true, "max_position_embeddings": 512, "model_type": "mistral", "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "dacorvo/tiny-random-MistralForCausalLM", "checkpoint_revision": "81d453e3c8985649e9ee3d4c9378461029d1c73a", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 2, "pad_token_id": 0, "rms_norm_eps": 1e-06, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 32000}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/ac0287246ef4575f8b78.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "attention_probs_dropout_prob": 0.1, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 8, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 32, "initializer_range": 0.02, "intermediate_size": 37, "is_decoder": true, "max_position_embeddings": 512, "model_type": "mistral", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "dacorvo/tiny-random-MistralForCausalLM", "checkpoint_revision": "81d453e3c8985649e9ee3d4c9378461029d1c73a", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 2, "pad_token_id": 0, "rms_norm_eps": 1e-06, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 32000}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/af0f1b79454574c744eb.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "attention_probs_dropout_prob": 0.1, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 8, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 32, "initializer_range": 0.02, "intermediate_size": 37, "is_decoder": true, "max_position_embeddings": 512, "model_type": "mistral", "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "dacorvo/tiny-random-MistralForCausalLM", "checkpoint_revision": "81d453e3c8985649e9ee3d4c9378461029d1c73a", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 2, "pad_token_id": 0, "rms_norm_eps": 1e-06, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 32000}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/mistral/optimum/mistral-1.1b-testing/37aed4e6734aa8542ee8.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 64, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 5632, "max_position_embeddings": 32768, "model_type": "mistral", "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "optimum/mistral-1.1b-testing", "checkpoint_revision": "ce03bc8d47dbd2c173ff65f3a8de1325ba724195", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 22, "num_key_value_heads": 4, "rms_norm_eps": 1e-05, "rope_theta": 1000000.0, "sliding_window": null, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/mixtral/dacorvo/Mixtral-tiny/8dde518ab8e435c4c830.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MixtralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 1024, "initializer_range": 0.02, "intermediate_size": 3584, "max_position_embeddings": 1024, "model_type": "mixtral", "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "dacorvo/Mixtral-tiny", "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 32, "num_experts_per_tok": 2, "num_hidden_layers": 2, "num_key_value_heads": 8, "num_local_experts": 8, "output_router_logits": false, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "router_aux_loss_coef": 0.001, "router_jitter_noise": 0.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/mixtral/dacorvo/Mixtral-tiny/9ac32cae9a1e9b72b9ed.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MixtralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 1024, "initializer_range": 0.02, "intermediate_size": 3584, "max_position_embeddings": 1024, "model_type": "mixtral", "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "dacorvo/Mixtral-tiny", "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 32, "num_experts_per_tok": 2, "num_hidden_layers": 2, "num_key_value_heads": 8, "num_local_experts": 8, "output_router_logits": false, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "router_aux_loss_coef": 0.001, "router_jitter_noise": 0.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/mixtral/dacorvo/Mixtral-tiny/d777c911e955212a0132.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MixtralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 1024, "initializer_range": 0.02, "intermediate_size": 3584, "max_position_embeddings": 1024, "model_type": "mixtral", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "dacorvo/Mixtral-tiny", "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 32, "num_experts_per_tok": 2, "num_hidden_layers": 2, "num_key_value_heads": 8, "num_local_experts": 8, "output_router_logits": false, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "router_aux_loss_coef": 0.001, "router_jitter_noise": 0.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/mixtral/dacorvo/Mixtral-tiny/fc0751b75e6a6a09570f.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MixtralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 1024, "initializer_range": 0.02, "intermediate_size": 3584, "max_position_embeddings": 1024, "model_type": "mixtral", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "dacorvo/Mixtral-tiny", "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 1024, "task": "text-generation"}, "num_attention_heads": 32, "num_experts_per_tok": 2, "num_hidden_layers": 2, "num_key_value_heads": 8, "num_local_experts": 8, "output_router_logits": false, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "router_aux_loss_coef": 0.001, "router_jitter_noise": 0.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/93d7843b7abe79348924.json
ADDED
@@ -0,0 +1 @@
{"_remove_final_layer_norm": false, "activation_function": "relu", "architectures": ["OPTForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 2, "do_layer_norm_before": true, "dropout": 0.1, "embed_dim": 16, "enable_bias": true, "eos_token_id": 2, "ffn_dim": 4, "hidden_size": 16, "init_std": 0.02, "is_decoder": true, "layer_norm_elementwise_affine": true, "layerdrop": 0.0, "max_position_embeddings": 100, "model_type": "opt", "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-OPTForCausalLM", "checkpoint_revision": "3f3f2ee1e499cb7ad89b877068684fdc9d9513c3", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 5, "pad_token_id": 1, "torch_dtype": "float32", "use_cache": true, "vocab_size": 50265, "word_embed_proj_dim": 16}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/b392ca0eec3368da72ec.json
ADDED
@@ -0,0 +1 @@
{"_remove_final_layer_norm": false, "activation_function": "relu", "architectures": ["OPTForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 2, "do_layer_norm_before": true, "dropout": 0.1, "embed_dim": 16, "enable_bias": true, "eos_token_id": 2, "ffn_dim": 4, "hidden_size": 16, "init_std": 0.02, "is_decoder": true, "layer_norm_elementwise_affine": true, "layerdrop": 0.0, "max_position_embeddings": 100, "model_type": "opt", "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-OPTForCausalLM", "checkpoint_revision": "3f3f2ee1e499cb7ad89b877068684fdc9d9513c3", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 5, "pad_token_id": 1, "torch_dtype": "float32", "use_cache": true, "vocab_size": 50265, "word_embed_proj_dim": 16}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/f829b04255e2c75cd7c8.json
ADDED
@@ -0,0 +1 @@
{"_remove_final_layer_norm": false, "activation_function": "relu", "architectures": ["OPTForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 2, "do_layer_norm_before": true, "dropout": 0.1, "embed_dim": 16, "enable_bias": true, "eos_token_id": 2, "ffn_dim": 4, "hidden_size": 16, "init_std": 0.02, "is_decoder": true, "layer_norm_elementwise_affine": true, "layerdrop": 0.0, "max_position_embeddings": 100, "model_type": "opt", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-OPTForCausalLM", "checkpoint_revision": "3f3f2ee1e499cb7ad89b877068684fdc9d9513c3", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 5, "pad_token_id": 1, "torch_dtype": "float32", "use_cache": true, "vocab_size": 50265, "word_embed_proj_dim": 16}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/qwen2/Qwen/Qwen2.5-0.5B/f404b6bcd5e4c946681b.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["Qwen2ForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 151643, "eos_token_id": 151643, "hidden_act": "silu", "hidden_size": 896, "initializer_range": 0.02, "intermediate_size": 4864, "max_position_embeddings": 32768, "max_window_layers": 24, "model_type": "qwen2", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "Qwen/Qwen2.5-0.5B", "checkpoint_revision": "060db6499f32faf8b98477b0a26969ef7d8b9987", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 14, "num_hidden_layers": 24, "num_key_value_heads": 2, "rms_norm_eps": 1e-06, "rope_theta": 1000000.0, "sliding_window": null, "tie_word_embeddings": true, "torch_dtype": "bfloat16", "use_cache": true, "use_mrope": false, "use_sliding_window": false, "vocab_size": 151936}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/granite/ibm-granite/granite-3.0-8b-instruct/1438e8ced52a00df5a5e.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["GraniteForCausalLM"], "attention_bias": false, "attention_dropout": 0.1, "attention_multiplier": 0.0078125, "bos_token_id": 0, "embedding_multiplier": 12.0, "eos_token_id": 0, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 12800, "logits_scaling": 16.0, "max_position_embeddings": 4096, "mlp_bias": false, "model_type": "granite", "neuron": {"auto_cast_type": "bf16", "batch_size": 1, "checkpoint_id": "ibm-granite/granite-3.0-8b-instruct", "checkpoint_revision": "8fe1e202a17f7763bd0af471253e00cc846d1c05", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 40, "num_key_value_heads": 8, "pad_token_id": 0, "residual_multiplier": 0.22, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000, "tie_word_embeddings": true, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 49155}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/qwen2/Qwen/Qwen2.5-7B-Instruct/febd5daac4e51196a9ec.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["Qwen2ForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 151643, "eos_token_id": 151645, "hidden_act": "silu", "hidden_size": 3584, "initializer_range": 0.02, "intermediate_size": 18944, "max_position_embeddings": 32768, "max_window_layers": 28, "model_type": "qwen2", "neuron": {"auto_cast_type": "bf16", "batch_size": 1, "checkpoint_id": "Qwen/Qwen2.5-7B-Instruct", "checkpoint_revision": "bb46c15ee4bb56c5b63245ef50fd7637234d6f75", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 28, "num_hidden_layers": 28, "num_key_value_heads": 4, "rms_norm_eps": 1e-06, "rope_theta": 1000000.0, "sliding_window": null, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "use_sliding_window": false, "vocab_size": 152064}
neuronxcc-2.15.143.0+e39249ad/MODULE_0e92acc32b557ad5547d+39f12043/compile_flags.json
ADDED
@@ -0,0 +1 @@
["--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]
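Each MODULE_* directory pairs the compiled artifacts with the exact extra flags that were passed to neuronx-cc for that graph. A minimal sketch of reading them back from a local checkout (the path below is hypothetical):

```python
import json
from pathlib import Path

# Hypothetical path inside a local clone of this cache repository.
module_dir = Path("neuronxcc-2.15.143.0+e39249ad/MODULE_0e92acc32b557ad5547d+39f12043")

# compile_flags.json is a plain JSON list of extra neuronx-cc flags for this module.
flags = json.loads((module_dir / "compile_flags.json").read_text())
print(flags)  # ['--model-type=transformer', '--auto-cast=none', '--execute-repetition=1']
```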
neuronxcc-2.15.143.0+e39249ad/MODULE_0e92acc32b557ad5547d+39f12043/model.done
ADDED
File without changes
neuronxcc-2.15.143.0+e39249ad/MODULE_0e92acc32b557ad5547d+39f12043/model.hlo_module.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:2514b1562518240f392dc23f26eed4690632f195ab3fdf22ce55b08f9fa8b84d
size 327080
neuronxcc-2.15.143.0+e39249ad/MODULE_0e92acc32b557ad5547d+39f12043/model.neff
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:cd5c03370d27a0f2139658bebe2adacf802aa58a577f20d1e5942e82663d673b
size 3277824
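model.hlo_module.pb and model.neff are stored as Git LFS pointers: only the sha256 oid and byte size are versioned here, while the binaries themselves live in LFS storage. A small standard-library sketch for verifying a downloaded artifact against its pointer (both paths are hypothetical):

```python
import hashlib
from pathlib import Path

def check_lfs_pointer(pointer_file: Path, artifact: Path) -> bool:
    """Parse a Git LFS pointer (like the model.neff entry above) and verify that a
    downloaded artifact matches the recorded sha256 oid and byte size."""
    fields = dict(
        line.split(" ", 1) for line in pointer_file.read_text().splitlines() if line.strip()
    )
    expected_oid = fields["oid"].split(":", 1)[1]
    expected_size = int(fields["size"])
    data = artifact.read_bytes()
    return len(data) == expected_size and hashlib.sha256(data).hexdigest() == expected_oid

# Hypothetical paths: the pointer file as stored in this repo and the resolved binary.
print(check_lfs_pointer(Path("model.neff.pointer"), Path("model.neff")))
```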
neuronxcc-2.15.143.0+e39249ad/MODULE_39a9ccce9adcabc94675+39f12043/compile_flags.json
ADDED
@@ -0,0 +1 @@
["--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]
neuronxcc-2.15.143.0+e39249ad/MODULE_39a9ccce9adcabc94675+39f12043/model.done
ADDED
File without changes
neuronxcc-2.15.143.0+e39249ad/MODULE_39a9ccce9adcabc94675+39f12043/model.hlo_module.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:66139a9078f78d3463139b25024ac289e96ce1e865112ae0e4244da6d84cb53a
size 420857
neuronxcc-2.15.143.0+e39249ad/MODULE_39a9ccce9adcabc94675+39f12043/model.neff
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c84f867b50085e0c6914a24ad8f4ce02588f36ca004b3c67f84b8bb18b008aeb
size 2898944
neuronxcc-2.15.143.0+e39249ad/MODULE_3c33fc8c5b8139f55998+39f12043/compile_flags.json
ADDED
@@ -0,0 +1 @@
["--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]
neuronxcc-2.15.143.0+e39249ad/MODULE_3c33fc8c5b8139f55998+39f12043/model.done
ADDED
File without changes
neuronxcc-2.15.143.0+e39249ad/MODULE_3c33fc8c5b8139f55998+39f12043/model.hlo_module.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b1b1ae225c4153d533ac9f21418f6dc18c680a20eb8ee5e6eba4c7d9b525c855
size 420857
neuronxcc-2.15.143.0+e39249ad/MODULE_3c33fc8c5b8139f55998+39f12043/model.neff
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:781c61aef015d69019a5b102df834d1c89f065a0b8aeefc2da8c25d93e856e43
size 3011584
neuronxcc-2.15.143.0+e39249ad/MODULE_402e15278f41a32c430e+39f12043/compile_flags.json
ADDED
@@ -0,0 +1 @@
["--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]
neuronxcc-2.15.143.0+e39249ad/MODULE_402e15278f41a32c430e+39f12043/model.done
ADDED
File without changes
neuronxcc-2.15.143.0+e39249ad/MODULE_402e15278f41a32c430e+39f12043/model.hlo_module.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:85b2908092b6828bd169dc87b156620f30b188c6b8aae2f980fc407902cc1bc9
size 420857
neuronxcc-2.15.143.0+e39249ad/MODULE_402e15278f41a32c430e+39f12043/model.neff
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e06270f172345507c41126fa7d0dbb6f03994659f231c87a5a876e2f326ccf1e
size 2765824
neuronxcc-2.15.143.0+e39249ad/MODULE_6550f3db82ff02b1c1ff+39f12043/compile_flags.json
ADDED
@@ -0,0 +1 @@
["--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]
neuronxcc-2.15.143.0+e39249ad/MODULE_6550f3db82ff02b1c1ff+39f12043/model.done
ADDED
File without changes
neuronxcc-2.15.143.0+e39249ad/MODULE_6550f3db82ff02b1c1ff+39f12043/model.hlo_module.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:11ae7d1bd42a59bd70b349ee9db316ab2c76c50bdded4b7a2e54b78d40c0e8c1
size 420857
neuronxcc-2.15.143.0+e39249ad/MODULE_6550f3db82ff02b1c1ff+39f12043/model.neff
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:de9d486c03967c75b66a525b123a6376d3484869a1b525e0e833133243924423
size 2776064
neuronxcc-2.15.143.0+e39249ad/MODULE_67a9118947e020080e96+39f12043/compile_flags.json
ADDED
@@ -0,0 +1 @@
["--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]