Synchronizing local compiler cache.
- .gitattributes +4 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/4b3ca94bb4445bc28bc8.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/gpt2/gpt2/335f8d1c7218c7410000.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/gpt2/gpt2/73b7467bea092a4ea612.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/gpt2/gpt2/d5e93094d604b84cb59a.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/2d94b70ec1b5a0628be0.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/HuggingFaceTB/cosmo-1b/83c64ad31c0699e3053e.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/NousResearch/Llama-2-7b-chat-hf/0b3e037f44d96a1e7239.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/NousResearch/Llama-2-7b-chat-hf/f9468c010d2a222046f8.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/dacorvo/tiny-random-llama/62a76db84304b34ae305.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/1cfca4036d7b607639ea.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/f3adfe5a9c79b5a36fd7.json +1 -0
- neuronxcc-2.13.66.0+6dfecc895/MODULE_22208571677fdaef45b8+2c2d707e/compile_flags.txt +1 -0
- neuronxcc-2.13.66.0+6dfecc895/MODULE_22208571677fdaef45b8+2c2d707e/model.hlo.pb +3 -0
- neuronxcc-2.13.66.0+6dfecc895/MODULE_2479adeb0b632872414f+2c2d707e/compile_flags.txt +1 -0
- neuronxcc-2.13.66.0+6dfecc895/MODULE_2479adeb0b632872414f+2c2d707e/model.hlo.pb +3 -0
- neuronxcc-2.13.66.0+6dfecc895/MODULE_2479adeb0b632872414f+2c2d707e/model.neff +3 -0
- neuronxcc-2.13.66.0+6dfecc895/MODULE_37735138bb35881b813c+2c2d707e/compile_flags.txt +1 -0
- neuronxcc-2.13.66.0+6dfecc895/MODULE_37735138bb35881b813c+2c2d707e/model.hlo_module.pb +3 -0
- neuronxcc-2.13.66.0+6dfecc895/MODULE_90280928489ab33e491b+2c2d707e/compile_flags.txt +1 -0
- neuronxcc-2.13.66.0+6dfecc895/MODULE_90280928489ab33e491b+2c2d707e/model.hlo_module.pb +3 -0
- neuronxcc-2.13.66.0+6dfecc895/MODULE_90280928489ab33e491b+2c2d707e/model.neff +3 -0
- neuronxcc-2.13.66.0+6dfecc895/MODULE_af2cb26b11874417bb52+2c2d707e/compile_flags.txt +1 -0
- neuronxcc-2.13.66.0+6dfecc895/MODULE_af2cb26b11874417bb52+2c2d707e/model.hlo_module.pb +3 -0
- neuronxcc-2.13.66.0+6dfecc895/MODULE_b32b99eb1fd034e2ff1f+2c2d707e/compile_flags.txt +1 -0
- neuronxcc-2.13.66.0+6dfecc895/MODULE_b32b99eb1fd034e2ff1f+2c2d707e/model.hlo.pb +3 -0
- neuronxcc-2.13.66.0+6dfecc895/MODULE_b32b99eb1fd034e2ff1f+2c2d707e/model.neff +3 -0
- neuronxcc-2.13.66.0+6dfecc895/MODULE_df460dc3641040b7f184+2c2d707e/compile_flags.txt +1 -0
- neuronxcc-2.13.66.0+6dfecc895/MODULE_df460dc3641040b7f184+2c2d707e/model.hlo.pb +3 -0
- neuronxcc-2.13.66.0+6dfecc895/MODULE_e23c51a777ec06a5c339+2c2d707e/compile_flags.txt +1 -0
- neuronxcc-2.13.66.0+6dfecc895/MODULE_e23c51a777ec06a5c339+2c2d707e/model.hlo_module.pb +3 -0
- neuronxcc-2.13.66.0+6dfecc895/MODULE_e23c51a777ec06a5c339+2c2d707e/model.neff +3 -0
.gitattributes
CHANGED
@@ -2266,3 +2266,7 @@ neuronxcc-2.13.66.0+6dfecc895/MODULE_3707930837680218621+886f2c7a/model.neff fil
neuronxcc-2.13.66.0+6dfecc895/MODULE_3764015274843914896+886f2c7a/model.neff filter=lfs diff=lfs merge=lfs -text
neuronxcc-2.13.66.0+6dfecc895/MODULE_9121009113848966645+886f2c7a/model.neff filter=lfs diff=lfs merge=lfs -text
neuronxcc-2.13.66.0+6dfecc895/MODULE_964900186743760854+886f2c7a/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_2479adeb0b632872414f+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_90280928489ab33e491b+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_b32b99eb1fd034e2ff1f+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
+neuronxcc-2.13.66.0+6dfecc895/MODULE_e23c51a777ec06a5c339+2c2d707e/model.neff filter=lfs diff=lfs merge=lfs -text
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/4b3ca94bb4445bc28bc8.json
ADDED
@@ -0,0 +1 @@
{"apply_residual_connection_post_layernorm": false, "architectures": ["BloomForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 1, "dtype": "float32", "eos_token_id": 2, "gradient_checkpointing": false, "hidden_dropout": 0.1, "hidden_size": 32, "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "initializer_range": 0.02, "is_decoder": true, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "layer_norm_epsilon": 1e-05, "model_type": "bloom", "n_head": 4, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "92b07e9b7b4f986fa7c54e2ac3b9201b4ba5212e", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 3, "pretraining_tp": 1, "seq_length": 7, "slow_but_exact": true, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 1024}
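Each registry entry above stores the model config together with the "neuron" export parameters (batch_size, sequence_length, num_cores, auto_cast_type, compiler version) that produced the cached compilation artifacts. Below is a minimal sketch of how an entry like the tiny-random-BloomForCausalLM one maps onto an optimum-neuron export call; the exact command that generated this cache entry is not part of this commit, so treat it as illustrative only.

```python
# Hedged sketch: an export matching the "neuron" block of the
# tiny-random-BloomForCausalLM registry entry above.
# Assumes optimum-neuron is installed on a Neuron (trn1/inf2) host.
from optimum.neuron import NeuronModelForCausalLM

model = NeuronModelForCausalLM.from_pretrained(
    "hf-internal-testing/tiny-random-BloomForCausalLM",
    export=True,            # compile instead of loading an existing export
    batch_size=2,           # "batch_size": 2
    sequence_length=100,    # "sequence_length": 100
    num_cores=2,            # "num_cores": 2
    auto_cast_type="fp32",  # "auto_cast_type": "fp32"
)
model.save_pretrained("tiny-bloom-neuron")  # local output dir (arbitrary name)
```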
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/gpt2/gpt2/335f8d1c7218c7410000.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "architectures": ["GPT2LMHeadModel"], "attn_pdrop": 0.1, "bos_token_id": 50256, "embd_pdrop": 0.1, "eos_token_id": 50256, "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 1024, "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "n_positions": 1024, "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "gpt2", "checkpoint_revision": "607a30d783dfa663caf39e06633721c8d4cfcd7e", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 1024, "task": "text-generation"}, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "task_specific_params": {"text-generation": {"do_sample": true, "max_length": 50}}, "use_cache": true, "vocab_size": 50257}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/gpt2/gpt2/73b7467bea092a4ea612.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "architectures": ["GPT2LMHeadModel"], "attn_pdrop": 0.1, "bos_token_id": 50256, "embd_pdrop": 0.1, "eos_token_id": 50256, "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 1024, "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "n_positions": 1024, "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "gpt2", "checkpoint_revision": "607a30d783dfa663caf39e06633721c8d4cfcd7e", "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "num_cores": 2, "sequence_length": 1024, "task": "text-generation"}, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "task_specific_params": {"text-generation": {"do_sample": true, "max_length": 50}}, "use_cache": true, "vocab_size": 50257}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/gpt2/gpt2/d5e93094d604b84cb59a.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "architectures": ["GPT2LMHeadModel"], "attn_pdrop": 0.1, "bos_token_id": 50256, "embd_pdrop": 0.1, "eos_token_id": 50256, "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 1024, "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "n_positions": 1024, "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "gpt2", "checkpoint_revision": "607a30d783dfa663caf39e06633721c8d4cfcd7e", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 1024, "task": "text-generation"}, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "task_specific_params": {"text-generation": {"do_sample": true, "max_length": 50}}, "use_cache": true, "vocab_size": 50257}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/2d94b70ec1b5a0628be0.json
ADDED
@@ -0,0 +1 @@
{"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/HuggingFaceTB/cosmo-1b/83c64ad31c0699e3053e.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 8192, "max_position_embeddings": 2048, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "HuggingFaceTB/cosmo-1b", "checkpoint_revision": "0d5e341cfe835dffc81b6186f9715c094889f8ce", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 2048, "task": "text-generation"}, "num_attention_heads": 16, "num_hidden_layers": 24, "num_key_value_heads": 16, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float32", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/NousResearch/Llama-2-7b-chat-hf/0b3e037f44d96a1e7239.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 11008, "max_position_embeddings": 4096, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "NousResearch/Llama-2-7b-chat-hf", "checkpoint_revision": "37892f30c23786c0d5367d80481fa0d9fba93cf8", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 32, "pad_token_id": 0, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/NousResearch/Llama-2-7b-chat-hf/f9468c010d2a222046f8.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 11008, "max_position_embeddings": 4096, "model_type": "llama", "neuron": {"auto_cast_type": "f16", "batch_size": 2, "checkpoint_id": "NousResearch/Llama-2-7b-chat-hf", "checkpoint_revision": "37892f30c23786c0d5367d80481fa0d9fba93cf8", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 32, "pad_token_id": 0, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/llama/dacorvo/tiny-random-llama/62a76db84304b34ae305.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 128, "initializer_range": 0.02, "intermediate_size": 256, "max_position_embeddings": 512, "model_type": "llama", "neuron": {"auto_cast_type": "fp32", "batch_size": 2, "checkpoint_id": "dacorvo/tiny-random-llama", "checkpoint_revision": "7fdafd2fe6a2d31c6abb72ae60db606d8bb23196", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 1, "num_hidden_layers": 1, "num_key_value_heads": 1, "pretraining_tp": 1, "rms_norm_eps": 1e-06, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float32", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/1cfca4036d7b607639ea.json
ADDED
@@ -0,0 +1 @@
{"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "attention_probs_dropout_prob": 0.1, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 32, "initializer_range": 0.02, "intermediate_size": 37, "is_decoder": true, "max_position_embeddings": 512, "model_type": "mistral", "neuron": {"auto_cast_type": "fp32", "batch_size": 2, "checkpoint_id": "dacorvo/tiny-random-MistralForCausalLM", "checkpoint_revision": "81d453e3c8985649e9ee3d4c9378461029d1c73a", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 2, "pad_token_id": 0, "rms_norm_eps": 1e-06, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 32000}
neuronxcc-2.13.66.0+6dfecc895/0_REGISTRY/0.0.22.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/f3adfe5a9c79b5a36fd7.json
ADDED
@@ -0,0 +1 @@
{"_remove_final_layer_norm": false, "activation_function": "relu", "architectures": ["OPTForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 2, "do_layer_norm_before": true, "dropout": 0.1, "embed_dim": 16, "enable_bias": true, "eos_token_id": 2, "ffn_dim": 4, "hidden_size": 16, "init_std": 0.02, "is_decoder": true, "layer_norm_elementwise_affine": true, "layerdrop": 0.0, "max_position_embeddings": 100, "model_type": "opt", "neuron": {"auto_cast_type": "fp32", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-OPTForCausalLM", "checkpoint_revision": "190d1f4fc0011d2eaeaa05282e0fbd2445e4b11f", "compiler_type": "neuronx-cc", "compiler_version": "2.13.66.0+6dfecc895", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 5, "pad_token_id": 1, "torch_dtype": "float32", "use_cache": true, "vocab_size": 50265, "word_embed_proj_dim": 16}
neuronxcc-2.13.66.0+6dfecc895/MODULE_22208571677fdaef45b8+2c2d707e/compile_flags.txt
ADDED
@@ -0,0 +1 @@
--model-type=transformer --auto-cast=none
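Each MODULE_* directory pairs a serialized HLO input with the flags it was compiled with. The sketch below shows one plausible way to replay such a cached compilation; only --model-type=transformer and --auto-cast=none come from compile_flags.txt above, while the compile subcommand and the --framework/--output options are assumptions about the neuronx-cc CLI rather than anything recorded in this commit.

```python
# Hedged sketch: replay a cached MODULE_* compilation with neuronx-cc.
# The subcommand and extra options are assumed; the two stored flags
# are read back from compile_flags.txt.
import subprocess
from pathlib import Path

module_dir = Path("neuronxcc-2.13.66.0+6dfecc895/MODULE_22208571677fdaef45b8+2c2d707e")
flags = (module_dir / "compile_flags.txt").read_text().split()

cmd = [
    "neuronx-cc", "compile",           # assumed CLI shape
    str(module_dir / "model.hlo.pb"),  # serialized HLO module from the cache
    "--framework", "XLA",              # assumed
    "--output", str(module_dir / "model.neff"),
    *flags,                            # --model-type=transformer --auto-cast=none
]
subprocess.run(cmd, check=True)
```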
neuronxcc-2.13.66.0+6dfecc895/MODULE_22208571677fdaef45b8+2c2d707e/model.hlo.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:903548378821a6a55056bc32966b017b3ead1dbdfc8c9d55e649249174895b0a
size 381754
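The .pb and .neff entries in this commit are Git LFS pointer files: the binaries themselves live in LFS storage, and each pointer records the SHA-256 of the content plus its size. A small sketch for checking a checked-out blob against its pointer follows; the path and oid are the ones from the block above, the rest is plain hashlib usage.

```python
# Minimal sketch: verify a downloaded LFS object against its pointer.
# Git LFS identifies an object by the SHA-256 of its raw contents.
import hashlib

def lfs_oid(path: str) -> str:
    """Return the sha256 hex digest of a file, streamed in 1 MiB chunks."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "903548378821a6a55056bc32966b017b3ead1dbdfc8c9d55e649249174895b0a"  # oid from the pointer above
path = "neuronxcc-2.13.66.0+6dfecc895/MODULE_22208571677fdaef45b8+2c2d707e/model.hlo.pb"
assert lfs_oid(path) == expected, "checked-out file does not match its LFS pointer"
```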
neuronxcc-2.13.66.0+6dfecc895/MODULE_2479adeb0b632872414f+2c2d707e/compile_flags.txt
ADDED
@@ -0,0 +1 @@
--model-type=transformer --auto-cast=none
neuronxcc-2.13.66.0+6dfecc895/MODULE_2479adeb0b632872414f+2c2d707e/model.hlo.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a8652a9d43fe6eb1f68cf74df9ef3a6b4a45d8f7b1b6486d97c51cb1bb7dc351
size 252757
neuronxcc-2.13.66.0+6dfecc895/MODULE_2479adeb0b632872414f+2c2d707e/model.neff
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a2c29ecd11672e0c03c644f1e8ea26e20f50f0ee271114e20655c4d92ba7d8f6
size 10947584
neuronxcc-2.13.66.0+6dfecc895/MODULE_37735138bb35881b813c+2c2d707e/compile_flags.txt
ADDED
@@ -0,0 +1 @@
--model-type=transformer --auto-cast=none
neuronxcc-2.13.66.0+6dfecc895/MODULE_37735138bb35881b813c+2c2d707e/model.hlo_module.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:907cb88299ef60d1a6cbe46a0f1fdf18a6c517942a285d7eec6885bf7babd423
size 336782
neuronxcc-2.13.66.0+6dfecc895/MODULE_90280928489ab33e491b+2c2d707e/compile_flags.txt
ADDED
@@ -0,0 +1 @@
--model-type=transformer --auto-cast=none
neuronxcc-2.13.66.0+6dfecc895/MODULE_90280928489ab33e491b+2c2d707e/model.hlo_module.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4646afa2cacf5cf4ebf684da166a68eeb1b75ba073fee8a1e874d54d5f7e246e
size 336782
neuronxcc-2.13.66.0+6dfecc895/MODULE_90280928489ab33e491b+2c2d707e/model.neff
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9fcbd07bfa72ef77799b02e660af49dfd82f12a662c160b33be51e36b1d14547
size 16722944
neuronxcc-2.13.66.0+6dfecc895/MODULE_af2cb26b11874417bb52+2c2d707e/compile_flags.txt
ADDED
@@ -0,0 +1 @@
--model-type=transformer --auto-cast=none
neuronxcc-2.13.66.0+6dfecc895/MODULE_af2cb26b11874417bb52+2c2d707e/model.hlo_module.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:fb23829c01af566c984ff87b0287a5db822a5dc8aaa40f580e838673b4e6390e
size 381754
neuronxcc-2.13.66.0+6dfecc895/MODULE_b32b99eb1fd034e2ff1f+2c2d707e/compile_flags.txt
ADDED
@@ -0,0 +1 @@
--model-type=transformer --auto-cast=none
neuronxcc-2.13.66.0+6dfecc895/MODULE_b32b99eb1fd034e2ff1f+2c2d707e/model.hlo.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f35d4429a9457f8d63a9f4da23ed5f20829978269b2822b04111c103a324d000
size 286680
neuronxcc-2.13.66.0+6dfecc895/MODULE_b32b99eb1fd034e2ff1f+2c2d707e/model.neff
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3c30d851a8bfd680e277ec795bb11d0ae43000e5993389c8c1e8b9d6ba25abf7
size 4568064
neuronxcc-2.13.66.0+6dfecc895/MODULE_df460dc3641040b7f184+2c2d707e/compile_flags.txt
ADDED
@@ -0,0 +1 @@
--model-type=transformer --auto-cast=none
neuronxcc-2.13.66.0+6dfecc895/MODULE_df460dc3641040b7f184+2c2d707e/model.hlo.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f45d7ae3df88d4f330b1a687e8f5bee2fcc6a67cf059a2b9fe8373f20486d145
size 336782
neuronxcc-2.13.66.0+6dfecc895/MODULE_e23c51a777ec06a5c339+2c2d707e/compile_flags.txt
ADDED
@@ -0,0 +1 @@
--model-type=transformer --auto-cast=none
neuronxcc-2.13.66.0+6dfecc895/MODULE_e23c51a777ec06a5c339+2c2d707e/model.hlo_module.pb
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:eaa3859233537a1979e68dad0cd6c9ddf6a2b59d37ae2b5927ec6edb5928b1c2
size 381754
neuronxcc-2.13.66.0+6dfecc895/MODULE_e23c51a777ec06a5c339+2c2d707e/model.neff
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e5a5c402228d18ab987fcb81aa4c0848ea894fc4e4e1a4be572348b448d71cef
size 13466624