Synchronizing local compiler cache.
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.20.dev0/inference/llama/NousResearch/Llama-2-7b-chat-hf/daaa8263f7776eb476dd.json +1 -0
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.20.dev0/inference/llama/NousResearch/Llama-2-7b-chat-hf/dbbfa3e66746ae8b762d.json +1 -0
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.20.dev0/inference/llama/NousResearch/Llama-2-7b-hf/16636cb4f92ce1bd7569.json +1 -0
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.20/inference/gpt2/gpt2/f63183731214a421a012.json +1 -0
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.21.dev0/inference/gpt2/gpt2/f63183731214a421a012.json +1 -0
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.20.dev0/inference/llama/NousResearch/Llama-2-7b-chat-hf/daaa8263f7776eb476dd.json
ADDED
@@ -0,0 +1 @@
{"vocab_size": 32000, "max_position_embeddings": 4096, "hidden_size": 4096, "intermediate_size": 11008, "num_hidden_layers": 32, "num_attention_heads": 32, "num_key_value_heads": 32, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-05, "pretraining_tp": 1, "use_cache": true, "rope_theta": 10000.0, "rope_scaling": null, "attention_bias": false, "attention_dropout": 0.0, "torch_dtype": "float16", "tie_word_embeddings": false, "architectures": ["LlamaForCausalLM"], "bos_token_id": 1, "pad_token_id": 0, "eos_token_id": 2, "model_type": "llama", "neuron": {"task": "text-generation", "batch_size": 4, "num_cores": 2, "auto_cast_type": "fp16", "sequence_length": 4096, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "NousResearch/Llama-2-7b-chat-hf", "checkpoint_revision": "37892f30c23786c0d5367d80481fa0d9fba93cf8"}}
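For context, a registry entry like the one above is written when a model is exported for Neuron; the entry records the source checkpoint and the export parameters under its "neuron" key. A minimal sketch of the export that would produce such an entry, assuming the optimum-neuron Python API and a Neuron-capable instance (e.g. inf2); the parameter values are taken directly from the entry:

from optimum.neuron import NeuronModelForCausalLM

# Compile Llama-2-7b-chat-hf with neuronx-cc using the parameters recorded
# in the registry entry above. The compiled artifacts land in the local
# compiler cache that this commit synchronizes to the Hub.
model = NeuronModelForCausalLM.from_pretrained(
    "NousResearch/Llama-2-7b-chat-hf",
    export=True,             # compile rather than load precompiled artifacts
    batch_size=4,
    num_cores=2,             # tensor parallelism across two NeuronCores
    auto_cast_type="fp16",
    sequence_length=4096,
)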
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.20.dev0/inference/llama/NousResearch/Llama-2-7b-chat-hf/dbbfa3e66746ae8b762d.json
ADDED
@@ -0,0 +1 @@
{"vocab_size": 32000, "max_position_embeddings": 4096, "hidden_size": 4096, "intermediate_size": 11008, "num_hidden_layers": 32, "num_attention_heads": 32, "num_key_value_heads": 32, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-05, "pretraining_tp": 1, "use_cache": true, "rope_theta": 10000.0, "rope_scaling": null, "attention_bias": false, "attention_dropout": 0.0, "torch_dtype": "float16", "tie_word_embeddings": false, "architectures": ["LlamaForCausalLM"], "bos_token_id": 1, "pad_token_id": 0, "eos_token_id": 2, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "NousResearch/Llama-2-7b-chat-hf", "checkpoint_revision": "37892f30c23786c0d5367d80481fa0d9fba93cf8", "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.20.dev0/inference/llama/NousResearch/Llama-2-7b-hf/16636cb4f92ce1bd7569.json
ADDED
@@ -0,0 +1 @@
{"vocab_size": 32000, "max_position_embeddings": 4096, "hidden_size": 4096, "intermediate_size": 11008, "num_hidden_layers": 32, "num_attention_heads": 32, "num_key_value_heads": 32, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-05, "pretraining_tp": 1, "use_cache": true, "rope_theta": 10000.0, "rope_scaling": null, "attention_bias": false, "attention_dropout": 0.0, "torch_dtype": "float16", "tie_word_embeddings": false, "architectures": ["LlamaForCausalLM"], "bos_token_id": 1, "pad_token_id": 0, "eos_token_id": 2, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "NousResearch/Llama-2-7b-hf", "checkpoint_revision": "dacdfcde31297e34b19ee0e7532f29586d2c17bc", "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.20/inference/gpt2/gpt2/f63183731214a421a012.json
ADDED
@@ -0,0 +1 @@
{"vocab_size": 50257, "n_positions": 1024, "n_embd": 768, "n_layer": 12, "n_head": 12, "n_inner": null, "activation_function": "gelu_new", "resid_pdrop": 0.1, "embd_pdrop": 0.1, "attn_pdrop": 0.1, "layer_norm_epsilon": 1e-05, "initializer_range": 0.02, "summary_type": "cls_index", "summary_use_proj": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "scale_attn_weights": true, "use_cache": true, "scale_attn_by_inverse_layer_idx": false, "reorder_and_upcast_attn": false, "bos_token_id": 50256, "eos_token_id": 50256, "architectures": ["GPT2LMHeadModel"], "task_specific_params": {"text-generation": {"do_sample": true, "max_length": 50}}, "model_type": "gpt2", "n_ctx": 1024, "neuron": {"task": "text-generation", "batch_size": 1, "num_cores": 2, "auto_cast_type": "fp32", "sequence_length": 1024, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "gpt2", "checkpoint_revision": "607a30d783dfa663caf39e06633721c8d4cfcd7e"}}
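The GPT-2 entry above corresponds to a much smaller export; a sketch along the same lines, again assuming the optimum-neuron API, with values taken from the entry's "neuron" section:

from optimum.neuron import NeuronModelForCausalLM

model = NeuronModelForCausalLM.from_pretrained(
    "gpt2",
    export=True,
    batch_size=1,
    num_cores=2,
    auto_cast_type="fp32",   # small model, kept in full precision
    sequence_length=1024,    # matches the model's n_positions
)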
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.21.dev0/inference/gpt2/gpt2/f63183731214a421a012.json
ADDED
@@ -0,0 +1 @@
{"vocab_size": 50257, "n_positions": 1024, "n_embd": 768, "n_layer": 12, "n_head": 12, "n_inner": null, "activation_function": "gelu_new", "resid_pdrop": 0.1, "embd_pdrop": 0.1, "attn_pdrop": 0.1, "layer_norm_epsilon": 1e-05, "initializer_range": 0.02, "summary_type": "cls_index", "summary_use_proj": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "scale_attn_weights": true, "use_cache": true, "scale_attn_by_inverse_layer_idx": false, "reorder_and_upcast_attn": false, "bos_token_id": 50256, "eos_token_id": 50256, "architectures": ["GPT2LMHeadModel"], "task_specific_params": {"text-generation": {"do_sample": true, "max_length": 50}}, "model_type": "gpt2", "n_ctx": 1024, "neuron": {"task": "text-generation", "batch_size": 1, "num_cores": 2, "auto_cast_type": "fp32", "sequence_length": 1024, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "gpt2", "checkpoint_revision": "607a30d783dfa663caf39e06633721c8d4cfcd7e"}}
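Once an entry with matching parameters exists in the registry, the same from_pretrained call should resolve to the cached compiled artifacts instead of recompiling. Continuing the GPT-2 sketch above (hypothetical usage, not part of this commit), the compiled model generates text through the standard transformers interface:

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("gpt2")
inputs = tokenizer("Hello, my name is", return_tensors="pt")
# do_sample/max_length mirror the task_specific_params recorded in the config.
outputs = model.generate(**inputs, do_sample=True, max_length=50)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))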