dacorvo (HF staff) committed on
Commit 609c068
Parent: e4ad714

Synchronizing local compiler cache.
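A sync like this is, at bottom, a bulk upload of the local neuronxcc-* cache directory to a Hub cache repository, so other machines can fetch pre-compiled artifacts instead of recompiling. The sketch below is illustrative only and is not the optimum-neuron implementation; the local path, repo id and repo type are placeholder assumptions, and huggingface_hub's upload_folder is used as the generic building block such a sync could rely on.

    # Illustrative sketch: push a local Neuron compiler cache to a Hub repo.
    # The real sync is done by optimum-neuron's cache utilities; the paths,
    # repo id and repo type below are placeholder assumptions.
    from huggingface_hub import HfApi

    api = HfApi()
    api.upload_folder(
        folder_path="/var/tmp/neuron-compile-cache/neuronxcc-2.15.128.0+56dc5a86",
        path_in_repo="neuronxcc-2.15.128.0+56dc5a86",
        repo_id="my-org/optimum-neuron-cache",  # placeholder cache repo
        repo_type="model",                      # assumption
        commit_message="Synchronizing local compiler cache.",
    )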

This view is limited to 50 files because the commit contains too many changes.
Files changed (50)
  1. .gitattributes +14 -0
  2. neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/12469cfbaa2be7979be9.json +1 -0
  3. neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/3047f14e5cd104f6da88.json +1 -0
  4. neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/cbf5e07921ee14bd6483.json +1 -0
  5. neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/gpt2/gpt2/36aa91009c1c430c8dcf.json +1 -0
  6. neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/gpt2/gpt2/98f807ee88912bf4b9d8.json +1 -0
  7. neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/0b5b5348467d20a868da.json +1 -0
  8. neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/3a30f319d7cae1f3076c.json +1 -0
  9. neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/6b3192766c61d426df54.json +1 -0
  10. neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/74bc6efd0eb46f234bef.json +1 -0
  11. neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/8a6582d10bf46ea7004e.json +1 -0
  12. neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/llama/NousResearch/Hermes-2-Theta-Llama-3-8B/eda26b3891458ca144a4.json +1 -0
  13. neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/llama/dacorvo/tiny-random-llama/08720e1cfeb1befa20c2.json +1 -0
  14. neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/llama/dacorvo/tiny-random-llama/5ba8a7533020030cff1d.json +1 -0
  15. neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/llama/dacorvo/tiny-random-llama/e7337f4af7481c9d827e.json +1 -0
  16. neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/3bdcb3d6d5fb4b0b7097.json +1 -0
  17. neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/3d676d12f1566545d6b0.json +1 -0
  18. neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/ad4d8b389573be75fbee.json +1 -0
  19. neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/mistral/optimum/mistral-1.1b-testing/081b187113a5f417a9e0.json +1 -0
  20. neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/mixtral/dacorvo/Mixtral-tiny/3a688b7dcd45d9a80b14.json +1 -0
  21. neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/mixtral/dacorvo/Mixtral-tiny/89980755d7c8bc1b31b0.json +1 -0
  22. neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/mixtral/dacorvo/Mixtral-tiny/9f85ef35436cc6a20682.json +1 -0
  23. neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/mixtral/dacorvo/Mixtral-tiny/f1477b1f14a6669df1a7.json +1 -0
  24. neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/63ef91a527f73ba3b87e.json +1 -0
  25. neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/aa9fef08c5f79c9217fb.json +1 -0
  26. neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/fe9dc58f7d028523a0a6.json +1 -0
  27. neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/t5/hf-internal-testing/tiny-random-t5/af6cce39edb0ba1725d3.json +1 -0
  28. neuronxcc-2.15.128.0+56dc5a86/MODULE_00a895c500f614d5e448+39f12043/compile_flags.json +1 -0
  29. neuronxcc-2.15.128.0+56dc5a86/MODULE_00a895c500f614d5e448+39f12043/model.done +0 -0
  30. neuronxcc-2.15.128.0+56dc5a86/MODULE_00a895c500f614d5e448+39f12043/model.hlo_module.pb +3 -0
  31. neuronxcc-2.15.128.0+56dc5a86/MODULE_00a895c500f614d5e448+39f12043/model.neff +0 -0
  32. neuronxcc-2.15.128.0+56dc5a86/MODULE_0270f34f54c498cedeec+39f12043/compile_flags.json +1 -0
  33. neuronxcc-2.15.128.0+56dc5a86/MODULE_0270f34f54c498cedeec+39f12043/model.done +0 -0
  34. neuronxcc-2.15.128.0+56dc5a86/MODULE_0270f34f54c498cedeec+39f12043/model.hlo_module.pb +3 -0
  35. neuronxcc-2.15.128.0+56dc5a86/MODULE_0270f34f54c498cedeec+39f12043/model.neff +0 -0
  36. neuronxcc-2.15.128.0+56dc5a86/MODULE_05d01a658506892e6bcf+39f12043/compile_flags.json +1 -0
  37. neuronxcc-2.15.128.0+56dc5a86/MODULE_05d01a658506892e6bcf+39f12043/model.done +0 -0
  38. neuronxcc-2.15.128.0+56dc5a86/MODULE_05d01a658506892e6bcf+39f12043/model.hlo_module.pb +3 -0
  39. neuronxcc-2.15.128.0+56dc5a86/MODULE_05d01a658506892e6bcf+39f12043/model.neff +0 -0
  40. neuronxcc-2.15.128.0+56dc5a86/MODULE_0d7bde45dbf2d5c06852+39f12043/compile_flags.json +1 -0
  41. neuronxcc-2.15.128.0+56dc5a86/MODULE_0d7bde45dbf2d5c06852+39f12043/model.done +0 -0
  42. neuronxcc-2.15.128.0+56dc5a86/MODULE_0d7bde45dbf2d5c06852+39f12043/model.hlo_module.pb +3 -0
  43. neuronxcc-2.15.128.0+56dc5a86/MODULE_0d7bde45dbf2d5c06852+39f12043/model.neff +0 -0
  44. neuronxcc-2.15.128.0+56dc5a86/MODULE_0ea3601cf80a3b68f5bb/feature_extractor/preprocessor_config.json +27 -0
  45. neuronxcc-2.15.128.0+56dc5a86/MODULE_0ea3601cf80a3b68f5bb/model_index.json +38 -0
  46. neuronxcc-2.15.128.0+56dc5a86/MODULE_0ea3601cf80a3b68f5bb/scheduler/scheduler_config.json +15 -0
  47. neuronxcc-2.15.128.0+56dc5a86/MODULE_0ea3601cf80a3b68f5bb/text_encoder/config.json +46 -0
  48. neuronxcc-2.15.128.0+56dc5a86/MODULE_0ea3601cf80a3b68f5bb/text_encoder/model.neuron +0 -0
  49. neuronxcc-2.15.128.0+56dc5a86/MODULE_0ea3601cf80a3b68f5bb/tokenizer/merges.txt +647 -0
  50. neuronxcc-2.15.128.0+56dc5a86/MODULE_0ea3601cf80a3b68f5bb/tokenizer/special_tokens_map.json +24 -0
.gitattributes CHANGED
@@ -3874,3 +3874,17 @@ neuronxcc-2.14.227.0+2d4f85be/MODULE_3b2bf60b825c2327d04d/text_encoder_2/model.n
  neuronxcc-2.14.227.0+2d4f85be/MODULE_3b2bf60b825c2327d04d/unet/model.neuron filter=lfs diff=lfs merge=lfs -text
  neuronxcc-2.14.227.0+2d4f85be/MODULE_3b2bf60b825c2327d04d/vae_decoder/model.neuron filter=lfs diff=lfs merge=lfs -text
  neuronxcc-2.14.227.0+2d4f85be/MODULE_3b2bf60b825c2327d04d/vae_encoder/model.neuron filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.128.0+56dc5a86/MODULE_0ea3601cf80a3b68f5bb/unet/model.neuron filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.128.0+56dc5a86/MODULE_0ea3601cf80a3b68f5bb/vae_decoder/model.neuron filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.128.0+56dc5a86/MODULE_0ea3601cf80a3b68f5bb/vae_encoder/model.neuron filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.128.0+56dc5a86/MODULE_1817e58bba6976294b3a/unet/model.neuron filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.128.0+56dc5a86/MODULE_1817e58bba6976294b3a/vae_decoder/model.neuron filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.128.0+56dc5a86/MODULE_1817e58bba6976294b3a/vae_encoder/model.neuron filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.128.0+56dc5a86/MODULE_3a7777a73798af802ac1+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.128.0+56dc5a86/MODULE_3da92ce16fff3bc522f8+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.128.0+56dc5a86/MODULE_3e0ab9335d16a5ec49ef+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.128.0+56dc5a86/MODULE_8a0f4078fcafddeba45a/unet/model.neuron filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.128.0+56dc5a86/MODULE_8a0f4078fcafddeba45a/vae_decoder/model.neuron filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.128.0+56dc5a86/MODULE_8a0f4078fcafddeba45a/vae_encoder/model.neuron filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.128.0+56dc5a86/MODULE_d1618c04ba6ca3abe828+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.128.0+56dc5a86/MODULE_d36186ef4c022d90fe8c+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/12469cfbaa2be7979be9.json ADDED
@@ -0,0 +1 @@
+ {"apply_residual_connection_post_layernorm": false, "architectures": ["BloomForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 1, "dtype": "float32", "eos_token_id": 2, "gradient_checkpointing": false, "hidden_dropout": 0.1, "hidden_size": 32, "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "initializer_range": 0.02, "is_decoder": true, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "layer_norm_epsilon": 1e-05, "model_type": "bloom", "n_head": 4, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "92b07e9b7b4f986fa7c54e2ac3b9201b4ba5212e", "compiler_type": "neuronx-cc", "compiler_version": "2.15.128.0+56dc5a86", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 3, "pretraining_tp": 1, "seq_length": 7, "slow_but_exact": true, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 1024}
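Each of these 0_REGISTRY entries is the original model config with an extra "neuron" section recording how the checkpoint was compiled (auto_cast_type, batch_size, sequence_length, num_cores, compiler_version, checkpoint revision). Below is a minimal sketch of scanning such entries to see which configurations are already cached; the helper and the relative paths are hypothetical, not optimum-neuron's actual lookup code.

    # Hypothetical helper: list the cached compilation configurations recorded
    # in 0_REGISTRY entries for a given checkpoint. Paths are illustrative.
    import json
    from pathlib import Path

    def cached_configs(registry_root, checkpoint_id):
        for entry in sorted(Path(registry_root).rglob("*.json")):
            neuron = json.loads(entry.read_text()).get("neuron", {})
            if neuron.get("checkpoint_id") == checkpoint_id:
                yield entry.name, neuron

    root = "neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference"
    for name, cfg in cached_configs(root, "hf-internal-testing/tiny-random-BloomForCausalLM"):
        print(name, cfg["batch_size"], cfg["sequence_length"], cfg["auto_cast_type"])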
neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/3047f14e5cd104f6da88.json ADDED
@@ -0,0 +1 @@
+ {"apply_residual_connection_post_layernorm": false, "architectures": ["BloomForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 1, "dtype": "float32", "eos_token_id": 2, "gradient_checkpointing": false, "hidden_dropout": 0.1, "hidden_size": 32, "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "initializer_range": 0.02, "is_decoder": true, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "layer_norm_epsilon": 1e-05, "model_type": "bloom", "n_head": 4, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "92b07e9b7b4f986fa7c54e2ac3b9201b4ba5212e", "compiler_type": "neuronx-cc", "compiler_version": "2.15.128.0+56dc5a86", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 3, "pretraining_tp": 1, "seq_length": 7, "slow_but_exact": true, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 1024}
neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/cbf5e07921ee14bd6483.json ADDED
@@ -0,0 +1 @@
+ {"apply_residual_connection_post_layernorm": false, "architectures": ["BloomForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 1, "dtype": "float32", "eos_token_id": 2, "gradient_checkpointing": false, "hidden_dropout": 0.1, "hidden_size": 32, "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "initializer_range": 0.02, "is_decoder": true, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "layer_norm_epsilon": 1e-05, "model_type": "bloom", "n_head": 4, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "92b07e9b7b4f986fa7c54e2ac3b9201b4ba5212e", "compiler_type": "neuronx-cc", "compiler_version": "2.15.128.0+56dc5a86", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 3, "pretraining_tp": 1, "seq_length": 7, "slow_but_exact": true, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 1024}
neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/gpt2/gpt2/36aa91009c1c430c8dcf.json ADDED
@@ -0,0 +1 @@
+ {"activation_function": "gelu_new", "architectures": ["GPT2LMHeadModel"], "attn_pdrop": 0.1, "bos_token_id": 50256, "embd_pdrop": 0.1, "eos_token_id": 50256, "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 1024, "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "n_positions": 1024, "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "gpt2", "checkpoint_revision": "607a30d783dfa663caf39e06633721c8d4cfcd7e", "compiler_type": "neuronx-cc", "compiler_version": "2.15.128.0+56dc5a86", "num_cores": 2, "sequence_length": 128, "task": "text-generation"}, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "task_specific_params": {"text-generation": {"do_sample": true, "max_length": 50}}, "use_cache": true, "vocab_size": 50257}
neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/gpt2/gpt2/98f807ee88912bf4b9d8.json ADDED
@@ -0,0 +1 @@
+ {"activation_function": "gelu_new", "architectures": ["GPT2LMHeadModel"], "attn_pdrop": 0.1, "bos_token_id": 50256, "embd_pdrop": 0.1, "eos_token_id": 50256, "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 1024, "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "n_positions": 1024, "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "gpt2", "checkpoint_revision": "607a30d783dfa663caf39e06633721c8d4cfcd7e", "compiler_type": "neuronx-cc", "compiler_version": "2.15.128.0+56dc5a86", "num_cores": 2, "sequence_length": 1024, "task": "text-generation"}, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "task_specific_params": {"text-generation": {"do_sample": true, "max_length": 50}}, "use_cache": true, "vocab_size": 50257}
neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/0b5b5348467d20a868da.json ADDED
@@ -0,0 +1 @@
+ {"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.15.128.0+56dc5a86", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/3a30f319d7cae1f3076c.json ADDED
@@ -0,0 +1 @@
+ {"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.15.128.0+56dc5a86", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/6b3192766c61d426df54.json ADDED
@@ -0,0 +1 @@
+ {"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.15.128.0+56dc5a86", "num_cores": 24, "sequence_length": 512, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/74bc6efd0eb46f234bef.json ADDED
@@ -0,0 +1 @@
+ {"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.15.128.0+56dc5a86", "num_cores": 2, "sequence_length": 512, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/8a6582d10bf46ea7004e.json ADDED
@@ -0,0 +1 @@
+ {"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.15.128.0+56dc5a86", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/llama/NousResearch/Hermes-2-Theta-Llama-3-8B/eda26b3891458ca144a4.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128003, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "NousResearch/Hermes-2-Theta-Llama-3-8B", "checkpoint_revision": "d62e0c7237c7b851e8d9ae9277f9f107d174542c", "compiler_type": "neuronx-cc", "compiler_version": "2.15.128.0+56dc5a86", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/llama/dacorvo/tiny-random-llama/08720e1cfeb1befa20c2.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 128, "initializer_range": 0.02, "intermediate_size": 256, "max_position_embeddings": 512, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "dacorvo/tiny-random-llama", "checkpoint_revision": "7fdafd2fe6a2d31c6abb72ae60db606d8bb23196", "compiler_type": "neuronx-cc", "compiler_version": "2.15.128.0+56dc5a86", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 1, "num_hidden_layers": 1, "num_key_value_heads": 1, "pretraining_tp": 1, "rms_norm_eps": 1e-06, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float32", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/llama/dacorvo/tiny-random-llama/5ba8a7533020030cff1d.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 128, "initializer_range": 0.02, "intermediate_size": 256, "max_position_embeddings": 512, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "dacorvo/tiny-random-llama", "checkpoint_revision": "7fdafd2fe6a2d31c6abb72ae60db606d8bb23196", "compiler_type": "neuronx-cc", "compiler_version": "2.15.128.0+56dc5a86", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 1, "num_hidden_layers": 1, "num_key_value_heads": 1, "pretraining_tp": 1, "rms_norm_eps": 1e-06, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float32", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/llama/dacorvo/tiny-random-llama/e7337f4af7481c9d827e.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 128, "initializer_range": 0.02, "intermediate_size": 256, "max_position_embeddings": 512, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "dacorvo/tiny-random-llama", "checkpoint_revision": "7fdafd2fe6a2d31c6abb72ae60db606d8bb23196", "compiler_type": "neuronx-cc", "compiler_version": "2.15.128.0+56dc5a86", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 1, "num_hidden_layers": 1, "num_key_value_heads": 1, "pretraining_tp": 1, "rms_norm_eps": 1e-06, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float32", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/3bdcb3d6d5fb4b0b7097.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "attention_probs_dropout_prob": 0.1, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 8, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 32, "initializer_range": 0.02, "intermediate_size": 37, "is_decoder": true, "max_position_embeddings": 512, "model_type": "mistral", "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "dacorvo/tiny-random-MistralForCausalLM", "checkpoint_revision": "81d453e3c8985649e9ee3d4c9378461029d1c73a", "compiler_type": "neuronx-cc", "compiler_version": "2.15.128.0+56dc5a86", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 2, "pad_token_id": 0, "rms_norm_eps": 1e-06, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 32000}
neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/3d676d12f1566545d6b0.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "attention_probs_dropout_prob": 0.1, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 8, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 32, "initializer_range": 0.02, "intermediate_size": 37, "is_decoder": true, "max_position_embeddings": 512, "model_type": "mistral", "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "dacorvo/tiny-random-MistralForCausalLM", "checkpoint_revision": "81d453e3c8985649e9ee3d4c9378461029d1c73a", "compiler_type": "neuronx-cc", "compiler_version": "2.15.128.0+56dc5a86", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 2, "pad_token_id": 0, "rms_norm_eps": 1e-06, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 32000}
neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/ad4d8b389573be75fbee.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "attention_probs_dropout_prob": 0.1, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 8, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 32, "initializer_range": 0.02, "intermediate_size": 37, "is_decoder": true, "max_position_embeddings": 512, "model_type": "mistral", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "dacorvo/tiny-random-MistralForCausalLM", "checkpoint_revision": "81d453e3c8985649e9ee3d4c9378461029d1c73a", "compiler_type": "neuronx-cc", "compiler_version": "2.15.128.0+56dc5a86", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 2, "pad_token_id": 0, "rms_norm_eps": 1e-06, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 32000}
neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/mistral/optimum/mistral-1.1b-testing/081b187113a5f417a9e0.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 64, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 5632, "max_position_embeddings": 32768, "model_type": "mistral", "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "optimum/mistral-1.1b-testing", "checkpoint_revision": "ce03bc8d47dbd2c173ff65f3a8de1325ba724195", "compiler_type": "neuronx-cc", "compiler_version": "2.15.128.0+56dc5a86", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 22, "num_key_value_heads": 4, "rms_norm_eps": 1e-05, "rope_theta": 1000000.0, "sliding_window": null, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/mixtral/dacorvo/Mixtral-tiny/3a688b7dcd45d9a80b14.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["MixtralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 1024, "initializer_range": 0.02, "intermediate_size": 3584, "max_position_embeddings": 1024, "model_type": "mixtral", "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "dacorvo/Mixtral-tiny", "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6", "compiler_type": "neuronx-cc", "compiler_version": "2.15.128.0+56dc5a86", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 32, "num_experts_per_tok": 2, "num_hidden_layers": 2, "num_key_value_heads": 8, "num_local_experts": 8, "output_router_logits": false, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "router_aux_loss_coef": 0.001, "router_jitter_noise": 0.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/mixtral/dacorvo/Mixtral-tiny/89980755d7c8bc1b31b0.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["MixtralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 1024, "initializer_range": 0.02, "intermediate_size": 3584, "max_position_embeddings": 1024, "model_type": "mixtral", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "dacorvo/Mixtral-tiny", "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6", "compiler_type": "neuronx-cc", "compiler_version": "2.15.128.0+56dc5a86", "num_cores": 2, "sequence_length": 1024, "task": "text-generation"}, "num_attention_heads": 32, "num_experts_per_tok": 2, "num_hidden_layers": 2, "num_key_value_heads": 8, "num_local_experts": 8, "output_router_logits": false, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "router_aux_loss_coef": 0.001, "router_jitter_noise": 0.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/mixtral/dacorvo/Mixtral-tiny/9f85ef35436cc6a20682.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["MixtralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 1024, "initializer_range": 0.02, "intermediate_size": 3584, "max_position_embeddings": 1024, "model_type": "mixtral", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "dacorvo/Mixtral-tiny", "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6", "compiler_type": "neuronx-cc", "compiler_version": "2.15.128.0+56dc5a86", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 32, "num_experts_per_tok": 2, "num_hidden_layers": 2, "num_key_value_heads": 8, "num_local_experts": 8, "output_router_logits": false, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "router_aux_loss_coef": 0.001, "router_jitter_noise": 0.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/mixtral/dacorvo/Mixtral-tiny/f1477b1f14a6669df1a7.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["MixtralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 1024, "initializer_range": 0.02, "intermediate_size": 3584, "max_position_embeddings": 1024, "model_type": "mixtral", "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "dacorvo/Mixtral-tiny", "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6", "compiler_type": "neuronx-cc", "compiler_version": "2.15.128.0+56dc5a86", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 32, "num_experts_per_tok": 2, "num_hidden_layers": 2, "num_key_value_heads": 8, "num_local_experts": 8, "output_router_logits": false, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "router_aux_loss_coef": 0.001, "router_jitter_noise": 0.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/63ef91a527f73ba3b87e.json ADDED
@@ -0,0 +1 @@
+ {"_remove_final_layer_norm": false, "activation_function": "relu", "architectures": ["OPTForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 2, "do_layer_norm_before": true, "dropout": 0.1, "embed_dim": 16, "enable_bias": true, "eos_token_id": 2, "ffn_dim": 4, "hidden_size": 16, "init_std": 0.02, "is_decoder": true, "layer_norm_elementwise_affine": true, "layerdrop": 0.0, "max_position_embeddings": 100, "model_type": "opt", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-OPTForCausalLM", "checkpoint_revision": "190d1f4fc0011d2eaeaa05282e0fbd2445e4b11f", "compiler_type": "neuronx-cc", "compiler_version": "2.15.128.0+56dc5a86", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 5, "pad_token_id": 1, "torch_dtype": "float32", "use_cache": true, "vocab_size": 50265, "word_embed_proj_dim": 16}
neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/aa9fef08c5f79c9217fb.json ADDED
@@ -0,0 +1 @@
+ {"_remove_final_layer_norm": false, "activation_function": "relu", "architectures": ["OPTForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 2, "do_layer_norm_before": true, "dropout": 0.1, "embed_dim": 16, "enable_bias": true, "eos_token_id": 2, "ffn_dim": 4, "hidden_size": 16, "init_std": 0.02, "is_decoder": true, "layer_norm_elementwise_affine": true, "layerdrop": 0.0, "max_position_embeddings": 100, "model_type": "opt", "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-OPTForCausalLM", "checkpoint_revision": "190d1f4fc0011d2eaeaa05282e0fbd2445e4b11f", "compiler_type": "neuronx-cc", "compiler_version": "2.15.128.0+56dc5a86", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 5, "pad_token_id": 1, "torch_dtype": "float32", "use_cache": true, "vocab_size": 50265, "word_embed_proj_dim": 16}
neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/fe9dc58f7d028523a0a6.json ADDED
@@ -0,0 +1 @@
+ {"_remove_final_layer_norm": false, "activation_function": "relu", "architectures": ["OPTForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 2, "do_layer_norm_before": true, "dropout": 0.1, "embed_dim": 16, "enable_bias": true, "eos_token_id": 2, "ffn_dim": 4, "hidden_size": 16, "init_std": 0.02, "is_decoder": true, "layer_norm_elementwise_affine": true, "layerdrop": 0.0, "max_position_embeddings": 100, "model_type": "opt", "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-OPTForCausalLM", "checkpoint_revision": "190d1f4fc0011d2eaeaa05282e0fbd2445e4b11f", "compiler_type": "neuronx-cc", "compiler_version": "2.15.128.0+56dc5a86", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 5, "pad_token_id": 1, "torch_dtype": "float32", "use_cache": true, "vocab_size": 50265, "word_embed_proj_dim": 16}
neuronxcc-2.15.128.0+56dc5a86/0_REGISTRY/0.0.25.dev0/inference/t5/hf-internal-testing/tiny-random-t5/af6cce39edb0ba1725d3.json ADDED
@@ -0,0 +1 @@
+ {"decoder": {"classifier_dropout": 0.0, "d_ff": 37, "d_kv": 8, "d_model": 32, "decoder_start_token_id": 0, "dense_act_fn": "relu", "dropout_rate": 0.1, "feed_forward_proj": "relu", "gradient_checkpointing": false, "initializer_factor": 0.002, "is_encoder_decoder": true, "is_gated_act": false, "layer_norm_epsilon": 1e-06, "model_type": "t5", "neuron": {"compiler_type": "neuronx-cc", "compiler_version": "2.15.128.0+56dc5a86", "dynamic_batch_size": false, "inline_weights_to_neff": true, "optlevel": "2", "output_attentions": true, "output_hidden_states": true, "static_batch_size": 1, "static_num_beams": 4, "static_sequence_length": 18}, "num_decoder_layers": 5, "num_heads": 4, "num_layers": 5, "relative_attention_max_distance": 128, "relative_attention_num_buckets": 8, "task": "text2text-generation", "use_cache": true, "vocab_size": 1103}, "encoder": {"classifier_dropout": 0.0, "d_ff": 37, "d_kv": 8, "d_model": 32, "decoder_start_token_id": 0, "dense_act_fn": "relu", "dropout_rate": 0.1, "feed_forward_proj": "relu", "gradient_checkpointing": false, "initializer_factor": 0.002, "is_encoder_decoder": true, "is_gated_act": false, "layer_norm_epsilon": 1e-06, "model_type": "t5", "neuron": {"compiler_type": "neuronx-cc", "compiler_version": "2.15.128.0+56dc5a86", "dynamic_batch_size": false, "inline_weights_to_neff": true, "optlevel": "2", "output_attentions": true, "output_hidden_states": true, "static_batch_size": 1, "static_num_beams": 4, "static_sequence_length": 18}, "num_decoder_layers": 5, "num_heads": 4, "num_layers": 5, "relative_attention_max_distance": 128, "relative_attention_num_buckets": 8, "task": "text2text-generation", "use_cache": true, "vocab_size": 1103}, "model_type": "t5"}
neuronxcc-2.15.128.0+56dc5a86/MODULE_00a895c500f614d5e448+39f12043/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]
neuronxcc-2.15.128.0+56dc5a86/MODULE_00a895c500f614d5e448+39f12043/model.done ADDED
Empty file.
neuronxcc-2.15.128.0+56dc5a86/MODULE_00a895c500f614d5e448+39f12043/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2fd039f830e38474b35ffa7875e575823b49051252f489fa859a10027c0b0176
+ size 56998
neuronxcc-2.15.128.0+56dc5a86/MODULE_00a895c500f614d5e448+39f12043/model.neff ADDED
Binary file (124 kB).
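Each MODULE_<hash> directory above bundles the compiler inputs and outputs for one graph: compile_flags.json holds the neuronx-cc flags, model.hlo_module.pb is the serialized HLO graph (stored through Git LFS), model.neff is the compiled Neuron executable, and model.done appears to be an empty completion marker. A small inventory sketch over a local copy of this cache follows; the cache path is a placeholder.

    # Inventory a local Neuron cache: for each MODULE_* entry, show whether the
    # compiled NEFF and the .done marker are present, plus the compile flags.
    # The cache path is a placeholder.
    import json
    from pathlib import Path

    cache = Path("neuronxcc-2.15.128.0+56dc5a86")
    for module_dir in sorted(cache.glob("MODULE_*")):
        flags_file = module_dir / "compile_flags.json"
        flags = json.loads(flags_file.read_text()) if flags_file.exists() else []
        neff = module_dir / "model.neff"
        size_kb = neff.stat().st_size // 1024 if neff.exists() else 0
        done = (module_dir / "model.done").exists()
        print(f"{module_dir.name}: done={done}, neff={size_kb} kB, flags={' '.join(flags)}")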
 
neuronxcc-2.15.128.0+56dc5a86/MODULE_0270f34f54c498cedeec+39f12043/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]
neuronxcc-2.15.128.0+56dc5a86/MODULE_0270f34f54c498cedeec+39f12043/model.done ADDED
Empty file.
neuronxcc-2.15.128.0+56dc5a86/MODULE_0270f34f54c498cedeec+39f12043/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:55232d1f8a7bf3216037d725f1d9f971826f44b3f0bfe700dba381587050e2ea
+ size 46555
neuronxcc-2.15.128.0+56dc5a86/MODULE_0270f34f54c498cedeec+39f12043/model.neff ADDED
Binary file (400 kB).
 
neuronxcc-2.15.128.0+56dc5a86/MODULE_05d01a658506892e6bcf+39f12043/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]
neuronxcc-2.15.128.0+56dc5a86/MODULE_05d01a658506892e6bcf+39f12043/model.done ADDED
Empty file.
neuronxcc-2.15.128.0+56dc5a86/MODULE_05d01a658506892e6bcf+39f12043/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:19bad7c7a1aca39e7acb9d1644e06f34534612f3f9d8da135a7ac71b567941c4
+ size 142880
neuronxcc-2.15.128.0+56dc5a86/MODULE_05d01a658506892e6bcf+39f12043/model.neff ADDED
Binary file (646 kB).
 
neuronxcc-2.15.128.0+56dc5a86/MODULE_0d7bde45dbf2d5c06852+39f12043/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]
neuronxcc-2.15.128.0+56dc5a86/MODULE_0d7bde45dbf2d5c06852+39f12043/model.done ADDED
Empty file.
neuronxcc-2.15.128.0+56dc5a86/MODULE_0d7bde45dbf2d5c06852+39f12043/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e8a4ae1405062b3adda459c6aa4a11045fe5add4801765e60a77d687a3c26e78
+ size 46216
neuronxcc-2.15.128.0+56dc5a86/MODULE_0d7bde45dbf2d5c06852+39f12043/model.neff ADDED
Binary file (441 kB).
 
neuronxcc-2.15.128.0+56dc5a86/MODULE_0ea3601cf80a3b68f5bb/feature_extractor/preprocessor_config.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "crop_size": {
+ "height": 224,
+ "width": 224
+ },
+ "do_center_crop": true,
+ "do_convert_rgb": true,
+ "do_normalize": true,
+ "do_rescale": true,
+ "do_resize": true,
+ "image_mean": [
+ 0.48145466,
+ 0.4578275,
+ 0.40821073
+ ],
+ "image_processor_type": "CLIPImageProcessor",
+ "image_std": [
+ 0.26862954,
+ 0.26130258,
+ 0.27577711
+ ],
+ "resample": 3,
+ "rescale_factor": 0.00392156862745098,
+ "size": {
+ "shortest_edge": 224
+ }
+ }
neuronxcc-2.15.128.0+56dc5a86/MODULE_0ea3601cf80a3b68f5bb/model_index.json ADDED
@@ -0,0 +1,38 @@
+ {
+ "_class_name": "StableDiffusionPipeline",
+ "_diffusers_version": "0.28.2",
+ "_name_or_path": "hf-internal-testing/tiny-stable-diffusion-torch",
+ "feature_extractor": [
+ "transformers",
+ "CLIPImageProcessor"
+ ],
+ "image_encoder": [
+ null,
+ null
+ ],
+ "requires_safety_checker": true,
+ "safety_checker": [
+ null,
+ null
+ ],
+ "scheduler": [
+ "diffusers",
+ "PNDMScheduler"
+ ],
+ "text_encoder": [
+ "transformers",
+ "CLIPTextModel"
+ ],
+ "tokenizer": [
+ "transformers",
+ "CLIPTokenizer"
+ ],
+ "unet": [
+ "diffusers",
+ "UNet2DConditionModel"
+ ],
+ "vae": [
+ "diffusers",
+ "AutoencoderKL"
+ ]
+ }
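model_index.json marks this MODULE directory as a cached export of a complete (tiny, test-sized) Stable Diffusion pipeline: the scheduler, tokenizer and feature-extractor configs travel alongside text_encoder, unet and VAE submodules compiled to .neuron artifacts. Assuming optimum-neuron's NeuronStableDiffusionPipeline API, loading such a pre-compiled export would look roughly as follows; the repo id is a placeholder, not this cache entry's actual location.

    # Rough sketch, assuming optimum-neuron's NeuronStableDiffusionPipeline API.
    # The repo id is a placeholder for a repo laid out like MODULE_0ea3601cf80a3b68f5bb.
    from optimum.neuron import NeuronStableDiffusionPipeline

    pipe = NeuronStableDiffusionPipeline.from_pretrained("my-org/tiny-sd-neuronx-compiled")
    image = pipe("a photo of an astronaut riding a horse").images[0]
    image.save("astronaut.png")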
neuronxcc-2.15.128.0+56dc5a86/MODULE_0ea3601cf80a3b68f5bb/scheduler/scheduler_config.json ADDED
@@ -0,0 +1,15 @@
+ {
+ "_class_name": "PNDMScheduler",
+ "_diffusers_version": "0.28.2",
+ "beta_end": 0.012,
+ "beta_schedule": "scaled_linear",
+ "beta_start": 0.00085,
+ "clip_sample": false,
+ "num_train_timesteps": 1000,
+ "prediction_type": "epsilon",
+ "set_alpha_to_one": false,
+ "skip_prk_steps": true,
+ "steps_offset": 1,
+ "timestep_spacing": "leading",
+ "trained_betas": null
+ }
neuronxcc-2.15.128.0+56dc5a86/MODULE_0ea3601cf80a3b68f5bb/text_encoder/config.json ADDED
@@ -0,0 +1,46 @@
+ {
+ "_name_or_path": "/home/ubuntu/.cache/huggingface/hub/models--hf-internal-testing--tiny-stable-diffusion-torch/snapshots/a88cdfbd91f96ec7f61eb7484b652ff0f4ee701d/text_encoder",
+ "architectures": [
+ "CLIPTextModel"
+ ],
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "dropout": 0.0,
+ "eos_token_id": 2,
+ "hidden_act": "quick_gelu",
+ "hidden_size": 32,
+ "initializer_factor": 1.0,
+ "initializer_range": 0.02,
+ "intermediate_size": 37,
+ "layer_norm_eps": 1e-05,
+ "max_position_embeddings": 77,
+ "model_type": "clip_text_model",
+ "neuron": {
+ "compiler_type": "neuronx-cc",
+ "compiler_version": "2.15.128.0+56dc5a86",
+ "dynamic_batch_size": false,
+ "inline_weights_to_neff": true,
+ "input_names": [
+ "input_ids"
+ ],
+ "model_type": "clip-text-model",
+ "optlevel": "2",
+ "output_attentions": false,
+ "output_hidden_states": false,
+ "output_names": [
+ "last_hidden_state",
+ "pooler_output"
+ ],
+ "static_batch_size": 1,
+ "static_sequence_length": 77
+ },
+ "num_attention_heads": 4,
+ "num_hidden_layers": 5,
+ "pad_token_id": 1,
+ "projection_dim": 512,
+ "task": "feature-extraction",
+ "torch_dtype": "float32",
+ "torchscript": true,
+ "transformers_version": "4.43.2",
+ "vocab_size": 1000
+ }
neuronxcc-2.15.128.0+56dc5a86/MODULE_0ea3601cf80a3b68f5bb/text_encoder/model.neuron ADDED
Binary file (826 kB).
 
neuronxcc-2.15.128.0+56dc5a86/MODULE_0ea3601cf80a3b68f5bb/tokenizer/merges.txt ADDED
@@ -0,0 +1,647 @@
1
+ #version: 0.2
2
+ Ġ t
3
+ Ġt h
4
+ Ġ a
5
+ Ġth e</w>
6
+ i n
7
+ Ġ o
8
+ Ġ ,</w>
9
+ Ġ s
10
+ e d</w>
11
+ Ġ w
12
+ e r
13
+ Ġ .</w>
14
+ Ġ i
15
+ r e
16
+ Ġ c
17
+ n d</w>
18
+ Ġ f
19
+ Ġ b
20
+ a t
21
+ Ġo f</w>
22
+ e r</w>
23
+ e n
24
+ a r
25
+ o r
26
+ i t
27
+ Ġ p
28
+ Ġ h
29
+ Ġa nd</w>
30
+ o n
31
+ in g</w>
32
+ a n
33
+ r o
34
+ Ġ m
35
+ Ġ d
36
+ e s</w>
37
+ Ġi n</w>
38
+ o n</w>
39
+ Ġt o</w>
40
+ o u
41
+ i s
42
+ Ġ a</w>
43
+ i c
44
+ Ġ T
45
+ a l
46
+ Ġ l
47
+ Ġ =</w>
48
+ Ġ re
49
+ Ġ "</w>
50
+ e s
51
+ Ġ S
52
+ a s</w>
53
+ a l</w>
54
+ i l
55
+ e l
56
+ i on</w>
57
+ Ġ A
58
+ Ġ C
59
+ Ġ 1
60
+ Ġ Ċ</w>
61
+ u r
62
+ ĠT h
63
+ Ġ n
64
+ a s
65
+ Ġ @
66
+ e c
67
+ o m
68
+ a c
69
+ Ġ e
70
+ Ġw as</w>
71
+ Ġ M
72
+ o r</w>
73
+ a n</w>
74
+ a m
75
+ e n</w>
76
+ o l
77
+ Ġ in
78
+ Ġ g
79
+ Ġ '</w>
80
+ Ġ B
81
+ l y</w>
82
+ a t</w>
83
+ i v
84
+ t s</w>
85
+ ĠTh e</w>
86
+ u s
87
+ - @</w>
88
+ Ġ@ -@</w>
89
+ i s</w>
90
+ Ġ I
91
+ Ġw h
92
+ i g
93
+ Ġ H
94
+ Ġs t
95
+ o s
96
+ u n
97
+ t h
98
+ Ġ P
99
+ Ġw it
100
+ Ġth at</w>
101
+ i r
102
+ Ġa s</w>
103
+ e m
104
+ Ġo n</w>
105
+ r a
106
+ Ġf or</w>
107
+ Ġ R
108
+ e t
109
+ o w
110
+ Ġ 2
111
+ i d
112
+ Ġ D
113
+ l e</w>
114
+ Ġwit h</w>
115
+ l a
116
+ en t</w>
117
+ i m
118
+ Ġ F
119
+ e a
120
+ i on
121
+ Ġb y</w>
122
+ Ġ )</w>
123
+ Ġ (</w>
124
+ Ġa l
125
+ Ġc on
126
+ en t
127
+ Ġ W
128
+ Ġi s</w>
129
+ er e</w>
130
+ Ġ G
131
+ Ġ N
132
+ Ġ L
133
+ Ġh a
134
+ er s</w>
135
+ r i
136
+ t h</w>
137
+ t ed</w>
138
+ u c
139
+ Ġ J
140
+ Ġ1 9
141
+ e v
142
+ u l
143
+ Ġ v
144
+ c e</w>
145
+ at ion</w>
146
+ ro m</w>
147
+ Ġb e
148
+ Ġ E
149
+ i n</w>
150
+ Ġth e
151
+ Ġf rom</w>
152
+ Ġ O
153
+ t er</w>
154
+ Ġp ro
155
+ Ġa r
156
+ a d
157
+ Ġc om
158
+ i c</w>
159
+ a g
160
+ Ġh is</w>
161
+ Ġs h
162
+ Ġa t</w>
163
+ o v
164
+ i es</w>
165
+ o o
166
+ p p
167
+ s t
168
+ c h
169
+ Ġ r
170
+ Ġ2 0
171
+ a y</w>
172
+ i f
173
+ Ġw ere</w>
174
+ Ġc h
175
+ u t</w>
176
+ s t</w>
177
+ u t
178
+ d s</w>
179
+ o p
180
+ u m
181
+ Ġi t</w>
182
+ o c
183
+ t er
184
+ l e
185
+ ig h
186
+ u d
187
+ Ġe x
188
+ ion s</w>
189
+ at e</w>
190
+ it y</w>
191
+ at ed</w>
192
+ Ġ un
193
+ e p
194
+ q u
195
+ Ġn o
196
+ Ġ K
197
+ iv e</w>
198
+ is t
199
+ Ġo n
200
+ am e</w>
201
+ ou n
202
+ i r</w>
203
+ a b
204
+ Ġ â
205
+ in g
206
+ Ġh e</w>
207
+ l d</w>
208
+ u g
209
+ ic h</w>
210
+ Ġa n</w>
211
+ e d
212
+ Ġ k
213
+ Ġâ Ģ
214
+ Ġha d</w>
215
+ v e</w>
216
+ a in
217
+ Ġs e
218
+ t ion</w>
219
+ or e</w>
220
+ re s
221
+ Ġwh ich</w>
222
+ ĠI n</w>
223
+ o d
224
+ th er</w>
225
+ a k
226
+ Ġs p
227
+ a r</w>
228
+ Ġ y
229
+ ĠC h
230
+ on g</w>
231
+ Ġa c
232
+ es t</w>
233
+ Ġ U
234
+ a p
235
+ f f
236
+ al ly</w>
237
+ r it
238
+ ĠS t
239
+ u b
240
+ g e</w>
241
+ b er</w>
242
+ e t</w>
243
+ Ġb e</w>
244
+ e ar
245
+ Ġre c
246
+ er s
247
+ Ġf ir
248
+ o t
249
+ Ġar e</w>
250
+ Ġa n
251
+ c h</w>
252
+ o g
253
+ i a</w>
254
+ es t
255
+ in e</w>
256
+ il l
257
+ an d
258
+ e l</w>
259
+ ar y</w>
260
+ e w</w>
261
+ i d</w>
262
+ Ġf or
263
+ Ġ ;</w>
264
+ Ġcom p
265
+ Ġ V
266
+ Ġin c
267
+ t r
268
+ Ġ20 0
269
+ Ġthe ir</w>
270
+ u s</w>
271
+ Ġb ut</w>
272
+ r an
273
+ ic al</w>
274
+ Ġfir st</w>
275
+ Ġd e
276
+ Ġin t
277
+ Ġ ro
278
+ s o</w>
279
+ ĠâĢ ĵ</w>
280
+ Ġno t</w>
281
+ d ing</w>
282
+ f ter</w>
283
+ ur e</w>
284
+ Ġp ar
285
+ Ġ :</w>
286
+ i an</w>
287
+ Ġt w
288
+ ou ld</w>
289
+ Ġal so</w>
290
+ Ġi ts</w>
291
+ Ġw or
292
+ u m</w>
293
+ Ġo r</w>
294
+ os t</w>
295
+ 0 0</w>
296
+ ou r
297
+ ar d</w>
298
+ Ġre s
299
+ m p
300
+ u e</w>
301
+ Ġa b
302
+ is h</w>
303
+ Ġcon t
304
+ Ġa d
305
+ ow n</w>
306
+ al l</w>
307
+ ou g
308
+ Ġh er</w>
309
+ as t</w>
310
+ Ġ en
311
+ om e</w>
312
+ al l
313
+ d ed</w>
314
+ o w</w>
315
+ Ġha ve</w>
316
+ Ġ us
317
+ ea r</w>
318
+ ac k</w>
319
+ d uc
320
+ i al</w>
321
+ s s
322
+ en ts</w>
323
+ a in</w>
324
+ t ing</w>
325
+ Ġon e</w>
326
+ es s
327
+ Ġh as</w>
328
+ igh t</w>
329
+ a v
330
+ Ġe v
331
+ ou t</w>
332
+ a y
333
+ en ce</w>
334
+ Ġbe en</w>
335
+ e w
336
+ Ġtw o</w>
337
+ Ġc l
338
+ d er</w>
339
+ im e</w>
340
+ k s</w>
341
+ es s</w>
342
+ is h
343
+ . @</w>
344
+ Ġ@ .@</w>
345
+ Ġp la
346
+ Ġp l
347
+ Ġo r
348
+ u p</w>
349
+ m ent</w>
350
+ ur ing</w>
351
+ ol l
352
+ ĠI n
353
+ Ġth is</w>
354
+ Ġb ec
355
+ Ġcom m
356
+ Ġd is
357
+ at er</w>
358
+ ag e</w>
359
+ Ġa pp
360
+ ou s</w>
361
+ e y</w>
362
+ i l</w>
363
+ p er
364
+ ĠA l
365
+ ion al</w>
366
+ l ud
367
+ el y</w>
368
+ t t
369
+ il e</w>
370
+ i z
371
+ Ġ j
372
+ Ġwh o</w>
373
+ Ġa g
374
+ i b
375
+ Ġthe y</w>
376
+ f or
377
+ Ġo v
378
+ at h
379
+ e g
380
+ Ġs c
381
+ i p
382
+ Ġ20 1
383
+ Ġ 3
384
+ Ġp er
385
+ or y</w>
386
+ Ġd es
387
+ id e</w>
388
+ Ġs er
389
+ s e</w>
390
+ ĠH e</w>
391
+ la nd</w>
392
+ at ions</w>
393
+ r ic
394
+ i t</w>
395
+ re s</w>
396
+ er ed</w>
397
+ Ġp re
398
+ ĠS h
399
+ an ce</w>
400
+ or t</w>
401
+ an t</w>
402
+ , @</w>
403
+ Ġ@ ,@</w>
404
+ el l</w>
405
+ Ġ Y
406
+ n ed</w>
407
+ el l
408
+ it e</w>
409
+ Ġinc lud
410
+ Ġre p
411
+ Ġa fter</w>
412
+ Ġs uc
413
+ re e</w>
414
+ an y</w>
415
+ i m</w>
416
+ or t
417
+ Ġ1 8
418
+ Ġs u
419
+ ad e</w>
420
+ ou r</w>
421
+ ĠU n
422
+ ĠI t</w>
423
+ i k
424
+ ĠM ar
425
+ em ber</w>
426
+ Ġ 1</w>
427
+ e en</w>
428
+ a nd</w>
429
+ Ġs ec
430
+ ic e</w>
431
+ Ġt ime</w>
432
+ ĠA n
433
+ Ġint o</w>
434
+ Ġf in
435
+ Ġo ther</w>
436
+ Ġa tt
437
+ il l</w>
438
+ re n
439
+ ac h
440
+ as s
441
+ er al</w>
442
+ es e</w>
443
+ s h
444
+ al s</w>
445
+ it ion</w>
446
+ oug h</w>
447
+ l es</w>
448
+ am p
449
+ Ġw ould</w>
450
+ Ġm ore</w>
451
+ ro ug
452
+ ri b
453
+ er y</w>
454
+ ac e</w>
455
+ Ġ A</w>
456
+ Ġpla y
457
+ it ed</w>
458
+ k ed</w>
459
+ is t</w>
460
+ i ed</w>
461
+ Ġ 2</w>
462
+ as ed</w>
463
+ ing s</w>
464
+ an g
465
+ a m</w>
466
+ i p</w>
467
+ Ġb o
468
+ ab le</w>
469
+ t y</w>
470
+ Ġch ar
471
+ Ġc ent
472
+ et w
473
+ at es</w>
474
+ ro p
475
+ Ġ I</w>
476
+ u nd</w>
477
+ ĠA m
478
+ c es</w>
479
+ o in
480
+ Ġin ter
481
+ u p
482
+ c t
483
+ on e</w>
484
+ Ġt ra
485
+ an t
486
+ ec t
487
+ Ġal l</w>
488
+ e f
489
+ Ġcon s
490
+ ub l
491
+ n ing</w>
492
+ an s</w>
493
+ Ġf e
494
+ us t</w>
495
+ Ġ 0
496
+ Ġre m
497
+ as e</w>
498
+ on g
499
+ Ġwh en</w>
500
+ e b
501
+ ĠW h
502
+ Ġe ar
503
+ ev er</w>
504
+ Ġov er</w>
505
+ Ġk n
506
+ a us
507
+ Ġp os
508
+ a d</w>
509
+ er m
510
+ Ġsh e</w>
511
+ Ġ ra
512
+ Ġd uring</w>
513
+ as on</w>
514
+ v i
515
+ Ġex p
516
+ Ġl ea
517
+ Ġ el
518
+ Ġ 4
519
+ Ġon ly</w>
520
+ o nd</w>
521
+ Ġd ec
522
+ Ġac c
523
+ Ġo ff
524
+ is s
525
+ Ġf l
526
+ ĠE n
527
+ o t</w>
528
+ en s
529
+ os e</w>
530
+ ak e</w>
531
+ o m</w>
532
+ Ġs ev
533
+ ac h</w>
534
+ etw een</w>
535
+ er n
536
+ Ġ 3</w>
537
+ Ġp r
538
+ Ġg ro
539
+ r uc
540
+ Ġd i
541
+ Ġ19 9
542
+ ĠA r
543
+ Ġg ame</w>
544
+ Ġh im</w>
545
+ oo k</w>
546
+ Ġ up</w>
547
+ Ġab out</w>
548
+ Ġre l
549
+ for m
550
+ Ġth ree</w>
551
+ at t
552
+ ĠC om
553
+ Ġs a
554
+ ear s</w>
555
+ Ġ 5
556
+ r y</w>
557
+ Ġi mp
558
+ Ġm ost</w>
559
+ f er
560
+ Ġp res
561
+ Ġf il
562
+ Ġb etween</w>
563
+ Ġbe g
564
+ p h
565
+ or s</w>
566
+ Ġth an</w>
567
+ Ġrec or
568
+ o b
569
+ er ic
570
+ at ing</w>
571
+ Ġth roug
572
+ k ing</w>
573
+ Ġo ut</w>
574
+ Ġn um
575
+ oo d</w>
576
+ oll ow
577
+ ac t
578
+ u il
579
+ Ġc re
580
+ ol og
581
+ at ional</w>
582
+ Ġpro duc
583
+ Ġwh ile</w>
584
+ Ġl ater</w>
585
+ Ġw rit
586
+ e x
587
+ Ġst ar
588
+ Ġsp ec
589
+ e e
590
+ ish ed</w>
591
+ Ġre g
592
+ is ion</w>
593
+ ou th</w>
594
+ Ġre le
595
+ Ġa ss
596
+ Ġse ason</w>
597
+ Ġm ade</w>
598
+ il y</w>
599
+ r u
600
+ o y
601
+ t ur
602
+ t e</w>
603
+ Ġ qu
604
+ Ġm ov
605
+ ur y</w>
606
+ ĠAm eric
607
+ em ent</w>
608
+ c c
609
+ ou nd</w>
610
+ Ġl ar
611
+ Ġfor m
612
+ ec t</w>
613
+ Ġde f
614
+ Ġm us
615
+ ĠP ar
616
+ Ġm e
617
+ Ġs ub
618
+ w ay</w>
619
+ o p</w>
620
+ o h
621
+ el d</w>
622
+ i e</w>
623
+ em p
624
+ am es</w>
625
+ er n</w>
626
+ Ġn or
627
+ iv ed</w>
628
+ ev el
629
+ Ġsuc h</w>
630
+ ar ds</w>
631
+ Ġin d
632
+ ik e</w>
633
+ Ġg en
634
+ er t
635
+ Ġy ear</w>
636
+ Ġus ed</w>
637
+ Ġn ew</w>
638
+ Ġ 5</w>
639
+ Ġal b
640
+ s p
641
+ y p
642
+ Ġwit h
643
+ Ġwh ere</w>
644
+ ic s</w>
645
+ ĠTh is</w>
646
+ Ġthe m</w>
647
+ w n</w>
neuronxcc-2.15.128.0+56dc5a86/MODULE_0ea3601cf80a3b68f5bb/tokenizer/special_tokens_map.json ADDED
@@ -0,0 +1,24 @@
+ {
+ "bos_token": {
+ "content": "<|startoftext|>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": "<|endoftext|>",
+ "unk_token": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ }
+ }