dacorvo (HF staff) committed · verified
Commit ae18c2c · 1 Parent(s): 4d0bd24

Synchronizing local compiler cache.
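Each cache entry below pairs a registry JSON (the model config plus its `neuron` export parameters) with compiled artifacts (an HLO module and a NEFF binary) produced by neuronx-cc. As an illustration only, here is a minimal sketch of the kind of optimum-neuron export that produces or reuses such an entry, assuming optimum-neuron is installed on a Neuron host; the arguments mirror one of the cached Qwen2.5-7B-Instruct configurations and are not necessarily the exact command behind this commit:

# Sketch only, not the exact command behind this commit: exporting with these
# parameters compiles the model with neuronx-cc and writes/looks up a cache
# entry like the ones listed below.
from optimum.neuron import NeuronModelForCausalLM

model = NeuronModelForCausalLM.from_pretrained(
    "Qwen/Qwen2.5-7B-Instruct",
    export=True,               # trigger compilation (cache-aware)
    batch_size=4,              # values taken from the cached registry entry
    sequence_length=4096,
    num_cores=2,
    auto_cast_type="bf16",
)
model.save_pretrained("qwen2.5-7b-instruct-neuron")  # output path is illustrative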

This view is limited to 50 files because it contains too many changes. See raw diff.
Files changed (50)
  1. .gitattributes +48 -0
  2. neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/qwen2/Qwen/Qwen2.5-7B-Instruct/2104c2569f8076fd08e8.json +1 -0
  3. neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/4538babd01182da34f41.json +1 -0
  4. neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/b40a794eff5ea470980f.json +1 -0
  5. neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/cdbdbf737272e98ae974.json +1 -0
  6. neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/1f8668afaa87da7a4bc8.json +1 -0
  7. neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/af0ef68e4ec57e23cab5.json +1 -0
  8. neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/e0c5534b610fbfb22b6e.json +1 -0
  9. neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/granite/ibm-granite/granite-3.1-2b-instruct/a6c9d0b4d634131c86b8.json +1 -0
  10. neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/llama/NousResearch/Hermes-2-Theta-Llama-3-8B/c1effc43754275f46f73.json +1 -0
  11. neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/llama/llamafactory/tiny-random-Llama-3/2460430e72551022bebf.json +1 -0
  12. neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/llama/llamafactory/tiny-random-Llama-3/488fda0c00dbe14fb3f5.json +1 -0
  13. neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/llama/llamafactory/tiny-random-Llama-3/67c8d108293c3635370a.json +1 -0
  14. neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/7d2df9c19098ee676fa8.json +1 -0
  15. neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/ac0287246ef4575f8b78.json +1 -0
  16. neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/af0f1b79454574c744eb.json +1 -0
  17. neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/mixtral/dacorvo/Mixtral-tiny/8dde518ab8e435c4c830.json +1 -0
  18. neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/mixtral/dacorvo/Mixtral-tiny/9ac32cae9a1e9b72b9ed.json +1 -0
  19. neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/mixtral/dacorvo/Mixtral-tiny/d777c911e955212a0132.json +1 -0
  20. neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/93d7843b7abe79348924.json +1 -0
  21. neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/b392ca0eec3368da72ec.json +1 -0
  22. neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/f829b04255e2c75cd7c8.json +1 -0
  23. neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/qwen2/Qwen/Qwen2.5-0.5B/5f1a2111604ccf1723b9.json +1 -0
  24. neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/qwen2/Qwen/Qwen2.5-0.5B/fa9a9e1f6a33e2f83027.json +1 -0
  25. neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/qwen2/Qwen/Qwen2.5-1.5B/842677cbddc5f7674b52.json +1 -0
  26. neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/qwen2/Qwen/Qwen2.5-1.5B/9c442126814a181b6b08.json +1 -0
  27. neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/qwen2/Qwen/Qwen2.5-14B/4d344eef5c363f6851d8.json +1 -0
  28. neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/qwen2/Qwen/Qwen2.5-14B/703843f253e4eab07ed2.json +1 -0
  29. neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/qwen2/Qwen/Qwen2.5-7B-Instruct/2104c2569f8076fd08e8.json +1 -0
  30. neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/qwen2/Qwen/Qwen2.5-7B-Instruct/2bdd504102100c9d1a90.json +1 -0
  31. neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/qwen2/Qwen/Qwen2.5-7B-Instruct/8be3a002e240a4aad032.json +1 -0
  32. neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/qwen2/Qwen/Qwen2.5-7B-Instruct/e9d0ac3eb014579d6b51.json +1 -0
  33. neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/qwen2/Qwen/Qwen2.5-7B-Instruct/febd5daac4e51196a9ec.json +1 -0
  34. neuronxcc-2.15.143.0+e39249ad/MODULE_001203e0776f75fd5c22+39f12043/compile_flags.json +1 -0
  35. neuronxcc-2.15.143.0+e39249ad/MODULE_001203e0776f75fd5c22+39f12043/model.done +0 -0
  36. neuronxcc-2.15.143.0+e39249ad/MODULE_001203e0776f75fd5c22+39f12043/model.hlo_module.pb +3 -0
  37. neuronxcc-2.15.143.0+e39249ad/MODULE_001203e0776f75fd5c22+39f12043/model.neff +3 -0
  38. neuronxcc-2.15.143.0+e39249ad/MODULE_0a075561d4899abfb889+39f12043/compile_flags.json +1 -0
  39. neuronxcc-2.15.143.0+e39249ad/MODULE_0a075561d4899abfb889+39f12043/model.done +0 -0
  40. neuronxcc-2.15.143.0+e39249ad/MODULE_0a075561d4899abfb889+39f12043/model.hlo_module.pb +3 -0
  41. neuronxcc-2.15.143.0+e39249ad/MODULE_0a075561d4899abfb889+39f12043/model.neff +3 -0
  42. neuronxcc-2.15.143.0+e39249ad/MODULE_174e0a7d5d41956d00b4+39f12043/compile_flags.json +1 -0
  43. neuronxcc-2.15.143.0+e39249ad/MODULE_174e0a7d5d41956d00b4+39f12043/model.done +0 -0
  44. neuronxcc-2.15.143.0+e39249ad/MODULE_174e0a7d5d41956d00b4+39f12043/model.hlo_module.pb +3 -0
  45. neuronxcc-2.15.143.0+e39249ad/MODULE_174e0a7d5d41956d00b4+39f12043/model.neff +3 -0
  46. neuronxcc-2.15.143.0+e39249ad/MODULE_17fcb33cafc343f9896e+39f12043/compile_flags.json +1 -0
  47. neuronxcc-2.15.143.0+e39249ad/MODULE_17fcb33cafc343f9896e+39f12043/model.done +0 -0
  48. neuronxcc-2.15.143.0+e39249ad/MODULE_17fcb33cafc343f9896e+39f12043/model.hlo_module.pb +3 -0
  49. neuronxcc-2.15.143.0+e39249ad/MODULE_17fcb33cafc343f9896e+39f12043/model.neff +3 -0
  50. neuronxcc-2.15.143.0+e39249ad/MODULE_18ac4eac08526eac7029+39f12043/compile_flags.json +1 -0
.gitattributes CHANGED
@@ -1444,3 +1444,51 @@ neuronxcc-2.15.143.0+e39249ad/MODULE_f7f5761cfc68a7c66eb3+39f12043/model.neff fi
  neuronxcc-2.15.143.0+e39249ad/MODULE_fbc0c1dcbdfb5e889d3e+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
  neuronxcc-2.15.143.0+e39249ad/MODULE_fbfe999e7918c5e3c314+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
  neuronxcc-2.15.143.0+e39249ad/MODULE_fe705993427e4cf84ece+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_001203e0776f75fd5c22+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_0a075561d4899abfb889+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_174e0a7d5d41956d00b4+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_17fcb33cafc343f9896e+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_18ac4eac08526eac7029+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_1bda9f34935c9bf8dc56+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_1f6cff7b8c7ee3876236+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_2fafc1ff132d5f941aad+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_3af7e7ef3791e98d3c6d+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_4210c04230b8ddb8bfe3+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_46e098601535b4170185+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_490667793af055c0d022+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_4f8465ac364975d34bcb+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_58a06b133e4373d19b25+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_5f22d1149f72db0f4434+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_630d01f87d4ca43039d0+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_63f5e8d21de3e42de924+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_640ad25f4ab9e0ce4ac7+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_6be6b345d395a077ac49+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_6dde87e83769645bfc03+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_722d3c4ebc79e2bbd531+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_766c4484fa2a48856510+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_792bc0b042fc30982724+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_7abeefdbfb3d46057180+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_7e32a252210084dd58cf+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_820d66fcac0a38111aaa+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_90a3aeeb3ae6558cbe52+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_949f40023667a5d24986+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_94d0127c087e99e1ffc5+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_a1e4d9c618b104013c87+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_a32f6f117982c699866a+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_ab8385586066f255c69b+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_abe0505300d1da0ba93a+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_b00de76b80db9e1a1ae8+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_b420f7ff165f4f0517c3+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_bb86f1f35879e84a7798+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_c34e768d7d11ab63321f+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_c8e6c3d45f5962b8636e+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_e5aad17a33049f216efe+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_e8560d399e2dbba86969+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_e8cdd90331ca20dce129+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_f4bd63863e7609260d2f+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_f63ff654073837167064+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_f98a98a6f8c781e6c601+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_facd39ab923dc333c132+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_fcdf5b6858bafa453910+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_ff275b5606db64ceaea0+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.15.143.0+e39249ad/MODULE_ffad069e135095fb2041+39f12043/model.neff filter=lfs diff=lfs merge=lfs -text
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.27/inference/qwen2/Qwen/Qwen2.5-7B-Instruct/2104c2569f8076fd08e8.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["Qwen2ForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 151643, "eos_token_id": 151645, "hidden_act": "silu", "hidden_size": 3584, "initializer_range": 0.02, "intermediate_size": 18944, "max_position_embeddings": 32768, "max_window_layers": 28, "model_type": "qwen2", "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "Qwen/Qwen2.5-7B-Instruct", "checkpoint_revision": "bb46c15ee4bb56c5b63245ef50fd7637234d6f75", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 28, "num_hidden_layers": 28, "num_key_value_heads": 4, "rms_norm_eps": 1e-06, "rope_theta": 1000000.0, "sliding_window": null, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "use_sliding_window": false, "vocab_size": 152064}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/4538babd01182da34f41.json ADDED
@@ -0,0 +1 @@
+ {"apply_residual_connection_post_layernorm": false, "architectures": ["BloomForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 1, "dtype": "float32", "eos_token_id": 2, "gradient_checkpointing": false, "hidden_dropout": 0.1, "hidden_size": 32, "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "initializer_range": 0.02, "is_decoder": true, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "layer_norm_epsilon": 1e-05, "model_type": "bloom", "n_head": 4, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "92b07e9b7b4f986fa7c54e2ac3b9201b4ba5212e", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 3, "pretraining_tp": 1, "seq_length": 7, "slow_but_exact": true, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 1024}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/b40a794eff5ea470980f.json ADDED
@@ -0,0 +1 @@
+ {"apply_residual_connection_post_layernorm": false, "architectures": ["BloomForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 1, "dtype": "float32", "eos_token_id": 2, "gradient_checkpointing": false, "hidden_dropout": 0.1, "hidden_size": 32, "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "initializer_range": 0.02, "is_decoder": true, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "layer_norm_epsilon": 1e-05, "model_type": "bloom", "n_head": 4, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "92b07e9b7b4f986fa7c54e2ac3b9201b4ba5212e", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 3, "pretraining_tp": 1, "seq_length": 7, "slow_but_exact": true, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 1024}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/cdbdbf737272e98ae974.json ADDED
@@ -0,0 +1 @@
+ {"apply_residual_connection_post_layernorm": false, "architectures": ["BloomForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 1, "dtype": "float32", "eos_token_id": 2, "gradient_checkpointing": false, "hidden_dropout": 0.1, "hidden_size": 32, "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "initializer_range": 0.02, "is_decoder": true, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "layer_norm_epsilon": 1e-05, "model_type": "bloom", "n_head": 4, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "92b07e9b7b4f986fa7c54e2ac3b9201b4ba5212e", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 3, "pretraining_tp": 1, "seq_length": 7, "slow_but_exact": true, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 1024}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/1f8668afaa87da7a4bc8.json ADDED
@@ -0,0 +1 @@
+ {"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/af0ef68e4ec57e23cab5.json ADDED
@@ -0,0 +1 @@
+ {"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/e0c5534b610fbfb22b6e.json ADDED
@@ -0,0 +1 @@
+ {"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/granite/ibm-granite/granite-3.1-2b-instruct/a6c9d0b4d634131c86b8.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["GraniteForCausalLM"], "attention_bias": false, "attention_dropout": 0.1, "attention_multiplier": 0.015625, "bos_token_id": 0, "embedding_multiplier": 12.0, "eos_token_id": 0, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 8192, "logits_scaling": 8.0, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "granite", "neuron": {"auto_cast_type": "bf16", "batch_size": 1, "checkpoint_id": "ibm-granite/granite-3.1-2b-instruct", "checkpoint_revision": "a06c9a0fef05d8111ca1f77b60f477443c526043", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 40, "num_key_value_heads": 8, "pad_token_id": 0, "residual_multiplier": 0.22, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 5000000.0, "tie_word_embeddings": true, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 49155}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/llama/NousResearch/Hermes-2-Theta-Llama-3-8B/c1effc43754275f46f73.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128003, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "NousResearch/Hermes-2-Theta-Llama-3-8B", "checkpoint_revision": "57a73110702e7b05ba3f39fef36297454c680725", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/llama/llamafactory/tiny-random-Llama-3/2460430e72551022bebf.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128009, "head_dim": 4, "hidden_act": "silu", "hidden_size": 16, "initializer_range": 0.02, "intermediate_size": 64, "max_position_embeddings": 4096, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "llamafactory/tiny-random-Llama-3", "checkpoint_revision": "2d9d8c1112e9cd4b0d66bb612e09be7da7997b18", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 4, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/llama/llamafactory/tiny-random-Llama-3/488fda0c00dbe14fb3f5.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128009, "head_dim": 4, "hidden_act": "silu", "hidden_size": 16, "initializer_range": 0.02, "intermediate_size": 64, "max_position_embeddings": 4096, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "llamafactory/tiny-random-Llama-3", "checkpoint_revision": "2d9d8c1112e9cd4b0d66bb612e09be7da7997b18", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 4, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/llama/llamafactory/tiny-random-Llama-3/67c8d108293c3635370a.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128009, "head_dim": 4, "hidden_act": "silu", "hidden_size": 16, "initializer_range": 0.02, "intermediate_size": 64, "max_position_embeddings": 4096, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "llamafactory/tiny-random-Llama-3", "checkpoint_revision": "2d9d8c1112e9cd4b0d66bb612e09be7da7997b18", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 4, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/7d2df9c19098ee676fa8.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "attention_probs_dropout_prob": 0.1, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 8, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 32, "initializer_range": 0.02, "intermediate_size": 37, "is_decoder": true, "max_position_embeddings": 512, "model_type": "mistral", "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "dacorvo/tiny-random-MistralForCausalLM", "checkpoint_revision": "81d453e3c8985649e9ee3d4c9378461029d1c73a", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 2, "pad_token_id": 0, "rms_norm_eps": 1e-06, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 32000}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/ac0287246ef4575f8b78.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "attention_probs_dropout_prob": 0.1, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 8, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 32, "initializer_range": 0.02, "intermediate_size": 37, "is_decoder": true, "max_position_embeddings": 512, "model_type": "mistral", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "dacorvo/tiny-random-MistralForCausalLM", "checkpoint_revision": "81d453e3c8985649e9ee3d4c9378461029d1c73a", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 2, "pad_token_id": 0, "rms_norm_eps": 1e-06, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 32000}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/af0f1b79454574c744eb.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "attention_probs_dropout_prob": 0.1, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 8, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 32, "initializer_range": 0.02, "intermediate_size": 37, "is_decoder": true, "max_position_embeddings": 512, "model_type": "mistral", "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "dacorvo/tiny-random-MistralForCausalLM", "checkpoint_revision": "81d453e3c8985649e9ee3d4c9378461029d1c73a", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 2, "pad_token_id": 0, "rms_norm_eps": 1e-06, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 32000}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/mixtral/dacorvo/Mixtral-tiny/8dde518ab8e435c4c830.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["MixtralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 1024, "initializer_range": 0.02, "intermediate_size": 3584, "max_position_embeddings": 1024, "model_type": "mixtral", "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "dacorvo/Mixtral-tiny", "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 32, "num_experts_per_tok": 2, "num_hidden_layers": 2, "num_key_value_heads": 8, "num_local_experts": 8, "output_router_logits": false, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "router_aux_loss_coef": 0.001, "router_jitter_noise": 0.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/mixtral/dacorvo/Mixtral-tiny/9ac32cae9a1e9b72b9ed.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["MixtralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 1024, "initializer_range": 0.02, "intermediate_size": 3584, "max_position_embeddings": 1024, "model_type": "mixtral", "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "dacorvo/Mixtral-tiny", "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 32, "num_experts_per_tok": 2, "num_hidden_layers": 2, "num_key_value_heads": 8, "num_local_experts": 8, "output_router_logits": false, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "router_aux_loss_coef": 0.001, "router_jitter_noise": 0.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/mixtral/dacorvo/Mixtral-tiny/d777c911e955212a0132.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["MixtralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 1024, "initializer_range": 0.02, "intermediate_size": 3584, "max_position_embeddings": 1024, "model_type": "mixtral", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "dacorvo/Mixtral-tiny", "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 32, "num_experts_per_tok": 2, "num_hidden_layers": 2, "num_key_value_heads": 8, "num_local_experts": 8, "output_router_logits": false, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "router_aux_loss_coef": 0.001, "router_jitter_noise": 0.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/93d7843b7abe79348924.json ADDED
@@ -0,0 +1 @@
+ {"_remove_final_layer_norm": false, "activation_function": "relu", "architectures": ["OPTForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 2, "do_layer_norm_before": true, "dropout": 0.1, "embed_dim": 16, "enable_bias": true, "eos_token_id": 2, "ffn_dim": 4, "hidden_size": 16, "init_std": 0.02, "is_decoder": true, "layer_norm_elementwise_affine": true, "layerdrop": 0.0, "max_position_embeddings": 100, "model_type": "opt", "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-OPTForCausalLM", "checkpoint_revision": "3f3f2ee1e499cb7ad89b877068684fdc9d9513c3", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 5, "pad_token_id": 1, "torch_dtype": "float32", "use_cache": true, "vocab_size": 50265, "word_embed_proj_dim": 16}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/b392ca0eec3368da72ec.json ADDED
@@ -0,0 +1 @@
+ {"_remove_final_layer_norm": false, "activation_function": "relu", "architectures": ["OPTForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 2, "do_layer_norm_before": true, "dropout": 0.1, "embed_dim": 16, "enable_bias": true, "eos_token_id": 2, "ffn_dim": 4, "hidden_size": 16, "init_std": 0.02, "is_decoder": true, "layer_norm_elementwise_affine": true, "layerdrop": 0.0, "max_position_embeddings": 100, "model_type": "opt", "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-OPTForCausalLM", "checkpoint_revision": "3f3f2ee1e499cb7ad89b877068684fdc9d9513c3", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 5, "pad_token_id": 1, "torch_dtype": "float32", "use_cache": true, "vocab_size": 50265, "word_embed_proj_dim": 16}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/f829b04255e2c75cd7c8.json ADDED
@@ -0,0 +1 @@
+ {"_remove_final_layer_norm": false, "activation_function": "relu", "architectures": ["OPTForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 2, "do_layer_norm_before": true, "dropout": 0.1, "embed_dim": 16, "enable_bias": true, "eos_token_id": 2, "ffn_dim": 4, "hidden_size": 16, "init_std": 0.02, "is_decoder": true, "layer_norm_elementwise_affine": true, "layerdrop": 0.0, "max_position_embeddings": 100, "model_type": "opt", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-OPTForCausalLM", "checkpoint_revision": "3f3f2ee1e499cb7ad89b877068684fdc9d9513c3", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 5, "pad_token_id": 1, "torch_dtype": "float32", "use_cache": true, "vocab_size": 50265, "word_embed_proj_dim": 16}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/qwen2/Qwen/Qwen2.5-0.5B/5f1a2111604ccf1723b9.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["Qwen2ForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 151643, "eos_token_id": 151643, "hidden_act": "silu", "hidden_size": 896, "initializer_range": 0.02, "intermediate_size": 4864, "max_position_embeddings": 32768, "max_window_layers": 24, "model_type": "qwen2", "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "Qwen/Qwen2.5-0.5B", "checkpoint_revision": "060db6499f32faf8b98477b0a26969ef7d8b9987", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 14, "num_hidden_layers": 24, "num_key_value_heads": 2, "rms_norm_eps": 1e-06, "rope_theta": 1000000.0, "sliding_window": null, "tie_word_embeddings": true, "torch_dtype": "bfloat16", "use_cache": true, "use_mrope": false, "use_sliding_window": false, "vocab_size": 151936}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/qwen2/Qwen/Qwen2.5-0.5B/fa9a9e1f6a33e2f83027.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["Qwen2ForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 151643, "eos_token_id": 151643, "hidden_act": "silu", "hidden_size": 896, "initializer_range": 0.02, "intermediate_size": 4864, "max_position_embeddings": 32768, "max_window_layers": 24, "model_type": "qwen2", "neuron": {"auto_cast_type": "bf16", "batch_size": 1, "checkpoint_id": "Qwen/Qwen2.5-0.5B", "checkpoint_revision": "060db6499f32faf8b98477b0a26969ef7d8b9987", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 14, "num_hidden_layers": 24, "num_key_value_heads": 2, "rms_norm_eps": 1e-06, "rope_theta": 1000000.0, "sliding_window": null, "tie_word_embeddings": true, "torch_dtype": "bfloat16", "use_cache": true, "use_mrope": false, "use_sliding_window": false, "vocab_size": 151936}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/qwen2/Qwen/Qwen2.5-1.5B/842677cbddc5f7674b52.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["Qwen2ForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 151643, "eos_token_id": 151643, "hidden_act": "silu", "hidden_size": 1536, "initializer_range": 0.02, "intermediate_size": 8960, "max_position_embeddings": 131072, "max_window_layers": 28, "model_type": "qwen2", "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "Qwen/Qwen2.5-1.5B", "checkpoint_revision": "8faed761d45a263340a0528343f099c05c9a4323", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 12, "num_hidden_layers": 28, "num_key_value_heads": 2, "rms_norm_eps": 1e-06, "rope_theta": 1000000.0, "sliding_window": null, "tie_word_embeddings": true, "torch_dtype": "bfloat16", "use_cache": true, "use_mrope": false, "use_sliding_window": false, "vocab_size": 151936}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/qwen2/Qwen/Qwen2.5-1.5B/9c442126814a181b6b08.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["Qwen2ForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 151643, "eos_token_id": 151643, "hidden_act": "silu", "hidden_size": 1536, "initializer_range": 0.02, "intermediate_size": 8960, "max_position_embeddings": 131072, "max_window_layers": 28, "model_type": "qwen2", "neuron": {"auto_cast_type": "bf16", "batch_size": 1, "checkpoint_id": "Qwen/Qwen2.5-1.5B", "checkpoint_revision": "8faed761d45a263340a0528343f099c05c9a4323", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 12, "num_hidden_layers": 28, "num_key_value_heads": 2, "rms_norm_eps": 1e-06, "rope_theta": 1000000.0, "sliding_window": null, "tie_word_embeddings": true, "torch_dtype": "bfloat16", "use_cache": true, "use_mrope": false, "use_sliding_window": false, "vocab_size": 151936}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/qwen2/Qwen/Qwen2.5-14B/4d344eef5c363f6851d8.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["Qwen2ForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 151643, "eos_token_id": 151643, "hidden_act": "silu", "hidden_size": 5120, "initializer_range": 0.02, "intermediate_size": 13824, "max_position_embeddings": 131072, "max_window_layers": 48, "model_type": "qwen2", "neuron": {"auto_cast_type": "bf16", "batch_size": 16, "checkpoint_id": "Qwen/Qwen2.5-14B", "checkpoint_revision": "97e1e76335b7017d8f67c08a19d103c0504298c9", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 40, "num_hidden_layers": 48, "num_key_value_heads": 8, "rms_norm_eps": 1e-05, "rope_theta": 1000000.0, "sliding_window": null, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "use_sliding_window": false, "vocab_size": 152064}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/qwen2/Qwen/Qwen2.5-14B/703843f253e4eab07ed2.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["Qwen2ForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 151643, "eos_token_id": 151643, "hidden_act": "silu", "hidden_size": 5120, "initializer_range": 0.02, "intermediate_size": 13824, "max_position_embeddings": 131072, "max_window_layers": 48, "model_type": "qwen2", "neuron": {"auto_cast_type": "bf16", "batch_size": 1, "checkpoint_id": "Qwen/Qwen2.5-14B", "checkpoint_revision": "97e1e76335b7017d8f67c08a19d103c0504298c9", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 40, "num_hidden_layers": 48, "num_key_value_heads": 8, "rms_norm_eps": 1e-05, "rope_theta": 1000000.0, "sliding_window": null, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "use_sliding_window": false, "vocab_size": 152064}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/qwen2/Qwen/Qwen2.5-7B-Instruct/2104c2569f8076fd08e8.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["Qwen2ForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 151643, "eos_token_id": 151645, "hidden_act": "silu", "hidden_size": 3584, "initializer_range": 0.02, "intermediate_size": 18944, "max_position_embeddings": 32768, "max_window_layers": 28, "model_type": "qwen2", "neuron": {"auto_cast_type": "bf16", "batch_size": 4, "checkpoint_id": "Qwen/Qwen2.5-7B-Instruct", "checkpoint_revision": "bb46c15ee4bb56c5b63245ef50fd7637234d6f75", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 28, "num_hidden_layers": 28, "num_key_value_heads": 4, "rms_norm_eps": 1e-06, "rope_theta": 1000000.0, "sliding_window": null, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "use_sliding_window": false, "vocab_size": 152064}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/qwen2/Qwen/Qwen2.5-7B-Instruct/2bdd504102100c9d1a90.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["Qwen2ForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 151643, "eos_token_id": 151645, "hidden_act": "silu", "hidden_size": 3584, "initializer_range": 0.02, "intermediate_size": 18944, "max_position_embeddings": 32768, "max_window_layers": 28, "model_type": "qwen2", "neuron": {"auto_cast_type": "bf16", "batch_size": 8, "checkpoint_id": "Qwen/Qwen2.5-7B-Instruct", "checkpoint_revision": "bb46c15ee4bb56c5b63245ef50fd7637234d6f75", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 28, "num_hidden_layers": 28, "num_key_value_heads": 4, "rms_norm_eps": 1e-06, "rope_theta": 1000000.0, "sliding_window": null, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "use_sliding_window": false, "vocab_size": 152064}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/qwen2/Qwen/Qwen2.5-7B-Instruct/8be3a002e240a4aad032.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["Qwen2ForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 151643, "eos_token_id": 151645, "hidden_act": "silu", "hidden_size": 3584, "initializer_range": 0.02, "intermediate_size": 18944, "max_position_embeddings": 32768, "max_window_layers": 28, "model_type": "qwen2", "neuron": {"auto_cast_type": "bf16", "batch_size": 32, "checkpoint_id": "Qwen/Qwen2.5-7B-Instruct", "checkpoint_revision": "bb46c15ee4bb56c5b63245ef50fd7637234d6f75", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 28, "num_hidden_layers": 28, "num_key_value_heads": 4, "rms_norm_eps": 1e-06, "rope_theta": 1000000.0, "sliding_window": null, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "use_sliding_window": false, "vocab_size": 152064}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/qwen2/Qwen/Qwen2.5-7B-Instruct/e9d0ac3eb014579d6b51.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["Qwen2ForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 151643, "eos_token_id": 151645, "hidden_act": "silu", "hidden_size": 3584, "initializer_range": 0.02, "intermediate_size": 18944, "max_position_embeddings": 32768, "max_window_layers": 28, "model_type": "qwen2", "neuron": {"auto_cast_type": "bf16", "batch_size": 1, "checkpoint_id": "Qwen/Qwen2.5-7B-Instruct", "checkpoint_revision": "bb46c15ee4bb56c5b63245ef50fd7637234d6f75", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 8, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 28, "num_hidden_layers": 28, "num_key_value_heads": 4, "rms_norm_eps": 1e-06, "rope_theta": 1000000.0, "sliding_window": null, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "use_sliding_window": false, "vocab_size": 152064}
neuronxcc-2.15.143.0+e39249ad/0_REGISTRY/0.0.28.dev0/inference/qwen2/Qwen/Qwen2.5-7B-Instruct/febd5daac4e51196a9ec.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["Qwen2ForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 151643, "eos_token_id": 151645, "hidden_act": "silu", "hidden_size": 3584, "initializer_range": 0.02, "intermediate_size": 18944, "max_position_embeddings": 32768, "max_window_layers": 28, "model_type": "qwen2", "neuron": {"auto_cast_type": "bf16", "batch_size": 1, "checkpoint_id": "Qwen/Qwen2.5-7B-Instruct", "checkpoint_revision": "bb46c15ee4bb56c5b63245ef50fd7637234d6f75", "compiler_type": "neuronx-cc", "compiler_version": "2.15.143.0+e39249ad", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 28, "num_hidden_layers": 28, "num_key_value_heads": 4, "rms_norm_eps": 1e-06, "rope_theta": 1000000.0, "sliding_window": null, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "use_sliding_window": false, "vocab_size": 152064}
neuronxcc-2.15.143.0+e39249ad/MODULE_001203e0776f75fd5c22+39f12043/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]
neuronxcc-2.15.143.0+e39249ad/MODULE_001203e0776f75fd5c22+39f12043/model.done ADDED
File without changes
neuronxcc-2.15.143.0+e39249ad/MODULE_001203e0776f75fd5c22+39f12043/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:31328ce3e0b8ebca0e6e1fb8cba104adec152da86fcee059e6f9fe7048b51061
+ size 332401
neuronxcc-2.15.143.0+e39249ad/MODULE_001203e0776f75fd5c22+39f12043/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:68b8271a2b74aab6afe6459825d7968aa30f36708e67a0e11e000c05c9cb97ba
+ size 10824704
neuronxcc-2.15.143.0+e39249ad/MODULE_0a075561d4899abfb889+39f12043/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]
neuronxcc-2.15.143.0+e39249ad/MODULE_0a075561d4899abfb889+39f12043/model.done ADDED
File without changes
neuronxcc-2.15.143.0+e39249ad/MODULE_0a075561d4899abfb889+39f12043/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e145ffb961cb8fa1e1d278319478ba5ba830affdb423e99eba76d81b7dadb7da
+ size 419178
neuronxcc-2.15.143.0+e39249ad/MODULE_0a075561d4899abfb889+39f12043/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:55b95de6a493f82b83845faa74514068ad88b72c14d82db3b0ae813474032c7d
+ size 3103744
neuronxcc-2.15.143.0+e39249ad/MODULE_174e0a7d5d41956d00b4+39f12043/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]
neuronxcc-2.15.143.0+e39249ad/MODULE_174e0a7d5d41956d00b4+39f12043/model.done ADDED
File without changes
neuronxcc-2.15.143.0+e39249ad/MODULE_174e0a7d5d41956d00b4+39f12043/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b10a1493db43406f4f9f4b13e28a216f5e9f1163341770d30717dbc4ac95ef72
+ size 412258
neuronxcc-2.15.143.0+e39249ad/MODULE_174e0a7d5d41956d00b4+39f12043/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:77fb4416db2a7b970537be7d27b05f92a6dea595ef64f8a0379df92ba6e8a374
+ size 3329024
neuronxcc-2.15.143.0+e39249ad/MODULE_17fcb33cafc343f9896e+39f12043/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]
neuronxcc-2.15.143.0+e39249ad/MODULE_17fcb33cafc343f9896e+39f12043/model.done ADDED
File without changes
neuronxcc-2.15.143.0+e39249ad/MODULE_17fcb33cafc343f9896e+39f12043/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:760c002b4adfe0a2d23f7514f0e27f2477895cd205794a26eb335a9414aaf2b5
+ size 325561
neuronxcc-2.15.143.0+e39249ad/MODULE_17fcb33cafc343f9896e+39f12043/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1ffff9e77e8b1ac761ada705f6a2737437b2ac48a49276b4565a5ffcd99c87be
+ size 10855424
neuronxcc-2.15.143.0+e39249ad/MODULE_18ac4eac08526eac7029+39f12043/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]