optimum-internal-testing-user committed on
Commit d12da4e · verified · 1 parent: cdcda80

Synchronizing local compiler cache.
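As a point of reference for the entries below: the commit uploads locally compiled Neuron artifacts (registry JSON entries plus MODULE_* directories holding compile flags, HLO modules and NEFF binaries) to the Hub cache. The sketch below is illustrative only and not part of this commit; it shows the kind of export call that populates the local compiler cache before a sync, using argument names taken from the registry entries in this diff, and it assumes the optimum-neuron API and the `optimum-cli neuron cache synchronize` command behave as described.

```python
# Illustrative sketch (assumption, not taken from this commit): compiling a tiny
# model with optimum-neuron fills the local neuronx compiler cache that a commit
# like this one then synchronizes to the Hub.
from optimum.neuron import NeuronModelForCausalLM

model = NeuronModelForCausalLM.from_pretrained(
    "hf-internal-testing/tiny-random-gpt2",  # checkpoint_id from a registry entry below
    export=True,              # compile with neuronx-cc instead of loading a cached NEFF
    batch_size=1,
    sequence_length=100,
    num_cores=2,
    auto_cast_type="fp16",
)
# Afterwards, something like `optimum-cli neuron cache synchronize` (assumed CLI)
# pushes the new MODULE_* directories and 0_REGISTRY entries shown in this diff.
```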

This view is limited to 50 files because the commit contains too many changes; see the raw diff for the complete change set.
Files changed (50)
  1. .gitattributes +30 -0
  2. neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/058e3ead348125e7808b.json +1 -0
  3. neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/6bf9ef24cedd2d181630.json +1 -0
  4. neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/7a4c62323b876a6ad082.json +1 -0
  5. neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/3033d959070fc853146f.json +1 -0
  6. neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/46dc72bec418e48dc0b2.json +1 -0
  7. neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/fdfe8a25cf9f41629cb9.json +1 -0
  8. neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/llama/NousResearch/Hermes-2-Theta-Llama-3-8B/7bae97d51948b959db6b.json +1 -0
  9. neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/llama/llamafactory/tiny-random-Llama-3/2e8cb42c92e334dfbaa9.json +1 -0
  10. neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/llama/llamafactory/tiny-random-Llama-3/5433fffb45752137c8de.json +1 -0
  11. neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/llama/llamafactory/tiny-random-Llama-3/8c48d1da1325809763e8.json +1 -0
  12. neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/3afe2ef31d28f1e456be.json +1 -0
  13. neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/aa9d07318c2ee53a79c9.json +1 -0
  14. neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/ef87caaa517e593a73ba.json +1 -0
  15. neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/mixtral/dacorvo/Mixtral-tiny/44ccd5195da49c90a57d.json +1 -0
  16. neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/mixtral/dacorvo/Mixtral-tiny/908d5d307e281ceae80e.json +1 -0
  17. neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/mixtral/dacorvo/Mixtral-tiny/be71598b4b4a5244a6a6.json +1 -0
  18. neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/28192c462305efbf7703.json +1 -0
  19. neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/62a1679ebaf9c75228d1.json +1 -0
  20. neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/7e39204d4342f7703417.json +1 -0
  21. neuronxcc-2.16.372.0+4a9b2326/MODULE_032f74178031f5ed3c74+613edded/compile_flags.json +1 -0
  22. neuronxcc-2.16.372.0+4a9b2326/MODULE_032f74178031f5ed3c74+613edded/model.done +0 -0
  23. neuronxcc-2.16.372.0+4a9b2326/MODULE_032f74178031f5ed3c74+613edded/model.hlo_module.pb +3 -0
  24. neuronxcc-2.16.372.0+4a9b2326/MODULE_032f74178031f5ed3c74+613edded/model.neff +3 -0
  25. neuronxcc-2.16.372.0+4a9b2326/MODULE_0a88901e8c98f54e4c10+613edded/compile_flags.json +1 -0
  26. neuronxcc-2.16.372.0+4a9b2326/MODULE_0a88901e8c98f54e4c10+613edded/model.done +0 -0
  27. neuronxcc-2.16.372.0+4a9b2326/MODULE_0a88901e8c98f54e4c10+613edded/model.hlo_module.pb +3 -0
  28. neuronxcc-2.16.372.0+4a9b2326/MODULE_0a88901e8c98f54e4c10+613edded/model.neff +3 -0
  29. neuronxcc-2.16.372.0+4a9b2326/MODULE_0cfdfd8c26d66b282d5a+613edded/compile_flags.json +1 -0
  30. neuronxcc-2.16.372.0+4a9b2326/MODULE_0cfdfd8c26d66b282d5a+613edded/model.done +0 -0
  31. neuronxcc-2.16.372.0+4a9b2326/MODULE_0cfdfd8c26d66b282d5a+613edded/model.hlo_module.pb +3 -0
  32. neuronxcc-2.16.372.0+4a9b2326/MODULE_0cfdfd8c26d66b282d5a+613edded/model.neff +0 -0
  33. neuronxcc-2.16.372.0+4a9b2326/MODULE_113f3268e3fd4d66fe81+8a3305d3/compile_flags.json +1 -0
  34. neuronxcc-2.16.372.0+4a9b2326/MODULE_113f3268e3fd4d66fe81+8a3305d3/model.done +0 -0
  35. neuronxcc-2.16.372.0+4a9b2326/MODULE_113f3268e3fd4d66fe81+8a3305d3/model.hlo_module.pb +3 -0
  36. neuronxcc-2.16.372.0+4a9b2326/MODULE_113f3268e3fd4d66fe81+8a3305d3/model.neff +0 -0
  37. neuronxcc-2.16.372.0+4a9b2326/MODULE_151e013069d6b102df91+613edded/compile_flags.json +1 -0
  38. neuronxcc-2.16.372.0+4a9b2326/MODULE_151e013069d6b102df91+613edded/model.done +0 -0
  39. neuronxcc-2.16.372.0+4a9b2326/MODULE_151e013069d6b102df91+613edded/model.hlo_module.pb +3 -0
  40. neuronxcc-2.16.372.0+4a9b2326/MODULE_151e013069d6b102df91+613edded/model.neff +3 -0
  41. neuronxcc-2.16.372.0+4a9b2326/MODULE_24ff9ac2787ce9a1d276+613edded/compile_flags.json +1 -0
  42. neuronxcc-2.16.372.0+4a9b2326/MODULE_24ff9ac2787ce9a1d276+613edded/model.done +0 -0
  43. neuronxcc-2.16.372.0+4a9b2326/MODULE_24ff9ac2787ce9a1d276+613edded/model.hlo_module.pb +3 -0
  44. neuronxcc-2.16.372.0+4a9b2326/MODULE_24ff9ac2787ce9a1d276+613edded/model.neff +3 -0
  45. neuronxcc-2.16.372.0+4a9b2326/MODULE_272d7dabaeb48e6d7210+613edded/compile_flags.json +1 -0
  46. neuronxcc-2.16.372.0+4a9b2326/MODULE_272d7dabaeb48e6d7210+613edded/model.done +0 -0
  47. neuronxcc-2.16.372.0+4a9b2326/MODULE_272d7dabaeb48e6d7210+613edded/model.hlo_module.pb +3 -0
  48. neuronxcc-2.16.372.0+4a9b2326/MODULE_272d7dabaeb48e6d7210+613edded/model.neff +3 -0
  49. neuronxcc-2.16.372.0+4a9b2326/MODULE_2b6914194b931d7496fc+613edded/compile_flags.json +1 -0
  50. neuronxcc-2.16.372.0+4a9b2326/MODULE_2b6914194b931d7496fc+613edded/model.done +0 -0
.gitattributes CHANGED
@@ -1682,3 +1682,33 @@ neuronxcc-2.16.372.0+4a9b2326/MODULE_1090c91454701eac92a9+613edded/model.neff fi
  neuronxcc-2.16.372.0+4a9b2326/MODULE_5a9cc28c7336fd9ea3ab+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
  neuronxcc-2.16.372.0+4a9b2326/MODULE_ab8029e291c37141655e+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
  neuronxcc-2.16.372.0+4a9b2326/MODULE_aeecac19cd61e7fb1951+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.16.372.0+4a9b2326/MODULE_032f74178031f5ed3c74+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.16.372.0+4a9b2326/MODULE_0a88901e8c98f54e4c10+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.16.372.0+4a9b2326/MODULE_151e013069d6b102df91+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.16.372.0+4a9b2326/MODULE_24ff9ac2787ce9a1d276+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.16.372.0+4a9b2326/MODULE_272d7dabaeb48e6d7210+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.16.372.0+4a9b2326/MODULE_2b6914194b931d7496fc+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.16.372.0+4a9b2326/MODULE_320f2622d4d0c9fdd0f1+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.16.372.0+4a9b2326/MODULE_3d7f2d2bef4f6fdd2c74+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.16.372.0+4a9b2326/MODULE_3e5f6b34247d2b457ec5+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.16.372.0+4a9b2326/MODULE_3f0110aa8aef5f42c4bc+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.16.372.0+4a9b2326/MODULE_40f92bf9469aae653e93+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.16.372.0+4a9b2326/MODULE_48437cc74469a8ccaec8+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.16.372.0+4a9b2326/MODULE_506e29dd5cc46918936d+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.16.372.0+4a9b2326/MODULE_5de5ea910315ba9c0def+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.16.372.0+4a9b2326/MODULE_6819e64fb96e87ffece0+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.16.372.0+4a9b2326/MODULE_7062a76356ca462bcc78+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.16.372.0+4a9b2326/MODULE_71a034dd7c4a3afb59c6+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.16.372.0+4a9b2326/MODULE_7329da261de607372f14+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.16.372.0+4a9b2326/MODULE_75bc57af47ebdc36a75e+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.16.372.0+4a9b2326/MODULE_850d5a3d0694f6f65d91+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.16.372.0+4a9b2326/MODULE_8b38e41cf0a3c0152b87+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.16.372.0+4a9b2326/MODULE_8bb2b91314df12f54a63+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.16.372.0+4a9b2326/MODULE_a297084c31f9c1ff1d5d+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.16.372.0+4a9b2326/MODULE_abb19eb6336ab05b7e19+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.16.372.0+4a9b2326/MODULE_c49c0c3715f68c22b32f+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.16.372.0+4a9b2326/MODULE_dcc643bd43691cdb2cd2+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.16.372.0+4a9b2326/MODULE_df48af4bf01af7f3857e+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.16.372.0+4a9b2326/MODULE_e50c30ad9376ce18d039+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.16.372.0+4a9b2326/MODULE_e8eb6214f5c387ad6b43+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.16.372.0+4a9b2326/MODULE_f6b2b8267d631f2f0fad+613edded/model.neff filter=lfs diff=lfs merge=lfs -text
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/058e3ead348125e7808b.json ADDED
@@ -0,0 +1 @@
+ {"apply_residual_connection_post_layernorm": false, "architectures": ["BloomForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 1, "dtype": "float32", "eos_token_id": 2, "gradient_checkpointing": false, "hidden_dropout": 0.1, "hidden_size": 32, "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "initializer_range": 0.02, "is_decoder": true, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "layer_norm_epsilon": 1e-05, "model_type": "bloom", "n_head": 4, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "92b07e9b7b4f986fa7c54e2ac3b9201b4ba5212e", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 3, "pretraining_tp": 1, "seq_length": 7, "slow_but_exact": true, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 1024}
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/6bf9ef24cedd2d181630.json ADDED
@@ -0,0 +1 @@
+ {"apply_residual_connection_post_layernorm": false, "architectures": ["BloomForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 1, "dtype": "float32", "eos_token_id": 2, "gradient_checkpointing": false, "hidden_dropout": 0.1, "hidden_size": 32, "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "initializer_range": 0.02, "is_decoder": true, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "layer_norm_epsilon": 1e-05, "model_type": "bloom", "n_head": 4, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "92b07e9b7b4f986fa7c54e2ac3b9201b4ba5212e", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 3, "pretraining_tp": 1, "seq_length": 7, "slow_but_exact": true, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 1024}
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/7a4c62323b876a6ad082.json ADDED
@@ -0,0 +1 @@
+ {"apply_residual_connection_post_layernorm": false, "architectures": ["BloomForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 1, "dtype": "float32", "eos_token_id": 2, "gradient_checkpointing": false, "hidden_dropout": 0.1, "hidden_size": 32, "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "initializer_range": 0.02, "is_decoder": true, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "layer_norm_epsilon": 1e-05, "model_type": "bloom", "n_head": 4, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "92b07e9b7b4f986fa7c54e2ac3b9201b4ba5212e", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 3, "pretraining_tp": 1, "seq_length": 7, "slow_but_exact": true, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 1024}
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/3033d959070fc853146f.json ADDED
@@ -0,0 +1 @@
+ {"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/46dc72bec418e48dc0b2.json ADDED
@@ -0,0 +1 @@
+ {"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/gpt2/hf-internal-testing/tiny-random-gpt2/fdfe8a25cf9f41629cb9.json ADDED
@@ -0,0 +1 @@
+ {"activation_function": "gelu_new", "attention_probs_dropout_prob": 0.1, "attn_pdrop": 0.1, "bos_token_id": 98, "embd_pdrop": 0.1, "eos_token_id": 98, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "intermediate_size": 37, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 512, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "n_positions": 512, "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "71034c5d8bde858ff824298bdedc65515b97d2b9", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "pad_token_id": 98, "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "type_vocab_size": 16, "use_cache": true, "vocab_size": 1000}
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/llama/NousResearch/Hermes-2-Theta-Llama-3-8B/7bae97d51948b959db6b.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": 128003, "head_dim": 128, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 8192, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 4, "checkpoint_id": "NousResearch/Hermes-2-Theta-Llama-3-8B", "checkpoint_revision": "57a73110702e7b05ba3f39fef36297454c680725", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 4096, "task": "text-generation"}, "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/llama/llamafactory/tiny-random-Llama-3/2e8cb42c92e334dfbaa9.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": [128001, 128008, 128009], "head_dim": 4, "hidden_act": "silu", "hidden_size": 16, "initializer_range": 0.02, "intermediate_size": 64, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "llamafactory/tiny-random-Llama-3", "checkpoint_revision": "bf2a2e3bf199ad2ee96f02a3c00246c608db22a8", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 4, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/llama/llamafactory/tiny-random-Llama-3/5433fffb45752137c8de.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": [128001, 128008, 128009], "head_dim": 4, "hidden_act": "silu", "hidden_size": 16, "initializer_range": 0.02, "intermediate_size": 64, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "llamafactory/tiny-random-Llama-3", "checkpoint_revision": "bf2a2e3bf199ad2ee96f02a3c00246c608db22a8", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 4, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/llama/llamafactory/tiny-random-Llama-3/8c48d1da1325809763e8.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["LlamaForCausalLM"], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 128000, "eos_token_id": [128001, 128008, 128009], "head_dim": 4, "hidden_act": "silu", "hidden_size": 16, "initializer_range": 0.02, "intermediate_size": 64, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "llamafactory/tiny-random-Llama-3", "checkpoint_revision": "bf2a2e3bf199ad2ee96f02a3c00246c608db22a8", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 4, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": {"factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3"}, "rope_theta": 500000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 128256}
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/3afe2ef31d28f1e456be.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "attention_probs_dropout_prob": 0.1, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 8, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 32, "initializer_range": 0.02, "intermediate_size": 37, "is_decoder": true, "max_position_embeddings": 512, "model_type": "mistral", "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "dacorvo/tiny-random-MistralForCausalLM", "checkpoint_revision": "81d453e3c8985649e9ee3d4c9378461029d1c73a", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 2, "pad_token_id": 0, "rms_norm_eps": 1e-06, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 32000}
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/aa9d07318c2ee53a79c9.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "attention_probs_dropout_prob": 0.1, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 8, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 32, "initializer_range": 0.02, "intermediate_size": 37, "is_decoder": true, "max_position_embeddings": 512, "model_type": "mistral", "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "dacorvo/tiny-random-MistralForCausalLM", "checkpoint_revision": "81d453e3c8985649e9ee3d4c9378461029d1c73a", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 2, "pad_token_id": 0, "rms_norm_eps": 1e-06, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 32000}
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/mistral/dacorvo/tiny-random-MistralForCausalLM/ef87caaa517e593a73ba.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["MistralForCausalLM"], "attention_dropout": 0.0, "attention_probs_dropout_prob": 0.1, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 8, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 32, "initializer_range": 0.02, "intermediate_size": 37, "is_decoder": true, "max_position_embeddings": 512, "model_type": "mistral", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "dacorvo/tiny-random-MistralForCausalLM", "checkpoint_revision": "81d453e3c8985649e9ee3d4c9378461029d1c73a", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 2, "pad_token_id": 0, "rms_norm_eps": 1e-06, "rope_theta": 10000.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float32", "type_vocab_size": 16, "use_cache": true, "vocab_size": 32000}
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/mixtral/dacorvo/Mixtral-tiny/44ccd5195da49c90a57d.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["MixtralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 32, "hidden_act": "silu", "hidden_size": 1024, "initializer_range": 0.02, "intermediate_size": 3584, "max_position_embeddings": 1024, "model_type": "mixtral", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "dacorvo/Mixtral-tiny", "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 32, "num_experts_per_tok": 2, "num_hidden_layers": 2, "num_key_value_heads": 8, "num_local_experts": 8, "output_router_logits": false, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "router_aux_loss_coef": 0.001, "router_jitter_noise": 0.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/mixtral/dacorvo/Mixtral-tiny/908d5d307e281ceae80e.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["MixtralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 32, "hidden_act": "silu", "hidden_size": 1024, "initializer_range": 0.02, "intermediate_size": 3584, "max_position_embeddings": 1024, "model_type": "mixtral", "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "dacorvo/Mixtral-tiny", "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 32, "num_experts_per_tok": 2, "num_hidden_layers": 2, "num_key_value_heads": 8, "num_local_experts": 8, "output_router_logits": false, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "router_aux_loss_coef": 0.001, "router_jitter_noise": 0.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/mixtral/dacorvo/Mixtral-tiny/be71598b4b4a5244a6a6.json ADDED
@@ -0,0 +1 @@
+ {"architectures": ["MixtralForCausalLM"], "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "head_dim": 32, "hidden_act": "silu", "hidden_size": 1024, "initializer_range": 0.02, "intermediate_size": 3584, "max_position_embeddings": 1024, "model_type": "mixtral", "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "dacorvo/Mixtral-tiny", "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 32, "num_experts_per_tok": 2, "num_hidden_layers": 2, "num_key_value_heads": 8, "num_local_experts": 8, "output_router_logits": false, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "router_aux_loss_coef": 0.001, "router_jitter_noise": 0.0, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float16", "use_cache": true, "vocab_size": 32000}
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/28192c462305efbf7703.json ADDED
@@ -0,0 +1 @@
+ {"_remove_final_layer_norm": false, "activation_function": "relu", "architectures": ["OPTForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 2, "do_layer_norm_before": true, "dropout": 0.1, "embed_dim": 16, "enable_bias": true, "eos_token_id": 2, "ffn_dim": 4, "hidden_size": 16, "init_std": 0.02, "is_decoder": true, "layer_norm_elementwise_affine": true, "layerdrop": 0.0, "max_position_embeddings": 100, "model_type": "opt", "neuron": {"auto_cast_type": "fp16", "batch_size": 2, "checkpoint_id": "hf-internal-testing/tiny-random-OPTForCausalLM", "checkpoint_revision": "3f3f2ee1e499cb7ad89b877068684fdc9d9513c3", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 5, "pad_token_id": 1, "torch_dtype": "float32", "use_cache": true, "vocab_size": 50265, "word_embed_proj_dim": 16}
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/62a1679ebaf9c75228d1.json ADDED
@@ -0,0 +1 @@
+ {"_remove_final_layer_norm": false, "activation_function": "relu", "architectures": ["OPTForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 2, "do_layer_norm_before": true, "dropout": 0.1, "embed_dim": 16, "enable_bias": true, "eos_token_id": 2, "ffn_dim": 4, "hidden_size": 16, "init_std": 0.02, "is_decoder": true, "layer_norm_elementwise_affine": true, "layerdrop": 0.0, "max_position_embeddings": 100, "model_type": "opt", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-OPTForCausalLM", "checkpoint_revision": "3f3f2ee1e499cb7ad89b877068684fdc9d9513c3", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 5, "pad_token_id": 1, "torch_dtype": "float32", "use_cache": true, "vocab_size": 50265, "word_embed_proj_dim": 16}
neuronxcc-2.16.372.0+4a9b2326/0_REGISTRY/0.1.0.dev0/inference/opt/hf-internal-testing/tiny-random-OPTForCausalLM/7e39204d4342f7703417.json ADDED
@@ -0,0 +1 @@
+ {"_remove_final_layer_norm": false, "activation_function": "relu", "architectures": ["OPTForCausalLM"], "attention_dropout": 0.1, "bos_token_id": 2, "do_layer_norm_before": true, "dropout": 0.1, "embed_dim": 16, "enable_bias": true, "eos_token_id": 2, "ffn_dim": 4, "hidden_size": 16, "init_std": 0.02, "is_decoder": true, "layer_norm_elementwise_affine": true, "layerdrop": 0.0, "max_position_embeddings": 100, "model_type": "opt", "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-OPTForCausalLM", "checkpoint_revision": "3f3f2ee1e499cb7ad89b877068684fdc9d9513c3", "compiler_type": "neuronx-cc", "compiler_version": "2.16.372.0+4a9b2326", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "num_attention_heads": 4, "num_hidden_layers": 5, "pad_token_id": 1, "torch_dtype": "float32", "use_cache": true, "vocab_size": 50265, "word_embed_proj_dim": 16}
neuronxcc-2.16.372.0+4a9b2326/MODULE_032f74178031f5ed3c74+613edded/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn1", "--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]
neuronxcc-2.16.372.0+4a9b2326/MODULE_032f74178031f5ed3c74+613edded/model.done ADDED
File without changes
neuronxcc-2.16.372.0+4a9b2326/MODULE_032f74178031f5ed3c74+613edded/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:efcd712f2b813d9c5c9c26bcd8b968439cbbb0ab9ebda7d08942cd18a620bfaa
+ size 44778
neuronxcc-2.16.372.0+4a9b2326/MODULE_032f74178031f5ed3c74+613edded/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e4bffcb1f86fd84a545356bdb26cefe345ee3f0f76e7ff517d210d5c497c1ef4
+ size 379904
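The .pb and .neff artifacts are stored through Git LFS, so what is committed here is a pointer file carrying the blob's SHA-256 ("oid") and byte size. A minimal sketch, with illustrative file paths, for checking a downloaded artifact against such a pointer:

```python
import hashlib
import os

# Verify that a local blob matches the committed LFS pointer (oid = SHA-256, size = bytes).
def verify_lfs_pointer(pointer_path: str, blob_path: str) -> bool:
    with open(pointer_path) as f:
        fields = dict(line.split(" ", 1) for line in f.read().splitlines() if line)
    expected_oid = fields["oid"].removeprefix("sha256:")
    expected_size = int(fields["size"])
    with open(blob_path, "rb") as f:
        digest = hashlib.sha256(f.read()).hexdigest()
    return digest == expected_oid and os.path.getsize(blob_path) == expected_size

# Example (paths are illustrative, not from this commit):
# verify_lfs_pointer("model.neff.pointer", "model.neff")
```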
neuronxcc-2.16.372.0+4a9b2326/MODULE_0a88901e8c98f54e4c10+613edded/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn1", "--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]
neuronxcc-2.16.372.0+4a9b2326/MODULE_0a88901e8c98f54e4c10+613edded/model.done ADDED
File without changes
neuronxcc-2.16.372.0+4a9b2326/MODULE_0a88901e8c98f54e4c10+613edded/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0975fc341db9cc54898ed384ab7098894c740e7e46124a5b9bc5891aa710b364
+ size 43983
neuronxcc-2.16.372.0+4a9b2326/MODULE_0a88901e8c98f54e4c10+613edded/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a24ed0b727fa8f388c6ac92c0d3f6e7ce17f332852742fd0d86934a7858ced59
+ size 451584
neuronxcc-2.16.372.0+4a9b2326/MODULE_0cfdfd8c26d66b282d5a+613edded/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn1", "--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]
neuronxcc-2.16.372.0+4a9b2326/MODULE_0cfdfd8c26d66b282d5a+613edded/model.done ADDED
File without changes
neuronxcc-2.16.372.0+4a9b2326/MODULE_0cfdfd8c26d66b282d5a+613edded/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3668379cd299c92b1ee77b0d6ae0a86fc0524b635fc260bb4bf9756dfec73112
+ size 52850
neuronxcc-2.16.372.0+4a9b2326/MODULE_0cfdfd8c26d66b282d5a+613edded/model.neff ADDED
Binary file (93.2 kB).
neuronxcc-2.16.372.0+4a9b2326/MODULE_113f3268e3fd4d66fe81+8a3305d3/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn1", "--model-type=transformer", "--auto-cast=none", "--execute-repetition=5"]
neuronxcc-2.16.372.0+4a9b2326/MODULE_113f3268e3fd4d66fe81+8a3305d3/model.done ADDED
File without changes
neuronxcc-2.16.372.0+4a9b2326/MODULE_113f3268e3fd4d66fe81+8a3305d3/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8ac630532ba8a68d0985b0ed14fae2813e6d77509ee139688d6ea634bafd8287
+ size 9509
neuronxcc-2.16.372.0+4a9b2326/MODULE_113f3268e3fd4d66fe81+8a3305d3/model.neff ADDED
Binary file (42 kB).
neuronxcc-2.16.372.0+4a9b2326/MODULE_151e013069d6b102df91+613edded/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn1", "--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]
neuronxcc-2.16.372.0+4a9b2326/MODULE_151e013069d6b102df91+613edded/model.done ADDED
File without changes
neuronxcc-2.16.372.0+4a9b2326/MODULE_151e013069d6b102df91+613edded/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e9cb63ce2b40034604ffea4caf5a28e1bf86bde5b9136a77cd8df401350bcf7f
+ size 52471
neuronxcc-2.16.372.0+4a9b2326/MODULE_151e013069d6b102df91+613edded/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4795e651be8fc2efc5357b6ef737a48e023a26301f96a6f6a3a1b7fbf4c28a50
+ size 123904
neuronxcc-2.16.372.0+4a9b2326/MODULE_24ff9ac2787ce9a1d276+613edded/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn1", "--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]
neuronxcc-2.16.372.0+4a9b2326/MODULE_24ff9ac2787ce9a1d276+613edded/model.done ADDED
File without changes
neuronxcc-2.16.372.0+4a9b2326/MODULE_24ff9ac2787ce9a1d276+613edded/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d94fe1a947149b651a7c79c80adadadad51f361bcfe470c5c63c2e0ab0550ba6
+ size 16718
neuronxcc-2.16.372.0+4a9b2326/MODULE_24ff9ac2787ce9a1d276+613edded/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e1b3dd692e27ea4a48c247cc6874c6179b1b1b5e8cc66f1031b9d11973721b8d
+ size 154624
neuronxcc-2.16.372.0+4a9b2326/MODULE_272d7dabaeb48e6d7210+613edded/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn1", "--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]
neuronxcc-2.16.372.0+4a9b2326/MODULE_272d7dabaeb48e6d7210+613edded/model.done ADDED
File without changes
neuronxcc-2.16.372.0+4a9b2326/MODULE_272d7dabaeb48e6d7210+613edded/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2c8b665decfdceb8bee663b03951fa18f2938b895bef637f05f1f043895356d1
+ size 54843
neuronxcc-2.16.372.0+4a9b2326/MODULE_272d7dabaeb48e6d7210+613edded/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b008714581496073b2d4a9f2f46e7a53c7c79b2c5bed426bca55a955bfabad73
+ size 123904
neuronxcc-2.16.372.0+4a9b2326/MODULE_2b6914194b931d7496fc+613edded/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn1", "--model-type=transformer", "--auto-cast=none", "--execute-repetition=1"]
neuronxcc-2.16.372.0+4a9b2326/MODULE_2b6914194b931d7496fc+613edded/model.done ADDED
File without changes