Delete neuronxcc-2.12.68.0+4480452af
This view is limited to 50 files because it contains too many changes. See raw diff.
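Each file removed below is a cached compilation registry entry: the model's Transformers config serialized together with a `neuron` section recording the export parameters (task, batch_size, sequence_length, num_cores, auto_cast_type, compiler type/version, and source checkpoint id/revision). As a hedged sketch of what one such entry encodes — assuming the optimum-neuron Python API, and taking the gpt2 entry with batch_size 1 and sequence_length 1024 as the example — the recorded configuration corresponds roughly to an export call like:

```python
# Hedged sketch, not part of this commit: recreating the export configuration
# recorded in one deleted registry entry (gpt2, fp32, 2 cores), assuming the
# optimum-neuron API. Compiling requires a Neuron (inf2/trn1) instance.
from optimum.neuron import NeuronModelForCausalLM

model = NeuronModelForCausalLM.from_pretrained(
    "gpt2",                 # "checkpoint_id" in the registry JSON
    export=True,            # compile with neuronx-cc rather than load a cached artifact
    batch_size=1,           # "batch_size"
    sequence_length=1024,   # "sequence_length"
    num_cores=2,            # "num_cores" (tensor-parallel degree)
    auto_cast_type="fp32",  # "auto_cast_type"
)
```

Deleting these entries only removes the registry records; the source checkpoints they point to are unaffected.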
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/099759272d993c40a970.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/239f41145d1b767d334a.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/796b0a69b6cfb147681c.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/cf86851df25066dfc4a8.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/gpt2/gpt2/2081a5c9a30393646e54.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/gpt2/gpt2/3384c3f7f81ef00bc7b0.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/gpt2/hf-internal-testing/tiny-random-gpt2/0300ef28a839709a0896.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/gpt2/hf-internal-testing/tiny-random-gpt2/13764fb2969c5a940316.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/gpt2/hf-internal-testing/tiny-random-gpt2/24c47890e4ce14857524.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/gpt2/hf-internal-testing/tiny-random-gpt2/ca8d99bc6374cb0ea37c.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/gpt2/hf-internal-testing/tiny-random-gpt2/d70210c8fe7cbaf377ab.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/gpt2/hf-internal-testing/tiny-random-gpt2/eb42bab3c5050bb02069.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/llama/dacorvo/tiny-random-llama/0c5dcee71eb09cd78ca3.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/llama/dacorvo/tiny-random-llama/36b96843d581cc86fdd1.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/llama/dacorvo/tiny-random-llama/4f598bddf7d4afda210f.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/llama/dacorvo/tiny-random-llama/d9d1f24f085b3e1ee495.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/llama/meta-llama/Llama-2-13b-chat-hf/35e204ec2c1ceb94bc9f.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/llama/meta-llama/Llama-2-13b-chat-hf/407e53aa03015f20f315.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/llama/meta-llama/Llama-2-13b-chat-hf/ccdce1a61c9ada824b10.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/llama/meta-llama/Llama-2-70b-hf/137153849e81a4e70bcc.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/llama/meta-llama/Llama-2-70b-hf/57dc4df464b0f3e2cc38.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/llama/meta-llama/Llama-2-7b-chat-hf/a23bae888fd7fe3142b6.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/llama/meta-llama/Llama-2-7b-chat-hf/a69cd10a7ef9072e186d.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/llama/meta-llama/Llama-2-7b-chat-hf/cba933997ecb39d1bd8c.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/mistral/HuggingFaceH4/zephyr-7b-beta/29fa14d6dca5ddb7b6cc.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/mistral/HuggingFaceH4/zephyr-7b-beta/2d648875fb6505476df2.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/mistral/HuggingFaceH4/zephyr-7b-beta/9d0f7edcd3fa4ddb10ad.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/mistral/HuggingFaceH4/zephyr-7b-beta/f9c9bba59ee26411cf80.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/mistral/dacorvo/tiny-random-MistralForCausalLM/06fc7c62762607ab744e.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/mistral/dacorvo/tiny-random-MistralForCausalLM/5975d2858f9bce8ef290.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/mistral/dacorvo/tiny-random-MistralForCausalLM/9a8b7079ddb8a7611d5a.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/mistral/dacorvo/tiny-random-MistralForCausalLM/fbf45e5cda8a458d37d3.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/mistral/mistralai/Mistral-7B-Instruct-v0.1/3aead8155eb174dbe347.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/mistral/mistralai/Mistral-7B-Instruct-v0.1/5db6675ddfc8e4d5d6c3.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/mistral/mistralai/Mistral-7B-Instruct-v0.1/b00a671aa7624d706817.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/mistral/mistralai/Mistral-7B-Instruct-v0.1/efae9c5d84d090ec8a1d.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/opt/hf-internal-testing/tiny-random-OPTForCausalLM/0128c1baa44ad491d0f4.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/opt/hf-internal-testing/tiny-random-OPTForCausalLM/6f2444cfa075d1f210d0.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/opt/hf-internal-testing/tiny-random-OPTForCausalLM/ea89a1b8198a05d1e71d.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/opt/hf-internal-testing/tiny-random-OPTForCausalLM/f3ff2a63bf38db3d9595.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.19.dev0/inference/llama/NousResearch/Llama-2-13b-chat-hf/190a7b1844748ebd6f79.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.19.dev0/inference/llama/NousResearch/Llama-2-13b-chat-hf/b39207446be340f95f2a.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.19.dev0/inference/llama/NousResearch/Llama-2-13b-chat-hf/c5566dad938f8351a225.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.19.dev0/inference/llama/NousResearch/Llama-2-7b-chat-hf/3671f4f6e9fb55e4d4fd.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.19.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/11e18115af2cd459e483.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.19.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/15e8c952b3c81fd87a57.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.19.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/25d78386bf1607f07ae3.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.19.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/3172e154191b4ca8bee2.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.19.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/348c549d4253f4d0d1e3.json +0 -1
- neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.19.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/443c93e614cf44693e97.json +0 -1
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/099759272d993c40a970.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 1024, "hidden_size": 32, "n_layer": 5, "n_head": 4, "layer_norm_epsilon": 1e-05, "initializer_range": 0.02, "use_cache": true, "pretraining_tp": 1, "apply_residual_connection_post_layernorm": false, "hidden_dropout": 0.1, "attention_dropout": 0.1, "bos_token_id": 1, "eos_token_id": 2, "slow_but_exact": true, "torch_dtype": "float32", "is_decoder": true, "architectures": ["BloomForCausalLM"], "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "pad_token_id": 3, "dtype": "float32", "gradient_checkpointing": false, "model_type": "bloom", "n_positions": 512, "seq_length": 7, "type_vocab_size": 16, "neuron": {"task": "text-generation", "batch_size": 2, "num_cores": 2, "auto_cast_type": "fp16", "sequence_length": 100, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "0f4f06f162cd67d34d03ee156484e4001d468500"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/239f41145d1b767d334a.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 1024, "hidden_size": 32, "n_layer": 5, "n_head": 4, "layer_norm_epsilon": 1e-05, "initializer_range": 0.02, "use_cache": true, "pretraining_tp": 1, "apply_residual_connection_post_layernorm": false, "hidden_dropout": 0.1, "attention_dropout": 0.1, "bos_token_id": 1, "eos_token_id": 2, "slow_but_exact": true, "torch_dtype": "float32", "is_decoder": true, "architectures": ["BloomForCausalLM"], "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "pad_token_id": 3, "dtype": "float32", "gradient_checkpointing": false, "model_type": "bloom", "n_positions": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "0f4f06f162cd67d34d03ee156484e4001d468500", "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "seq_length": 7, "type_vocab_size": 16}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/796b0a69b6cfb147681c.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 1024, "hidden_size": 32, "n_layer": 5, "n_head": 4, "layer_norm_epsilon": 1e-05, "initializer_range": 0.02, "use_cache": true, "pretraining_tp": 1, "apply_residual_connection_post_layernorm": false, "hidden_dropout": 0.1, "attention_dropout": 0.1, "bos_token_id": 1, "eos_token_id": 2, "slow_but_exact": true, "torch_dtype": "float32", "is_decoder": true, "architectures": ["BloomForCausalLM"], "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "pad_token_id": 3, "dtype": "float32", "gradient_checkpointing": false, "model_type": "bloom", "n_positions": 512, "seq_length": 7, "type_vocab_size": 16, "neuron": {"task": "text-generation", "batch_size": 1, "num_cores": 2, "auto_cast_type": "fp16", "sequence_length": 100, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "0f4f06f162cd67d34d03ee156484e4001d468500"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/bloom/hf-internal-testing/tiny-random-BloomForCausalLM/cf86851df25066dfc4a8.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 1024, "hidden_size": 32, "n_layer": 5, "n_head": 4, "layer_norm_epsilon": 1e-05, "initializer_range": 0.02, "use_cache": true, "pretraining_tp": 1, "apply_residual_connection_post_layernorm": false, "hidden_dropout": 0.1, "attention_dropout": 0.1, "bos_token_id": 1, "eos_token_id": 2, "slow_but_exact": true, "torch_dtype": "float32", "is_decoder": true, "architectures": ["BloomForCausalLM"], "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "pad_token_id": 3, "dtype": "float32", "gradient_checkpointing": false, "model_type": "bloom", "n_positions": 512, "seq_length": 7, "type_vocab_size": 16, "neuron": {"task": "text-generation", "batch_size": 1, "num_cores": 2, "auto_cast_type": "fp32", "sequence_length": 100, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "hf-internal-testing/tiny-random-BloomForCausalLM", "checkpoint_revision": "0f4f06f162cd67d34d03ee156484e4001d468500"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/gpt2/gpt2/2081a5c9a30393646e54.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 50257, "n_positions": 1024, "n_embd": 768, "n_layer": 12, "n_head": 12, "n_inner": null, "activation_function": "gelu_new", "resid_pdrop": 0.1, "embd_pdrop": 0.1, "attn_pdrop": 0.1, "layer_norm_epsilon": 1e-05, "initializer_range": 0.02, "summary_type": "cls_index", "summary_use_proj": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "scale_attn_weights": true, "use_cache": true, "scale_attn_by_inverse_layer_idx": false, "reorder_and_upcast_attn": false, "bos_token_id": 50256, "eos_token_id": 50256, "architectures": ["GPT2LMHeadModel"], "task_specific_params": {"text-generation": {"do_sample": true, "max_length": 50}}, "model_type": "gpt2", "n_ctx": 1024, "neuron": {"task": "text-generation", "batch_size": 16, "num_cores": 2, "auto_cast_type": "fp32", "sequence_length": 1024, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "gpt2", "checkpoint_revision": "11c5a3d5811f50298f278a704980280950aedb10"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/gpt2/gpt2/3384c3f7f81ef00bc7b0.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 50257, "n_positions": 1024, "n_embd": 768, "n_layer": 12, "n_head": 12, "n_inner": null, "activation_function": "gelu_new", "resid_pdrop": 0.1, "embd_pdrop": 0.1, "attn_pdrop": 0.1, "layer_norm_epsilon": 1e-05, "initializer_range": 0.02, "summary_type": "cls_index", "summary_use_proj": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "scale_attn_weights": true, "use_cache": true, "scale_attn_by_inverse_layer_idx": false, "reorder_and_upcast_attn": false, "bos_token_id": 50256, "eos_token_id": 50256, "architectures": ["GPT2LMHeadModel"], "task_specific_params": {"text-generation": {"do_sample": true, "max_length": 50}}, "model_type": "gpt2", "n_ctx": 1024, "neuron": {"task": "text-generation", "batch_size": 1, "num_cores": 2, "auto_cast_type": "fp32", "sequence_length": 1024, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "gpt2", "checkpoint_revision": "11c5a3d5811f50298f278a704980280950aedb10"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/gpt2/hf-internal-testing/tiny-random-gpt2/0300ef28a839709a0896.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 1000, "n_positions": 512, "n_embd": 32, "n_layer": 5, "n_head": 4, "n_inner": null, "activation_function": "gelu_new", "resid_pdrop": 0.1, "embd_pdrop": 0.1, "attn_pdrop": 0.1, "layer_norm_epsilon": 1e-05, "initializer_range": 0.02, "summary_type": "cls_index", "summary_use_proj": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "scale_attn_weights": true, "use_cache": true, "scale_attn_by_inverse_layer_idx": false, "reorder_and_upcast_attn": false, "bos_token_id": 98, "eos_token_id": 98, "pad_token_id": 98, "attention_probs_dropout_prob": 0.1, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "intermediate_size": 37, "model_type": "gpt2", "n_ctx": 512, "type_vocab_size": 16, "neuron": {"task": "text-generation", "batch_size": 1, "num_cores": 2, "auto_cast_type": "fp16", "sequence_length": 100, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "91c0fe31d692dd8448d9bc06e8d1877345009e3b"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/gpt2/hf-internal-testing/tiny-random-gpt2/13764fb2969c5a940316.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 1000, "n_positions": 512, "n_embd": 32, "n_layer": 5, "n_head": 4, "n_inner": null, "activation_function": "gelu_new", "resid_pdrop": 0.1, "embd_pdrop": 0.1, "attn_pdrop": 0.1, "layer_norm_epsilon": 1e-05, "initializer_range": 0.02, "summary_type": "cls_index", "summary_use_proj": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "scale_attn_weights": true, "use_cache": true, "scale_attn_by_inverse_layer_idx": false, "reorder_and_upcast_attn": false, "bos_token_id": 98, "eos_token_id": 98, "pad_token_id": 98, "attention_probs_dropout_prob": 0.1, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "intermediate_size": 37, "model_type": "gpt2", "n_ctx": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 16, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "91c0fe31d692dd8448d9bc06e8d1877345009e3b", "compiler_type": "neuronx-cc", "compiler_version": "2.12.54.0+f631c2365", "num_cores": 2, "sequence_length": 512, "task": "text-generation"}, "type_vocab_size": 16}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/gpt2/hf-internal-testing/tiny-random-gpt2/24c47890e4ce14857524.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 1000, "n_positions": 512, "n_embd": 32, "n_layer": 5, "n_head": 4, "n_inner": null, "activation_function": "gelu_new", "resid_pdrop": 0.1, "embd_pdrop": 0.1, "attn_pdrop": 0.1, "layer_norm_epsilon": 1e-05, "initializer_range": 0.02, "summary_type": "cls_index", "summary_use_proj": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "scale_attn_weights": true, "use_cache": true, "scale_attn_by_inverse_layer_idx": false, "reorder_and_upcast_attn": false, "bos_token_id": 98, "eos_token_id": 98, "pad_token_id": 98, "attention_probs_dropout_prob": 0.1, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "intermediate_size": 37, "model_type": "gpt2", "n_ctx": 512, "type_vocab_size": 16, "neuron": {"task": "text-generation", "batch_size": 1, "num_cores": 24, "auto_cast_type": "fp32", "sequence_length": 512, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "91c0fe31d692dd8448d9bc06e8d1877345009e3b"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/gpt2/hf-internal-testing/tiny-random-gpt2/ca8d99bc6374cb0ea37c.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 1000, "n_positions": 512, "n_embd": 32, "n_layer": 5, "n_head": 4, "n_inner": null, "activation_function": "gelu_new", "resid_pdrop": 0.1, "embd_pdrop": 0.1, "attn_pdrop": 0.1, "layer_norm_epsilon": 1e-05, "initializer_range": 0.02, "summary_type": "cls_index", "summary_use_proj": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "scale_attn_weights": true, "use_cache": true, "scale_attn_by_inverse_layer_idx": false, "reorder_and_upcast_attn": false, "bos_token_id": 98, "eos_token_id": 98, "pad_token_id": 98, "attention_probs_dropout_prob": 0.1, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "intermediate_size": 37, "model_type": "gpt2", "n_ctx": 512, "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "91c0fe31d692dd8448d9bc06e8d1877345009e3b", "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "type_vocab_size": 16}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/gpt2/hf-internal-testing/tiny-random-gpt2/d70210c8fe7cbaf377ab.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 1000, "n_positions": 512, "n_embd": 32, "n_layer": 5, "n_head": 4, "n_inner": null, "activation_function": "gelu_new", "resid_pdrop": 0.1, "embd_pdrop": 0.1, "attn_pdrop": 0.1, "layer_norm_epsilon": 1e-05, "initializer_range": 0.02, "summary_type": "cls_index", "summary_use_proj": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "scale_attn_weights": true, "use_cache": true, "scale_attn_by_inverse_layer_idx": false, "reorder_and_upcast_attn": false, "bos_token_id": 98, "eos_token_id": 98, "pad_token_id": 98, "attention_probs_dropout_prob": 0.1, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "intermediate_size": 37, "model_type": "gpt2", "n_ctx": 512, "type_vocab_size": 16, "neuron": {"task": "text-generation", "batch_size": 2, "num_cores": 2, "auto_cast_type": "fp16", "sequence_length": 100, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "91c0fe31d692dd8448d9bc06e8d1877345009e3b"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/gpt2/hf-internal-testing/tiny-random-gpt2/eb42bab3c5050bb02069.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 1000, "n_positions": 512, "n_embd": 32, "n_layer": 5, "n_head": 4, "n_inner": null, "activation_function": "gelu_new", "resid_pdrop": 0.1, "embd_pdrop": 0.1, "attn_pdrop": 0.1, "layer_norm_epsilon": 1e-05, "initializer_range": 0.02, "summary_type": "cls_index", "summary_use_proj": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "scale_attn_weights": true, "use_cache": true, "scale_attn_by_inverse_layer_idx": false, "reorder_and_upcast_attn": false, "bos_token_id": 98, "eos_token_id": 98, "pad_token_id": 98, "attention_probs_dropout_prob": 0.1, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "intermediate_size": 37, "model_type": "gpt2", "n_ctx": 512, "type_vocab_size": 16, "neuron": {"task": "text-generation", "batch_size": 1, "num_cores": 2, "auto_cast_type": "fp32", "sequence_length": 100, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "hf-internal-testing/tiny-random-gpt2", "checkpoint_revision": "91c0fe31d692dd8448d9bc06e8d1877345009e3b"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/llama/dacorvo/tiny-random-llama/0c5dcee71eb09cd78ca3.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 512, "hidden_size": 128, "intermediate_size": 256, "num_hidden_layers": 1, "num_attention_heads": 1, "num_key_value_heads": 1, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-06, "pretraining_tp": 1, "use_cache": true, "rope_theta": 10000.0, "rope_scaling": null, "attention_bias": false, "attention_dropout": 0.0, "torch_dtype": "float32", "tie_word_embeddings": false, "architectures": ["LlamaForCausalLM"], "bos_token_id": 1, "eos_token_id": 2, "model_type": "llama", "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "dacorvo/tiny-random-llama", "checkpoint_revision": "7fdafd2fe6a2d31c6abb72ae60db606d8bb23196", "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/llama/dacorvo/tiny-random-llama/36b96843d581cc86fdd1.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 512, "hidden_size": 128, "intermediate_size": 256, "num_hidden_layers": 1, "num_attention_heads": 1, "num_key_value_heads": 1, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-06, "pretraining_tp": 1, "use_cache": true, "rope_theta": 10000.0, "rope_scaling": null, "attention_bias": false, "attention_dropout": 0.0, "torch_dtype": "float32", "tie_word_embeddings": false, "architectures": ["LlamaForCausalLM"], "bos_token_id": 1, "eos_token_id": 2, "model_type": "llama", "neuron": {"task": "text-generation", "batch_size": 1, "num_cores": 2, "auto_cast_type": "fp32", "sequence_length": 100, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "dacorvo/tiny-random-llama", "checkpoint_revision": "7fdafd2fe6a2d31c6abb72ae60db606d8bb23196"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/llama/dacorvo/tiny-random-llama/4f598bddf7d4afda210f.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 512, "hidden_size": 128, "intermediate_size": 256, "num_hidden_layers": 1, "num_attention_heads": 1, "num_key_value_heads": 1, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-06, "pretraining_tp": 1, "use_cache": true, "rope_theta": 10000.0, "rope_scaling": null, "attention_bias": false, "attention_dropout": 0.0, "torch_dtype": "float32", "tie_word_embeddings": false, "architectures": ["LlamaForCausalLM"], "bos_token_id": 1, "eos_token_id": 2, "model_type": "llama", "neuron": {"task": "text-generation", "batch_size": 2, "num_cores": 2, "auto_cast_type": "fp16", "sequence_length": 100, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "dacorvo/tiny-random-llama", "checkpoint_revision": "7fdafd2fe6a2d31c6abb72ae60db606d8bb23196"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/llama/dacorvo/tiny-random-llama/d9d1f24f085b3e1ee495.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 512, "hidden_size": 128, "intermediate_size": 256, "num_hidden_layers": 1, "num_attention_heads": 1, "num_key_value_heads": 1, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-06, "pretraining_tp": 1, "use_cache": true, "rope_theta": 10000.0, "rope_scaling": null, "attention_bias": false, "attention_dropout": 0.0, "torch_dtype": "float32", "tie_word_embeddings": false, "architectures": ["LlamaForCausalLM"], "bos_token_id": 1, "eos_token_id": 2, "model_type": "llama", "neuron": {"task": "text-generation", "batch_size": 1, "num_cores": 2, "auto_cast_type": "fp16", "sequence_length": 100, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "dacorvo/tiny-random-llama", "checkpoint_revision": "7fdafd2fe6a2d31c6abb72ae60db606d8bb23196"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/llama/meta-llama/Llama-2-13b-chat-hf/35e204ec2c1ceb94bc9f.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 4096, "hidden_size": 5120, "intermediate_size": 13824, "num_hidden_layers": 40, "num_attention_heads": 40, "num_key_value_heads": 40, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-05, "pretraining_tp": 1, "use_cache": true, "rope_theta": 10000.0, "rope_scaling": null, "attention_bias": false, "attention_dropout": 0.0, "torch_dtype": "float16", "tie_word_embeddings": false, "architectures": ["LlamaForCausalLM"], "bos_token_id": 1, "eos_token_id": 2, "model_type": "llama", "neuron": {"task": "text-generation", "batch_size": 1, "num_cores": 24, "auto_cast_type": "fp16", "sequence_length": 2048, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "meta-llama/Llama-2-13b-chat-hf", "checkpoint_revision": "c2f3ec81aac798ae26dcc57799a994dfbf521496"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/llama/meta-llama/Llama-2-13b-chat-hf/407e53aa03015f20f315.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 4096, "hidden_size": 5120, "intermediate_size": 13824, "num_hidden_layers": 40, "num_attention_heads": 40, "num_key_value_heads": 40, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-05, "pretraining_tp": 1, "use_cache": true, "rope_theta": 10000.0, "rope_scaling": null, "attention_bias": false, "attention_dropout": 0.0, "torch_dtype": "float16", "tie_word_embeddings": false, "architectures": ["LlamaForCausalLM"], "bos_token_id": 1, "eos_token_id": 2, "model_type": "llama", "neuron": {"task": "text-generation", "batch_size": 4, "num_cores": 24, "auto_cast_type": "fp16", "sequence_length": 2048, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "meta-llama/Llama-2-13b-chat-hf", "checkpoint_revision": "c2f3ec81aac798ae26dcc57799a994dfbf521496"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/llama/meta-llama/Llama-2-13b-chat-hf/ccdce1a61c9ada824b10.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 4096, "hidden_size": 5120, "intermediate_size": 13824, "num_hidden_layers": 40, "num_attention_heads": 40, "num_key_value_heads": 40, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-05, "pretraining_tp": 1, "use_cache": true, "rope_theta": 10000.0, "rope_scaling": null, "attention_bias": false, "attention_dropout": 0.0, "torch_dtype": "float16", "tie_word_embeddings": false, "architectures": ["LlamaForCausalLM"], "bos_token_id": 1, "eos_token_id": 2, "model_type": "llama", "neuron": {"task": "text-generation", "batch_size": 4, "num_cores": 8, "auto_cast_type": "fp16", "sequence_length": 2048, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "meta-llama/Llama-2-13b-chat-hf", "checkpoint_revision": "c2f3ec81aac798ae26dcc57799a994dfbf521496"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/llama/meta-llama/Llama-2-70b-hf/137153849e81a4e70bcc.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 4096, "hidden_size": 8192, "intermediate_size": 28672, "num_hidden_layers": 80, "num_attention_heads": 64, "num_key_value_heads": 8, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-05, "pretraining_tp": 1, "use_cache": true, "rope_theta": 10000.0, "rope_scaling": null, "attention_bias": false, "attention_dropout": 0.0, "torch_dtype": "float16", "tie_word_embeddings": false, "architectures": ["LlamaForCausalLM"], "bos_token_id": 1, "eos_token_id": 2, "model_type": "llama", "neuron": {"auto_cast_type": "fp16", "batch_size": 1, "checkpoint_id": "meta-llama/Llama-2-70b-hf", "checkpoint_revision": "90052941a64de02075ca800b09fcea1bdaacb939", "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "num_cores": 24, "sequence_length": 2048, "task": "text-generation"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/llama/meta-llama/Llama-2-70b-hf/57dc4df464b0f3e2cc38.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 4096, "hidden_size": 8192, "intermediate_size": 28672, "num_hidden_layers": 80, "num_attention_heads": 64, "num_key_value_heads": 8, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-05, "pretraining_tp": 1, "use_cache": true, "rope_theta": 10000.0, "rope_scaling": null, "attention_bias": false, "attention_dropout": 0.0, "torch_dtype": "float16", "tie_word_embeddings": false, "architectures": ["LlamaForCausalLM"], "bos_token_id": 1, "eos_token_id": 2, "model_type": "llama", "neuron": {"task": "text-generation", "batch_size": 1, "num_cores": 24, "auto_cast_type": "fp16", "sequence_length": 2048, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "meta-llama/Llama-2-70b-hf", "checkpoint_revision": "90052941a64de02075ca800b09fcea1bdaacb939"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/llama/meta-llama/Llama-2-7b-chat-hf/a23bae888fd7fe3142b6.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 4096, "hidden_size": 4096, "intermediate_size": 11008, "num_hidden_layers": 32, "num_attention_heads": 32, "num_key_value_heads": 32, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-05, "pretraining_tp": 1, "use_cache": true, "rope_theta": 10000.0, "rope_scaling": null, "attention_bias": false, "attention_dropout": 0.0, "torch_dtype": "float16", "tie_word_embeddings": false, "architectures": ["LlamaForCausalLM"], "bos_token_id": 1, "eos_token_id": 2, "model_type": "llama", "neuron": {"task": "text-generation", "batch_size": 1, "num_cores": 2, "auto_cast_type": "fp16", "sequence_length": 2048, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "meta-llama/Llama-2-7b-chat-hf", "checkpoint_revision": "c1b0db933684edbfe29a06fa47eb19cc48025e93"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/llama/meta-llama/Llama-2-7b-chat-hf/a69cd10a7ef9072e186d.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 4096, "hidden_size": 4096, "intermediate_size": 11008, "num_hidden_layers": 32, "num_attention_heads": 32, "num_key_value_heads": 32, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-05, "pretraining_tp": 1, "use_cache": true, "rope_theta": 10000.0, "rope_scaling": null, "attention_bias": false, "attention_dropout": 0.0, "torch_dtype": "float16", "tie_word_embeddings": false, "architectures": ["LlamaForCausalLM"], "bos_token_id": 1, "eos_token_id": 2, "model_type": "llama", "neuron": {"task": "text-generation", "batch_size": 4, "num_cores": 2, "auto_cast_type": "fp16", "sequence_length": 2048, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "meta-llama/Llama-2-7b-chat-hf", "checkpoint_revision": "c1b0db933684edbfe29a06fa47eb19cc48025e93"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/llama/meta-llama/Llama-2-7b-chat-hf/cba933997ecb39d1bd8c.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 4096, "hidden_size": 4096, "intermediate_size": 11008, "num_hidden_layers": 32, "num_attention_heads": 32, "num_key_value_heads": 32, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-05, "pretraining_tp": 1, "use_cache": true, "rope_theta": 10000.0, "rope_scaling": null, "attention_bias": false, "attention_dropout": 0.0, "torch_dtype": "float16", "tie_word_embeddings": false, "architectures": ["LlamaForCausalLM"], "bos_token_id": 1, "eos_token_id": 2, "model_type": "llama", "neuron": {"task": "text-generation", "batch_size": 4, "num_cores": 8, "auto_cast_type": "fp16", "sequence_length": 2048, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "meta-llama/Llama-2-7b-chat-hf", "checkpoint_revision": "c1b0db933684edbfe29a06fa47eb19cc48025e93"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/mistral/HuggingFaceH4/zephyr-7b-beta/29fa14d6dca5ddb7b6cc.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 32768, "hidden_size": 4096, "intermediate_size": 14336, "num_hidden_layers": 32, "num_attention_heads": 32, "sliding_window": 4096, "num_key_value_heads": 8, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-05, "use_cache": true, "rope_theta": 10000.0, "attention_dropout": 0.0, "torch_dtype": "bfloat16", "tie_word_embeddings": false, "architectures": ["MistralForCausalLM"], "bos_token_id": 1, "pad_token_id": 2, "eos_token_id": 2, "model_type": "mistral", "neuron": {"task": "text-generation", "batch_size": 1, "num_cores": 2, "auto_cast_type": "bf16", "sequence_length": 2048, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "HuggingFaceH4/zephyr-7b-beta", "checkpoint_revision": "dc24cabd13eacd3ae3a5fe574bd645483a335a4a"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/mistral/HuggingFaceH4/zephyr-7b-beta/2d648875fb6505476df2.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 32768, "hidden_size": 4096, "intermediate_size": 14336, "num_hidden_layers": 32, "num_attention_heads": 32, "sliding_window": 4096, "num_key_value_heads": 8, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-05, "use_cache": true, "rope_theta": 10000.0, "attention_dropout": 0.0, "torch_dtype": "bfloat16", "tie_word_embeddings": false, "architectures": ["MistralForCausalLM"], "bos_token_id": 1, "pad_token_id": 2, "eos_token_id": 2, "model_type": "mistral", "neuron": {"task": "text-generation", "batch_size": 4, "num_cores": 8, "auto_cast_type": "bf16", "sequence_length": 2048, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "HuggingFaceH4/zephyr-7b-beta", "checkpoint_revision": "dc24cabd13eacd3ae3a5fe574bd645483a335a4a"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/mistral/HuggingFaceH4/zephyr-7b-beta/9d0f7edcd3fa4ddb10ad.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 32768, "hidden_size": 4096, "intermediate_size": 14336, "num_hidden_layers": 32, "num_attention_heads": 32, "sliding_window": 4096, "num_key_value_heads": 8, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-05, "use_cache": true, "rope_theta": 10000.0, "attention_dropout": 0.0, "torch_dtype": "bfloat16", "tie_word_embeddings": false, "architectures": ["MistralForCausalLM"], "bos_token_id": 1, "pad_token_id": 2, "eos_token_id": 2, "model_type": "mistral", "neuron": {"task": "text-generation", "batch_size": 1, "num_cores": 8, "auto_cast_type": "bf16", "sequence_length": 2048, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "HuggingFaceH4/zephyr-7b-beta", "checkpoint_revision": "dc24cabd13eacd3ae3a5fe574bd645483a335a4a"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/mistral/HuggingFaceH4/zephyr-7b-beta/f9c9bba59ee26411cf80.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 32768, "hidden_size": 4096, "intermediate_size": 14336, "num_hidden_layers": 32, "num_attention_heads": 32, "sliding_window": 4096, "num_key_value_heads": 8, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-05, "use_cache": true, "rope_theta": 10000.0, "attention_dropout": 0.0, "torch_dtype": "bfloat16", "tie_word_embeddings": false, "architectures": ["MistralForCausalLM"], "bos_token_id": 1, "pad_token_id": 2, "eos_token_id": 2, "model_type": "mistral", "neuron": {"task": "text-generation", "batch_size": 4, "num_cores": 2, "auto_cast_type": "bf16", "sequence_length": 2048, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "HuggingFaceH4/zephyr-7b-beta", "checkpoint_revision": "dc24cabd13eacd3ae3a5fe574bd645483a335a4a"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/mistral/dacorvo/tiny-random-MistralForCausalLM/06fc7c62762607ab744e.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 512, "hidden_size": 32, "intermediate_size": 37, "num_hidden_layers": 2, "num_attention_heads": 4, "sliding_window": 4096, "num_key_value_heads": 2, "hidden_act": "gelu", "initializer_range": 0.02, "rms_norm_eps": 1e-06, "use_cache": true, "rope_theta": 10000.0, "attention_dropout": 0.0, "torch_dtype": "float32", "tie_word_embeddings": false, "is_decoder": true, "architectures": ["MistralForCausalLM"], "bos_token_id": 1, "pad_token_id": 0, "eos_token_id": 2, "attention_probs_dropout_prob": 0.1, "hidden_dropout_prob": 0.1, "model_type": "mistral", "type_vocab_size": 16, "neuron": {"task": "text-generation", "batch_size": 2, "num_cores": 2, "auto_cast_type": "fp16", "sequence_length": 100, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "dacorvo/tiny-random-MistralForCausalLM", "checkpoint_revision": "81d453e3c8985649e9ee3d4c9378461029d1c73a"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/mistral/dacorvo/tiny-random-MistralForCausalLM/5975d2858f9bce8ef290.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 512, "hidden_size": 32, "intermediate_size": 37, "num_hidden_layers": 2, "num_attention_heads": 4, "sliding_window": 4096, "num_key_value_heads": 2, "hidden_act": "gelu", "initializer_range": 0.02, "rms_norm_eps": 1e-06, "use_cache": true, "rope_theta": 10000.0, "attention_dropout": 0.0, "torch_dtype": "float32", "tie_word_embeddings": false, "is_decoder": true, "architectures": ["MistralForCausalLM"], "bos_token_id": 1, "pad_token_id": 0, "eos_token_id": 2, "attention_probs_dropout_prob": 0.1, "hidden_dropout_prob": 0.1, "model_type": "mistral", "type_vocab_size": 16, "neuron": {"task": "text-generation", "batch_size": 1, "num_cores": 2, "auto_cast_type": "fp16", "sequence_length": 100, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "dacorvo/tiny-random-MistralForCausalLM", "checkpoint_revision": "81d453e3c8985649e9ee3d4c9378461029d1c73a"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/mistral/dacorvo/tiny-random-MistralForCausalLM/9a8b7079ddb8a7611d5a.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 512, "hidden_size": 32, "intermediate_size": 37, "num_hidden_layers": 2, "num_attention_heads": 4, "sliding_window": 4096, "num_key_value_heads": 2, "hidden_act": "gelu", "initializer_range": 0.02, "rms_norm_eps": 1e-06, "use_cache": true, "rope_theta": 10000.0, "attention_dropout": 0.0, "torch_dtype": "float32", "tie_word_embeddings": false, "is_decoder": true, "architectures": ["MistralForCausalLM"], "bos_token_id": 1, "pad_token_id": 0, "eos_token_id": 2, "attention_probs_dropout_prob": 0.1, "hidden_dropout_prob": 0.1, "model_type": "mistral", "type_vocab_size": 16, "neuron": {"task": "text-generation", "batch_size": 1, "num_cores": 2, "auto_cast_type": "fp32", "sequence_length": 100, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "dacorvo/tiny-random-MistralForCausalLM", "checkpoint_revision": "81d453e3c8985649e9ee3d4c9378461029d1c73a"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/mistral/dacorvo/tiny-random-MistralForCausalLM/fbf45e5cda8a458d37d3.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 512, "hidden_size": 32, "intermediate_size": 37, "num_hidden_layers": 2, "num_attention_heads": 4, "sliding_window": 4096, "num_key_value_heads": 2, "hidden_act": "gelu", "initializer_range": 0.02, "rms_norm_eps": 1e-06, "use_cache": true, "rope_theta": 10000.0, "attention_dropout": 0.0, "torch_dtype": "float32", "tie_word_embeddings": false, "is_decoder": true, "architectures": ["MistralForCausalLM"], "bos_token_id": 1, "pad_token_id": 0, "eos_token_id": 2, "attention_probs_dropout_prob": 0.1, "hidden_dropout_prob": 0.1, "model_type": "mistral", "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "dacorvo/tiny-random-MistralForCausalLM", "checkpoint_revision": "81d453e3c8985649e9ee3d4c9378461029d1c73a", "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "type_vocab_size": 16}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/mistral/mistralai/Mistral-7B-Instruct-v0.1/3aead8155eb174dbe347.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 32768, "hidden_size": 4096, "intermediate_size": 14336, "num_hidden_layers": 32, "num_attention_heads": 32, "sliding_window": 4096, "num_key_value_heads": 8, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-05, "use_cache": true, "rope_theta": 10000.0, "attention_dropout": 0.0, "torch_dtype": "bfloat16", "tie_word_embeddings": false, "architectures": ["MistralForCausalLM"], "bos_token_id": 1, "eos_token_id": 2, "model_type": "mistral", "neuron": {"task": "text-generation", "batch_size": 4, "num_cores": 8, "auto_cast_type": "bf16", "sequence_length": 2048, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "mistralai/Mistral-7B-Instruct-v0.1", "checkpoint_revision": "9ab9e76e2b09f9f29ea2d56aa5bd139e4445c59e"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/mistral/mistralai/Mistral-7B-Instruct-v0.1/5db6675ddfc8e4d5d6c3.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 32768, "hidden_size": 4096, "intermediate_size": 14336, "num_hidden_layers": 32, "num_attention_heads": 32, "sliding_window": 4096, "num_key_value_heads": 8, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-05, "use_cache": true, "rope_theta": 10000.0, "attention_dropout": 0.0, "torch_dtype": "bfloat16", "tie_word_embeddings": false, "architectures": ["MistralForCausalLM"], "bos_token_id": 1, "eos_token_id": 2, "model_type": "mistral", "neuron": {"task": "text-generation", "batch_size": 1, "num_cores": 8, "auto_cast_type": "bf16", "sequence_length": 2048, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "mistralai/Mistral-7B-Instruct-v0.1", "checkpoint_revision": "9ab9e76e2b09f9f29ea2d56aa5bd139e4445c59e"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/mistral/mistralai/Mistral-7B-Instruct-v0.1/b00a671aa7624d706817.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 32768, "hidden_size": 4096, "intermediate_size": 14336, "num_hidden_layers": 32, "num_attention_heads": 32, "sliding_window": 4096, "num_key_value_heads": 8, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-05, "use_cache": true, "rope_theta": 10000.0, "attention_dropout": 0.0, "torch_dtype": "bfloat16", "tie_word_embeddings": false, "architectures": ["MistralForCausalLM"], "bos_token_id": 1, "eos_token_id": 2, "model_type": "mistral", "neuron": {"task": "text-generation", "batch_size": 4, "num_cores": 2, "auto_cast_type": "bf16", "sequence_length": 2048, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "mistralai/Mistral-7B-Instruct-v0.1", "checkpoint_revision": "9ab9e76e2b09f9f29ea2d56aa5bd139e4445c59e"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/mistral/mistralai/Mistral-7B-Instruct-v0.1/efae9c5d84d090ec8a1d.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 32768, "hidden_size": 4096, "intermediate_size": 14336, "num_hidden_layers": 32, "num_attention_heads": 32, "sliding_window": 4096, "num_key_value_heads": 8, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-05, "use_cache": true, "rope_theta": 10000.0, "attention_dropout": 0.0, "torch_dtype": "bfloat16", "tie_word_embeddings": false, "architectures": ["MistralForCausalLM"], "bos_token_id": 1, "eos_token_id": 2, "model_type": "mistral", "neuron": {"task": "text-generation", "batch_size": 1, "num_cores": 2, "auto_cast_type": "bf16", "sequence_length": 2048, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "mistralai/Mistral-7B-Instruct-v0.1", "checkpoint_revision": "9ab9e76e2b09f9f29ea2d56aa5bd139e4445c59e"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/opt/hf-internal-testing/tiny-random-OPTForCausalLM/0128c1baa44ad491d0f4.json
DELETED
@@ -1 +0,0 @@
{"torch_dtype": "float32", "is_decoder": true, "architectures": ["OPTForCausalLM"], "bos_token_id": 2, "pad_token_id": 1, "eos_token_id": 2, "embed_dim": 16, "model_type": "opt", "neuron": {"auto_cast_type": "fp32", "batch_size": 1, "checkpoint_id": "hf-internal-testing/tiny-random-OPTForCausalLM", "checkpoint_revision": "190d1f4fc0011d2eaeaa05282e0fbd2445e4b11f", "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "num_cores": 2, "sequence_length": 100, "task": "text-generation"}, "vocab_size": 50265, "max_position_embeddings": 100, "num_attention_heads": 4, "word_embed_proj_dim": 16, "ffn_dim": 4, "hidden_size": 16, "num_hidden_layers": 5, "dropout": 0.1, "attention_dropout": 0.1, "activation_function": "relu", "init_std": 0.02, "layerdrop": 0.0, "use_cache": true, "do_layer_norm_before": true, "enable_bias": true, "layer_norm_elementwise_affine": true, "_remove_final_layer_norm": false}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/opt/hf-internal-testing/tiny-random-OPTForCausalLM/6f2444cfa075d1f210d0.json
DELETED
@@ -1 +0,0 @@
{"torch_dtype": "float32", "is_decoder": true, "architectures": ["OPTForCausalLM"], "bos_token_id": 2, "pad_token_id": 1, "eos_token_id": 2, "embed_dim": 16, "model_type": "opt", "vocab_size": 50265, "max_position_embeddings": 100, "num_attention_heads": 4, "word_embed_proj_dim": 16, "ffn_dim": 4, "hidden_size": 16, "num_hidden_layers": 5, "dropout": 0.1, "attention_dropout": 0.1, "activation_function": "relu", "init_std": 0.02, "layerdrop": 0.0, "use_cache": true, "do_layer_norm_before": true, "enable_bias": true, "layer_norm_elementwise_affine": true, "_remove_final_layer_norm": false, "neuron": {"task": "text-generation", "batch_size": 1, "num_cores": 2, "auto_cast_type": "fp16", "sequence_length": 100, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "hf-internal-testing/tiny-random-OPTForCausalLM", "checkpoint_revision": "190d1f4fc0011d2eaeaa05282e0fbd2445e4b11f"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/opt/hf-internal-testing/tiny-random-OPTForCausalLM/ea89a1b8198a05d1e71d.json
DELETED
@@ -1 +0,0 @@
{"torch_dtype": "float32", "is_decoder": true, "architectures": ["OPTForCausalLM"], "bos_token_id": 2, "pad_token_id": 1, "eos_token_id": 2, "embed_dim": 16, "model_type": "opt", "vocab_size": 50265, "max_position_embeddings": 100, "num_attention_heads": 4, "word_embed_proj_dim": 16, "ffn_dim": 4, "hidden_size": 16, "num_hidden_layers": 5, "dropout": 0.1, "attention_dropout": 0.1, "activation_function": "relu", "init_std": 0.02, "layerdrop": 0.0, "use_cache": true, "do_layer_norm_before": true, "enable_bias": true, "layer_norm_elementwise_affine": true, "_remove_final_layer_norm": false, "neuron": {"task": "text-generation", "batch_size": 2, "num_cores": 2, "auto_cast_type": "fp16", "sequence_length": 100, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "hf-internal-testing/tiny-random-OPTForCausalLM", "checkpoint_revision": "190d1f4fc0011d2eaeaa05282e0fbd2445e4b11f"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.18/opt/hf-internal-testing/tiny-random-OPTForCausalLM/f3ff2a63bf38db3d9595.json
DELETED
@@ -1 +0,0 @@
{"torch_dtype": "float32", "is_decoder": true, "architectures": ["OPTForCausalLM"], "bos_token_id": 2, "pad_token_id": 1, "eos_token_id": 2, "embed_dim": 16, "model_type": "opt", "vocab_size": 50265, "max_position_embeddings": 100, "num_attention_heads": 4, "word_embed_proj_dim": 16, "ffn_dim": 4, "hidden_size": 16, "num_hidden_layers": 5, "dropout": 0.1, "attention_dropout": 0.1, "activation_function": "relu", "init_std": 0.02, "layerdrop": 0.0, "use_cache": true, "do_layer_norm_before": true, "enable_bias": true, "layer_norm_elementwise_affine": true, "_remove_final_layer_norm": false, "neuron": {"task": "text-generation", "batch_size": 1, "num_cores": 2, "auto_cast_type": "fp32", "sequence_length": 100, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "hf-internal-testing/tiny-random-OPTForCausalLM", "checkpoint_revision": "190d1f4fc0011d2eaeaa05282e0fbd2445e4b11f"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.19.dev0/inference/llama/NousResearch/Llama-2-13b-chat-hf/190a7b1844748ebd6f79.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 4096, "hidden_size": 5120, "intermediate_size": 13824, "num_hidden_layers": 40, "num_attention_heads": 40, "num_key_value_heads": 40, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-05, "pretraining_tp": 1, "use_cache": true, "rope_theta": 10000.0, "rope_scaling": null, "attention_bias": false, "attention_dropout": 0.0, "torch_dtype": "float16", "tie_word_embeddings": false, "architectures": ["LlamaForCausalLM"], "bos_token_id": 1, "pad_token_id": 0, "eos_token_id": 2, "model_type": "llama", "neuron": {"task": "text-generation", "batch_size": 1, "num_cores": 8, "auto_cast_type": "fp16", "sequence_length": 4096, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "NousResearch/Llama-2-13b-chat-hf", "checkpoint_revision": "aad3c0b2315a458d54cd9b09ddc51cf06021196f"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.19.dev0/inference/llama/NousResearch/Llama-2-13b-chat-hf/b39207446be340f95f2a.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 4096, "hidden_size": 5120, "intermediate_size": 13824, "num_hidden_layers": 40, "num_attention_heads": 40, "num_key_value_heads": 40, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-05, "pretraining_tp": 1, "use_cache": true, "rope_theta": 10000.0, "rope_scaling": null, "attention_bias": false, "attention_dropout": 0.0, "torch_dtype": "float16", "tie_word_embeddings": false, "architectures": ["LlamaForCausalLM"], "bos_token_id": 1, "pad_token_id": 0, "eos_token_id": 2, "model_type": "llama", "neuron": {"task": "text-generation", "batch_size": 4, "num_cores": 8, "auto_cast_type": "fp16", "sequence_length": 4096, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "NousResearch/Llama-2-13b-chat-hf", "checkpoint_revision": "aad3c0b2315a458d54cd9b09ddc51cf06021196f"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.19.dev0/inference/llama/NousResearch/Llama-2-13b-chat-hf/c5566dad938f8351a225.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 4096, "hidden_size": 5120, "intermediate_size": 13824, "num_hidden_layers": 40, "num_attention_heads": 40, "num_key_value_heads": 40, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-05, "pretraining_tp": 1, "use_cache": true, "rope_theta": 10000.0, "rope_scaling": null, "attention_bias": false, "attention_dropout": 0.0, "torch_dtype": "float16", "tie_word_embeddings": false, "architectures": ["LlamaForCausalLM"], "bos_token_id": 1, "pad_token_id": 0, "eos_token_id": 2, "model_type": "llama", "neuron": {"task": "text-generation", "batch_size": 8, "num_cores": 8, "auto_cast_type": "fp16", "sequence_length": 4096, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "NousResearch/Llama-2-13b-chat-hf", "checkpoint_revision": "aad3c0b2315a458d54cd9b09ddc51cf06021196f"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.19.dev0/inference/llama/NousResearch/Llama-2-7b-chat-hf/3671f4f6e9fb55e4d4fd.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 4096, "hidden_size": 4096, "intermediate_size": 11008, "num_hidden_layers": 32, "num_attention_heads": 32, "num_key_value_heads": 32, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-05, "pretraining_tp": 1, "use_cache": true, "rope_theta": 10000.0, "rope_scaling": null, "attention_bias": false, "attention_dropout": 0.0, "torch_dtype": "float16", "tie_word_embeddings": false, "architectures": ["LlamaForCausalLM"], "bos_token_id": 1, "pad_token_id": 0, "eos_token_id": 2, "model_type": "llama", "neuron": {"task": "text-generation", "batch_size": 1, "num_cores": 2, "auto_cast_type": "fp16", "sequence_length": 4096, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "NousResearch/Llama-2-7b-chat-hf", "checkpoint_revision": "37892f30c23786c0d5367d80481fa0d9fba93cf8"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.19.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/11e18115af2cd459e483.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 4096, "hidden_size": 4096, "intermediate_size": 11008, "num_hidden_layers": 32, "num_attention_heads": 32, "num_key_value_heads": 32, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-05, "pretraining_tp": 1, "use_cache": true, "rope_theta": 10000.0, "rope_scaling": null, "attention_bias": false, "attention_dropout": 0.0, "torch_dtype": "float16", "tie_word_embeddings": false, "architectures": ["LlamaForCausalLM"], "bos_token_id": 1, "eos_token_id": 2, "model_type": "llama", "neuron": {"task": "text-generation", "batch_size": 4, "num_cores": 2, "auto_cast_type": "fp16", "sequence_length": 4096, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "meta-llama/Llama-2-7b-chat-hf", "checkpoint_revision": "c1b0db933684edbfe29a06fa47eb19cc48025e93"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.19.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/15e8c952b3c81fd87a57.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 4096, "hidden_size": 4096, "intermediate_size": 11008, "num_hidden_layers": 32, "num_attention_heads": 32, "num_key_value_heads": 32, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-05, "pretraining_tp": 1, "use_cache": true, "rope_theta": 10000.0, "rope_scaling": null, "attention_bias": false, "attention_dropout": 0.0, "torch_dtype": "float16", "tie_word_embeddings": false, "architectures": ["LlamaForCausalLM"], "bos_token_id": 1, "eos_token_id": 2, "model_type": "llama", "neuron": {"task": "text-generation", "batch_size": 4, "num_cores": 24, "auto_cast_type": "fp16", "sequence_length": 4096, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "meta-llama/Llama-2-7b-chat-hf", "checkpoint_revision": "c1b0db933684edbfe29a06fa47eb19cc48025e93"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.19.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/25d78386bf1607f07ae3.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 4096, "hidden_size": 4096, "intermediate_size": 11008, "num_hidden_layers": 32, "num_attention_heads": 32, "num_key_value_heads": 32, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-05, "pretraining_tp": 1, "use_cache": true, "rope_theta": 10000.0, "rope_scaling": null, "attention_bias": false, "attention_dropout": 0.0, "torch_dtype": "float16", "tie_word_embeddings": false, "architectures": ["LlamaForCausalLM"], "bos_token_id": 1, "eos_token_id": 2, "model_type": "llama", "neuron": {"task": "text-generation", "batch_size": 1, "num_cores": 24, "auto_cast_type": "fp16", "sequence_length": 4096, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "meta-llama/Llama-2-7b-chat-hf", "checkpoint_revision": "c1b0db933684edbfe29a06fa47eb19cc48025e93"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.19.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/3172e154191b4ca8bee2.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 4096, "hidden_size": 4096, "intermediate_size": 11008, "num_hidden_layers": 32, "num_attention_heads": 32, "num_key_value_heads": 32, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-05, "pretraining_tp": 1, "use_cache": true, "rope_theta": 10000.0, "rope_scaling": null, "attention_bias": false, "attention_dropout": 0.0, "torch_dtype": "float16", "tie_word_embeddings": false, "architectures": ["LlamaForCausalLM"], "bos_token_id": 1, "eos_token_id": 2, "model_type": "llama", "neuron": {"task": "text-generation", "batch_size": 1, "num_cores": 2, "auto_cast_type": "fp16", "sequence_length": 4096, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "meta-llama/Llama-2-7b-chat-hf", "checkpoint_revision": "c1b0db933684edbfe29a06fa47eb19cc48025e93"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.19.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/348c549d4253f4d0d1e3.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 4096, "hidden_size": 4096, "intermediate_size": 11008, "num_hidden_layers": 32, "num_attention_heads": 32, "num_key_value_heads": 32, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-05, "pretraining_tp": 1, "use_cache": true, "rope_theta": 10000.0, "rope_scaling": null, "attention_bias": false, "attention_dropout": 0.0, "torch_dtype": "float16", "tie_word_embeddings": false, "architectures": ["LlamaForCausalLM"], "bos_token_id": 1, "eos_token_id": 2, "model_type": "llama", "neuron": {"task": "text-generation", "batch_size": 1, "num_cores": 8, "auto_cast_type": "fp16", "sequence_length": 4096, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "meta-llama/Llama-2-7b-chat-hf", "checkpoint_revision": "c1b0db933684edbfe29a06fa47eb19cc48025e93"}}
neuronxcc-2.12.68.0+4480452af/0_REGISTRY/0.0.19.dev0/inference/llama/meta-llama/Llama-2-7b-chat-hf/443c93e614cf44693e97.json
DELETED
@@ -1 +0,0 @@
{"vocab_size": 32000, "max_position_embeddings": 4096, "hidden_size": 4096, "intermediate_size": 11008, "num_hidden_layers": 32, "num_attention_heads": 32, "num_key_value_heads": 32, "hidden_act": "silu", "initializer_range": 0.02, "rms_norm_eps": 1e-05, "pretraining_tp": 1, "use_cache": true, "rope_theta": 10000.0, "rope_scaling": null, "attention_bias": false, "attention_dropout": 0.0, "torch_dtype": "float16", "tie_word_embeddings": false, "architectures": ["LlamaForCausalLM"], "bos_token_id": 1, "eos_token_id": 2, "model_type": "llama", "neuron": {"task": "text-generation", "batch_size": 4, "num_cores": 8, "auto_cast_type": "fp16", "sequence_length": 4096, "compiler_type": "neuronx-cc", "compiler_version": "2.12.68.0+4480452af", "checkpoint_id": "meta-llama/Llama-2-7b-chat-hf", "checkpoint_revision": "c1b0db933684edbfe29a06fa47eb19cc48025e93"}}