ettin-dec-from-enc-1b / config.json
{
"_name_or_path": "ettin-dec-from-enc-1b",
"architectures": [
"ModernBertDecoderForCausalLM"
],
"attention_bias": false,
"attention_dropout": 0.0,
"bos_token_id": 50281,
"classifier_activation": "gelu",
"classifier_bias": false,
"classifier_dropout": 0.0,
"classifier_pooling": "mean",
"cls_token_id": 50281,
"decoder_bias": true,
"deterministic_flash_attn": false,
"embedding_dropout": 0.0,
"eos_token_id": 50282,
"global_attn_every_n_layers": 3,
"global_rope_theta": 160000.0,
"gradient_checkpointing": false,
"hidden_activation": "gelu",
"hidden_size": 1792,
"initializer_cutoff_factor": 2.0,
"initializer_range": 0.02,
"intermediate_size": 3840,
"layer_norm_eps": 1e-05,
"local_attention": 128,
"local_rope_theta": 160000.0,
"max_position_embeddings": 7999,
"mlp_bias": false,
"mlp_dropout": 0.0,
"model_type": "modernbert-decoder",
"norm_bias": false,
"norm_eps": 1e-05,
"num_attention_heads": 28,
"num_hidden_layers": 28,
"pad_token_id": 50283,
"position_embedding_type": "sans_pos",
"sep_token_id": 50282,
"tie_word_embeddings": true,
"torch_dtype": "float32",
"transformers_version": "4.47.0.dev0",
"vocab_size": 50368,
"is_causal": true,
"masked_prediction": false,
"causal_mask": true,
"eos_token": "[SEP]",
"bos_token": "[CLS]",
"tokenizer_class": "PreTrainedTokenizerFast",
"unk_token": "[UNK]",
"use_cache": true,
"layer_types": [
"full_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"full_attention"
]
}
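
Note: the "layer_types" list follows directly from "global_attn_every_n_layers" and "num_hidden_layers" above: with 28 layers, layer 0 and every third layer after it use full attention, while the remaining layers use sliding-window attention over a 128-token window ("local_attention"). A minimal Python sketch reproducing the pattern from those two config values:

# Sketch: derive layer_types from the config values above.
num_hidden_layers = 28
global_attn_every_n_layers = 3

layer_types = [
    "full_attention" if i % global_attn_every_n_layers == 0 else "sliding_attention"
    for i in range(num_hidden_layers)
]
assert layer_types.count("full_attention") == 10  # layers 0, 3, 6, ..., 27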
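
A minimal usage sketch, assuming a transformers version that ships ModernBertDecoderForCausalLM and that "ettin-dec-from-enc-1b" is a local checkpoint directory (or Hub repo id) containing this config.json alongside the weights and tokenizer files; substitute the actual path or repo id:

# Hedged sketch: "ettin-dec-from-enc-1b" is an assumed checkpoint location.
from transformers import AutoModelForCausalLM, AutoTokenizer

model = AutoModelForCausalLM.from_pretrained("ettin-dec-from-enc-1b")  # resolves to ModernBertDecoderForCausalLM
tokenizer = AutoTokenizer.from_pretrained("ettin-dec-from-enc-1b")     # PreTrainedTokenizerFast; [CLS]/[SEP] serve as BOS/EOS

inputs = tokenizer("The quick brown fox", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=20)  # "use_cache": true enables KV caching during generation
print(tokenizer.decode(outputs[0], skip_special_tokens=True))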