Molmo-7B-D-0924-hf / config.json
{
"architectures": [
"MolmoForConditionalGeneration"
],
"image_token_index": 152069,
"initializer_range": 0.02,
"model_type": "molmo",
"pooling_config": {
"model_type": ""
},
"text_config": {
"attention_bias": true,
"model_type": "molmo_text",
"use_attention_layer_norm": false,
"use_postnorm": false
},
"torch_dtype": "float32",
"transformers_version": "4.48.0.dev0",
"vision_config": {
"model_type": "molmo_vision_model"
},
"vision_feature_layers": [
-2,
-9
],
"vision_feature_select_strategy": "default"
}
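
The config above can be inspected programmatically. Below is a minimal sketch, assuming the file sits in a local checkpoint directory (the path shown is hypothetical) and that the installed transformers build registers the "molmo" model type — the file itself was written by transformers 4.48.0.dev0, so an older release may not recognize it.

from transformers import AutoConfig

# Hypothetical local path to the downloaded checkpoint directory
config = AutoConfig.from_pretrained("path/to/Molmo-7B-D-0924-hf")

print(config.model_type)             # "molmo"
print(config.image_token_index)      # 152069
print(config.vision_feature_layers)  # [-2, -9]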