{
"_name_or_path": "/jxm/cde/cde-small-v2/checkpoint-2635",
"architecture": "transductive",
"architectures": [
"ContextualDocumentEmbeddingTransformer"
],
"attn_implementation": null,
"auto_map": {
"AutoConfig": "model.ContextualModelConfig",
"AutoModel": "model.ContextualDocumentEmbeddingTransformer"
},
"autoregressive_backbone": false,
"cache_dir": null,
"config_name": null,
"dataset_backbone": null,
"disable_dropout": true,
"disable_transductive_rotary_embedding": true,
"embedder": "answerdotai/ModernBERT-base",
"embedder_rerank": "sentence-transformers/gtr-t5-base",
"embedding_output_dim": null,
"limit_layers": null,
"limit_layers_first_stage": null,
"logit_scale": 50.0,
"max_seq_length": 512,
"model_revision": "main",
"pool_ignore_contextual_tokens": true,
"pool_ignore_instruction_tokens": true,
"pooling_strategy": "mean",
"tokenizer_name": null,
"torch_dtype": "float32",
"transductive_corpus_size": 512,
"transductive_sequence_dropout_prob": 0.0,
"transductive_tie_token_embeddings": false,
"transductive_tokens_per_document": 1,
"transformers_version": "4.48.0.dev0"
}
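
The "auto_map" entries above register custom ContextualModelConfig and ContextualDocumentEmbeddingTransformer classes shipped alongside the checkpoint, so the model cannot be loaded with stock transformers classes alone; trust_remote_code=True is required. Below is a minimal loading sketch, assuming the published repo id is "jxm/cde-small-v2" (inferred from "_name_or_path", not stated in this file) and that the tokenizer comes from the "embedder" backbone listed in the config.

    from transformers import AutoModel, AutoTokenizer

    # trust_remote_code=True lets transformers import the custom classes
    # referenced by "auto_map". Repo id is an assumption (see lead-in).
    model = AutoModel.from_pretrained("jxm/cde-small-v2", trust_remote_code=True)

    # Assumed: the tokenizer is taken from the "embedder" backbone,
    # since "tokenizer_name" is null in the config.
    tokenizer = AutoTokenizer.from_pretrained("answerdotai/ModernBERT-base")

    # "max_seq_length": 512 caps the input length, so truncate to match.
    inputs = tokenizer(
        "An example document.",
        truncation=True,
        max_length=512,
        return_tensors="pt",
    )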