andrewdalpino committed on
Commit
386286b
·
verified ·
1 Parent(s): a4c2ec1

Upload EsmForSequenceClassification

Browse files
Files changed (2) hide show
  1. config.json +3 -2
  2. model.safetensors +1 -1
config.json CHANGED
@@ -3,12 +3,12 @@
3
  "architectures": [
4
  "EsmForSequenceClassification"
5
  ],
6
- "attention_probs_dropout_prob": 0.0,
7
  "classifier_dropout": null,
8
  "emb_layer_norm_before": false,
9
  "esmfold_config": null,
10
  "hidden_act": "gelu",
11
- "hidden_dropout_prob": 0.0,
12
  "hidden_size": 640,
13
  "id2label": {
14
  "0": "GO:0071944",
@@ -5933,6 +5933,7 @@
5933
  },
5934
  "layer_norm_eps": 1e-05,
5935
  "mask_token_id": 32,
 
5936
  "max_position_embeddings": 1026,
5937
  "model_type": "esm",
5938
  "num_attention_heads": 20,
 
3
  "architectures": [
4
  "EsmForSequenceClassification"
5
  ],
6
+ "attention_probs_dropout_prob": 0.1,
7
  "classifier_dropout": null,
8
  "emb_layer_norm_before": false,
9
  "esmfold_config": null,
10
  "hidden_act": "gelu",
11
+ "hidden_dropout_prob": 0.1,
12
  "hidden_size": 640,
13
  "id2label": {
14
  "0": "GO:0071944",
 
5933
  },
5934
  "layer_norm_eps": 1e-05,
5935
  "mask_token_id": 32,
5936
+ "max_position_embedding": 1026,
5937
  "max_position_embeddings": 1026,
5938
  "model_type": "esm",
5939
  "num_attention_heads": 20,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:47ab17d5633929411d4bc69c5024bf00c3cdbad48a13c7128993d3cba96a4814
3
  size 602825864
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cb228958f283941d203e4e42b3a36c8f12f3ebae48a15bb873e7d4101dd5917e
3
  size 602825864