mahdin70 committed on
Commit
730bbf3
·
verified ·
1 Parent(s): 652a93e

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +7 -4
config.json CHANGED
@@ -13,7 +13,7 @@
13
  "intermediate_size": 3072,
14
  "layer_norm_eps": 1e-05,
15
  "max_position_embeddings": 514,
16
- "model_type": "roberta",
17
  "num_attention_heads": 12,
18
  "num_hidden_layers": 9,
19
  "output_past": true,
@@ -24,7 +24,10 @@
24
  "transformers_version": "4.47.0",
25
  "type_vocab_size": 1,
26
  "use_cache": true,
27
- "vocab_size": 51416,
28
- "num_cwe_classes": 107,
29
- "vul_class_weights": [0.546, 5.806]
 
 
 
30
  }
 
13
  "intermediate_size": 3072,
14
  "layer_norm_eps": 1e-05,
15
  "max_position_embeddings": 514,
16
+ "model_type": "multi_task_unixcoder",
17
  "num_attention_heads": 12,
18
  "num_hidden_layers": 9,
19
  "output_past": true,
 
24
  "transformers_version": "4.47.0",
25
  "type_vocab_size": 1,
26
  "use_cache": true,
27
+ "vocab_size": 50265,
28
+ "num_cwe_classes": 106,
29
+ "auto_map": {
30
+ "AutoConfig": "modeling_multi_task_unixcoder.MultiTaskUnixCoderConfig",
31
+ "AutoModel": "modeling_multi_task_unixcoder.MultiTaskUnixCoder"
32
+ }
33
  }