mahdin70 committed
Commit f2797c6 · verified · 1 parent: a0ba26b

Create modeling_multi_task_unixcoder.py

Files changed (1):
  modeling_multi_task_unixcoder.py  +42 -0
modeling_multi_task_unixcoder.py (ADDED, +42 lines):
import torch
from torch import nn
from transformers import AutoModel, PreTrainedModel, PretrainedConfig


class MultiTaskUnixCoderConfig(PretrainedConfig):
    model_type = "multi_task_unixcoder"

    def __init__(self, num_cwe_classes=12, **kwargs):
        super().__init__(**kwargs)
        self.num_cwe_classes = num_cwe_classes


class MultiTaskUnixCoder(PreTrainedModel):
    config_class = MultiTaskUnixCoderConfig
    base_model_prefix = "base"

    def __init__(self, config):
        super().__init__(config)
        # Shared UniXcoder encoder with two task-specific heads:
        # binary vulnerability detection and CWE-type classification.
        self.base = AutoModel.from_pretrained("microsoft/unixcoder-base")
        self.vul_head = nn.Linear(768, 2)
        self.cwe_head = nn.Linear(768, config.num_cwe_classes)

    def forward(self, input_ids, attention_mask=None, labels_vul=None, labels_cwe=None):
        outputs = self.base(input_ids=input_ids, attention_mask=attention_mask)
        hidden_state = outputs.last_hidden_state[:, 0, :]  # CLS token representation

        vul_logits = self.vul_head(hidden_state)
        cwe_logits = self.cwe_head(hidden_state)

        loss = None
        if labels_vul is not None and labels_cwe is not None:
            vul_loss = nn.CrossEntropyLoss()(vul_logits, labels_vul)

            # The CWE loss is computed only on samples labeled vulnerable and
            # contributes to the joint objective with a 0.5 weight.
            mask = labels_vul == 1
            if torch.any(mask):
                cwe_loss = nn.CrossEntropyLoss()(cwe_logits[mask], labels_cwe[mask])
                loss = vul_loss + 0.5 * cwe_loss
            else:
                loss = vul_loss

        if loss is not None:
            return {"loss": loss, "vul_logits": vul_logits, "cwe_logits": cwe_logits}
        return {"vul_logits": vul_logits, "cwe_logits": cwe_logits}