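"""Multi-task UnixCoder: joint vulnerability detection and CWE classification.

A single UnixCoder encoder feeds two linear heads: a binary head that predicts
whether a code snippet is vulnerable, and a CWE head that classifies the
vulnerability type. The CWE loss is applied only to samples labeled vulnerable.
"""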
import torch
from torch import nn
from transformers import AutoModel, PreTrainedModel, PretrainedConfig


class MultiTaskUnixCoderConfig(PretrainedConfig):
    """Configuration; num_cwe_classes sets the output size of the CWE head."""

    model_type = "multi_task_unixcoder"

    def __init__(self, num_cwe_classes=12, **kwargs):
        super().__init__(**kwargs)
        self.num_cwe_classes = num_cwe_classes


class MultiTaskUnixCoder(PreTrainedModel):
    config_class = MultiTaskUnixCoderConfig
    base_model_prefix = "base"
    
    def __init__(self, config):
        super().__init__(config)
        # Shared encoder; both heads read its pooled [CLS] representation.
        self.base = AutoModel.from_pretrained("microsoft/unixcoder-base")
        hidden_size = self.base.config.hidden_size  # 768 for unixcoder-base
        self.vul_head = nn.Linear(hidden_size, 2)  # binary: vulnerable / benign
        self.cwe_head = nn.Linear(hidden_size, config.num_cwe_classes)  # CWE type
    
    def forward(self, input_ids, attention_mask=None, labels_vul=None, labels_cwe=None):
        outputs = self.base(input_ids=input_ids, attention_mask=attention_mask)
        hidden_state = outputs.last_hidden_state[:, 0, :]  # CLS token representation
        
        vul_logits = self.vul_head(hidden_state)
        cwe_logits = self.cwe_head(hidden_state)
        
        # Loss is computed only when both label sets are provided.
        loss = None
        if labels_vul is not None and labels_cwe is not None:
            vul_loss = nn.CrossEntropyLoss()(vul_logits, labels_vul)

            # CWE classification is only meaningful for vulnerable samples,
            # so restrict its loss to rows where labels_vul == 1.
            mask = labels_vul == 1
            if torch.any(mask):
                cwe_loss = nn.CrossEntropyLoss()(cwe_logits[mask], labels_cwe[mask])
                loss = vul_loss + 0.5 * cwe_loss  # down-weight the auxiliary CWE task
            else:
                loss = vul_loss

        output = {"vul_logits": vul_logits, "cwe_logits": cwe_logits}
        if loss is not None:
            output["loss"] = loss
        return output
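

# --- Minimal usage sketch (illustrative, not part of the model definition) ---
# Assumes the "microsoft/unixcoder-base" tokenizer that pairs with the encoder
# above; the code snippet and label values below are made up for demonstration.
if __name__ == "__main__":
    from transformers import AutoTokenizer

    tokenizer = AutoTokenizer.from_pretrained("microsoft/unixcoder-base")
    config = MultiTaskUnixCoderConfig(num_cwe_classes=12)
    model = MultiTaskUnixCoder(config)

    batch = tokenizer(
        ["char buf[8]; strcpy(buf, user_input);"],  # hypothetical input snippet
        return_tensors="pt",
        truncation=True,
        padding=True,
    )
    out = model(
        input_ids=batch["input_ids"],
        attention_mask=batch["attention_mask"],
        labels_vul=torch.tensor([1]),  # 1 = vulnerable (made-up label)
        labels_cwe=torch.tensor([3]),  # hypothetical CWE class index
    )
    print(out["loss"], out["vul_logits"].shape, out["cwe_logits"].shape)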