UnixCoder-VulnCWE / modeling_multi_task_unixcoder.py
import torch
from torch import nn
from transformers import AutoModel, PreTrainedModel, PretrainedConfig


class MultiTaskUnixCoderConfig(PretrainedConfig):
    model_type = "multi_task_unixcoder"

    def __init__(self, num_cwe_classes=12, **kwargs):
        super().__init__(**kwargs)
        self.num_cwe_classes = num_cwe_classes


class MultiTaskUnixCoder(PreTrainedModel):
    config_class = MultiTaskUnixCoderConfig
    base_model_prefix = "base"

    def __init__(self, config):
        super().__init__(config)
        # UniXcoder encoder (RoBERTa architecture) shared by both task heads.
        self.base = AutoModel.from_pretrained("microsoft/unixcoder-base")
        hidden_size = self.base.config.hidden_size  # 768 for unixcoder-base
        # Binary head: vulnerable (1) vs. non-vulnerable (0).
        self.vul_head = nn.Linear(hidden_size, 2)
        # Multi-class head: CWE category of a vulnerable sample.
        self.cwe_head = nn.Linear(hidden_size, config.num_cwe_classes)

    def forward(self, input_ids, attention_mask=None, labels_vul=None, labels_cwe=None):
        outputs = self.base(input_ids=input_ids, attention_mask=attention_mask)
        # First-token ([CLS]/<s>) representation summarizes the sequence.
        hidden_state = outputs.last_hidden_state[:, 0, :]
        vul_logits = self.vul_head(hidden_state)
        cwe_logits = self.cwe_head(hidden_state)

        loss = None
        if labels_vul is not None and labels_cwe is not None:
            vul_loss = nn.CrossEntropyLoss()(vul_logits, labels_vul)
            # The CWE loss is computed only over samples labeled vulnerable,
            # so non-vulnerable samples never pull on the CWE head.
            mask = labels_vul == 1
            if torch.any(mask):
                cwe_loss = nn.CrossEntropyLoss()(cwe_logits[mask], labels_cwe[mask])
                loss = vul_loss + 0.5 * cwe_loss  # CWE term down-weighted by 0.5
            else:
                loss = vul_loss

        if loss is not None:
            return {"loss": loss, "vul_logits": vul_logits, "cwe_logits": cwe_logits}
        return {"vul_logits": vul_logits, "cwe_logits": cwe_logits}