from transformers import (
    AutoModelForSequenceClassification,
    AutoTokenizer,
    Trainer,
    TrainingArguments,
)
from peft import LoraConfig, TaskType, get_peft_model
from datasets import load_dataset

# Base model with a fresh 2-class classification head, plus its tokenizer.
model = AutoModelForSequenceClassification.from_pretrained("bert-base-uncased", num_labels=2)
tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")
# LoRA config: task_type=SEQ_CLS inserts low-rank adapters into the attention
# projections and keeps the classification head trainable.
lora_config = LoraConfig(
    task_type=TaskType.SEQ_CLS,
    r=8,
    lora_alpha=16,
    lora_dropout=0.1,
    bias="none",
)
model = get_peft_model(model, lora_config)
model.print_trainable_parameters()
# Load SST-2 from GLUE and tokenize the sentences so the Trainer receives
# input_ids/attention_mask rather than raw text.
dataset = load_dataset("glue", "sst2")

def tokenize(batch):
    return tokenizer(batch["sentence"], truncation=True, padding="max_length", max_length=128)

train_dataset = dataset["train"].map(tokenize, batched=True)
training_args = TrainingArguments(
    output_dir="./results",
    per_device_train_batch_size=16,
    num_train_epochs=3,
    logging_dir="./logs",
)
trainer = Trainer(
    model=model,
    args=training_args,
    train_dataset=train_dataset,
    tokenizer=tokenizer,
)
trainer.train()
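
# Follow-up sketch (an assumption, not part of the original snippet): calling
# save_pretrained() on a PEFT-wrapped model stores only the LoRA adapter weights
# (plus the classifier head marked trainable by task_type=SEQ_CLS), not the full
# BERT checkpoint. The path "./results/lora-adapter" is illustrative.
model.save_pretrained("./results/lora-adapter")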