|
---
library_name: peft
base_model: mistralai/Mistral-7B-v0.1
---
|
|
|
```
!pip install peft accelerate bitsandbytes
```
|
|
|
```
from peft import PeftModel, PeftConfig
from transformers import AutoModelForCausalLM, AutoTokenizer

adapter_path = "marekk/Lemma-Mistral-7b-Adapter"

# Read the adapter config to find the base model it was trained on
config = PeftConfig.from_pretrained(adapter_path)

# Load the base model in 4-bit precision, then attach the LoRA adapter
model = AutoModelForCausalLM.from_pretrained(config.base_model_name_or_path, load_in_4bit=True)
model = PeftModel.from_pretrained(model, adapter_path)

tokenizer = AutoTokenizer.from_pretrained(config.base_model_name_or_path)
```
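Optionally, the adapter can be folded into the base weights so the model can be used without PEFT at inference time. The snippet below is only a minimal sketch, assuming enough memory to load the base model in half precision (merging is simplest without 4-bit quantization); the save path is hypothetical.

```
# Optional sketch: merge the LoRA adapter into the base weights.
import torch
from peft import PeftModel
from transformers import AutoModelForCausalLM

base = AutoModelForCausalLM.from_pretrained(
    "mistralai/Mistral-7B-v0.1",
    torch_dtype=torch.float16,
)
merged = PeftModel.from_pretrained(base, "marekk/Lemma-Mistral-7b-Adapter")
merged = merged.merge_and_unload()  # returns a plain transformers model
# merged.save_pretrained("lemma-mistral-7b-merged")  # hypothetical output directory
```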
|
|
|
```
from transformers import pipeline

# Wrap the adapted model in a text-generation pipeline
generator = pipeline('text-generation', model=model, tokenizer=tokenizer)

generator(
    'Render a list by altering each string to its basic lemma form of sport Teams, players and leagues. List:["Slavii", "Spartě", "Olomoucké"]',
    max_length=100, num_return_sequences=1, return_full_text=False, temperature=0.1,
)
```
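If you prefer not to use the pipeline API, roughly the same call can be made with `model.generate` directly. The sketch below mirrors the parameters of the pipeline call above; decoding only the newly generated tokens is one possible way to drop the prompt from the output.

```
# Sketch: run the same prompt through model.generate instead of the pipeline.
prompt = 'Render a list by altering each string to its basic lemma form of sport Teams, players and leagues. List:["Slavii", "Spartě", "Olomoucké"]'
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
output_ids = model.generate(**inputs, max_length=100, temperature=0.1)

# Keep only the tokens generated after the prompt, then decode them
new_tokens = output_ids[0][inputs["input_ids"].shape[1]:]
print(tokenizer.decode(new_tokens, skip_special_tokens=True))
```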
|
|
|
|
|
### Framework versions |
|
|
|
- PEFT 0.8.2 |