BoghdadyJR committed on
Commit
5d7e3bb
·
verified ·
1 Parent(s): 30465fa

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -2
app.py CHANGED
@@ -1,9 +1,18 @@
1
  import gradio as gr
2
  from transformers import pipeline
3
  from typing import List, Tuple
 
 
4
 
5
- # Initialize the model
6
- generator = pipeline("text-generation", model="BoghdadyJR/med")
 
 
 
 
 
 
 
7
 
8
  # Shortened system prompt
9
  system_prompt="""
 
1
  import gradio as gr
2
  from transformers import pipeline
3
  from typing import List, Tuple
4
+ from peft import PeftModel, PeftConfig
5
+ from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
6
 
7
+ peft_model_id = "BoghdadyJR/med"
8
+ config = PeftConfig.from_pretrained(peft_model_id)
9
+ model = AutoModelForCausalLM.from_pretrained(config.base_model_name_or_path, device_map='auto')
10
+ tokenizer = AutoTokenizer.from_pretrained(config.base_model_name_or_path)
11
+
12
+ # Load the Lora model
13
+ model = PeftModel.from_pretrained(model, peft_model_id)
14
+
15
+ generator = pipeline("text-generation", model=model, tokenizer=tokenizer)
16
 
17
  # Shortened system prompt
18
  system_prompt="""