liang-huggingface committed on
Commit 46605b1 · 1 Parent(s): ef0f098

Update app.py

Files changed (1)
  1. app.py +5 -4
app.py CHANGED
@@ -10,9 +10,10 @@ HF_API = os.getenv('HF_API')
 from transformers import AutoModelForCausalLM, AutoTokenizer
 import torch
 
-# Load the model and tokenizer
-tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen-7B-Chat", trust_remote_code=True)
-model = AutoModelForCausalLM.from_pretrained("Qwen/Qwen-7B-Chat", device_map="auto", trust_remote_code=True).eval()
+if False:
+    # Load the model and tokenizer
+    tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen-7B-Chat", trust_remote_code=True)
+    model = AutoModelForCausalLM.from_pretrained("Qwen/Qwen-7B-Chat", device_map="auto", trust_remote_code=True).eval()
 
 def generate_summary(prompt):
     # Add instructions to the prompt to signal that you want a summary
@@ -63,7 +64,7 @@ def search_pubmed(query, retmax):
     return pd.DataFrame(article_list)
 
 # Function to summarize articles using Hugging Face's API
-def summarize_with_huggingface(model, selected_articles, USE_LOCAL=True):
+def summarize_with_huggingface(model, selected_articles, USE_LOCAL=False):
     API_URL = f"https://api-inference.huggingface.co/models/{model}"
     # Your Hugging Face API key
     API_KEY = HF_API
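
The hunks above gate the local Qwen/Qwen-7B-Chat load behind `if False:` (so the Space no longer downloads the 7B checkpoint at startup) and flip the default of `summarize_with_huggingface` to `USE_LOCAL=False`, i.e. the hosted Inference API path. The body of that function is not shown in this diff; the snippet below is only a rough sketch of what the remote call presumably looks like, built from the `API_URL` and `API_KEY` lines visible above. The helper name `query_inference_api`, the payload fields, and the `max_new_tokens` value are illustrative assumptions, not code from this commit.

import requests

def query_inference_api(model, prompt, api_key):
    # Hypothetical helper mirroring the remote path taken when USE_LOCAL=False.
    api_url = f"https://api-inference.huggingface.co/models/{model}"
    headers = {"Authorization": f"Bearer {api_key}"}
    # Standard Inference API payload: the prompt under "inputs",
    # generation options under "parameters" (value chosen for illustration).
    payload = {"inputs": prompt, "parameters": {"max_new_tokens": 200}}
    response = requests.post(api_url, headers=headers, json=payload)
    response.raise_for_status()
    # For text-generation models the API returns JSON containing the generated text.
    return response.json()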