簡嘉琳 committed
Commit 83fff6c · 1 Parent(s): 65b175d

Add HF token for private model access

Files changed (1):
  1. app.py +7 -2
app.py CHANGED
@@ -1,6 +1,7 @@
 import gradio as gr
 from transformers import AutoModelForCausalLM, AutoTokenizer
 import torch
+import os
 
 # Predefined Hugging Face models
 MODEL_NAMES = {
@@ -8,11 +9,15 @@ MODEL_NAMES = {
     "DeepSeek-R1": "deepseek-ai/DeepSeek-R1",
 }
 
+HF_TOKEN = os.getenv("HF_TOKEN")
+
 
 def load_model(model_path):
-    tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
+    tokenizer = AutoTokenizer.from_pretrained(
+        model_path, trust_remote_code=True, token=HF_TOKEN
+    )
     model = AutoModelForCausalLM.from_pretrained(
-        model_path, torch_dtype=torch.float16, trust_remote_code=True
+        model_path, torch_dtype=torch.float16, trust_remote_code=True, token=HF_TOKEN
     ).cuda()
     return model, tokenizer
 
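Not part of the commit itself, but for context: a minimal sketch of how the new HF_TOKEN is expected to be supplied and sanity-checked before load_model pulls a gated or private checkpoint. The environment-variable setup and the huggingface_hub.whoami call are assumptions for illustration, not code from this repository.

import os

from huggingface_hub import whoami

# Assumption: the token is provided through the environment, e.g.
#   export HF_TOKEN=hf_xxxxxxxx
# (on Hugging Face Spaces, add it as a repository secret named HF_TOKEN).
HF_TOKEN = os.getenv("HF_TOKEN")

if HF_TOKEN is None:
    # Without a token, gated/private repos such as those in MODEL_NAMES cannot be downloaded.
    raise RuntimeError("HF_TOKEN is not set; private model access will fail.")

# whoami() raises if the token is invalid; otherwise it returns account details.
print(whoami(token=HF_TOKEN))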