Update app.py
app.py CHANGED
@@ -11,17 +11,16 @@ MODEL_NAMES = {
 
 HF_TOKEN = os.getenv("HF_TOKEN")
 
-
 def load_model(model_path):
-    tokenizer = AutoTokenizer.from_pretrained(
-        model_path, trust_remote_code=True, token=HF_TOKEN
-    )
+    tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True, token=HF_TOKEN)
     model = AutoModelForCausalLM.from_pretrained(
-        model_path,
-
+        model_path,
+        trust_remote_code=True,
+        token=HF_TOKEN,
+        device_map="auto",  # let transformers decide automatically between CPU/GPU
+    )
     return model, tokenizer
 
-
 # Load DeepSeek-V3 by default
 current_model, current_tokenizer = load_model("deepseek-ai/DeepSeek-V3")
 
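For context, load_model returns a (model, tokenizer) pair, and device_map="auto" lets transformers (via accelerate) place the weights on a GPU when one is available and fall back to CPU otherwise. Downstream code in the Space could then generate text roughly as in the sketch below; the generate_reply helper and its parameters are illustrative assumptions, not part of the committed app.py.

# Hypothetical usage sketch -- generate_reply is illustrative, not part of app.py.
def generate_reply(prompt, max_new_tokens=128):
    # Tokenize the prompt and move the tensors to whatever device the model landed on.
    inputs = current_tokenizer(prompt, return_tensors="pt").to(current_model.device)
    output_ids = current_model.generate(**inputs, max_new_tokens=max_new_tokens)
    # Decode only the newly generated tokens, skipping the echoed prompt.
    return current_tokenizer.decode(
        output_ids[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True
    )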