Update app.py
Browse files
app.py
CHANGED
@@ -1,4 +1,11 @@
|
|
1 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
2 |
from transformers import AutoTokenizer, AutoModelForSequenceClassification
|
3 |
import gradio as gr
|
4 |
import torch
|
@@ -7,41 +14,34 @@ import re
|
|
7 |
# 加载医学诊断模型
|
8 |
# 修改后(正确)
|
9 |
# from transformers import LlamaForSequenceClassification, LlamaTokenizer
|
10 |
-
from transformers import AutoModelForSequenceClassification, AutoTokenizer
|
11 |
import torch
|
12 |
|
13 |
-
#
|
14 |
-
|
15 |
-
config = AutoConfig.from_pretrained("unsloth/DeepSeek-R1-Distill-Llama-8B")
|
16 |
|
17 |
-
# 2.
|
18 |
config.rope_scaling = {
|
19 |
"type": "linear", # 必须字段
|
20 |
-
"factor": 8.0 #
|
21 |
}
|
|
|
|
|
22 |
|
23 |
-
# 3.
|
24 |
-
del config.rope_scaling["high_freq_factor"]
|
25 |
-
del config.rope_scaling["low_freq_factor"]
|
26 |
-
del config.rope_scaling["original_max_position_embeddings"]
|
27 |
-
del config.rope_scaling["rope_type"]
|
28 |
-
|
29 |
-
# ==== 加载模型 ====
|
30 |
model = AutoModelForSequenceClassification.from_pretrained(
|
31 |
"unsloth/DeepSeek-R1-Distill-Llama-8B",
|
32 |
-
config=
|
33 |
-
trust_remote_code=True
|
|
|
34 |
)
|
35 |
|
36 |
-
#
|
37 |
model.load_adapter("yxccai/ds-ai-app")
|
38 |
|
39 |
-
#
|
40 |
tokenizer = AutoTokenizer.from_pretrained("yxccai/ds-ai-app")
|
41 |
|
42 |
-
# ==== 验证配置 ===-
|
43 |
-
print("修正后的RoPE配置:", model.config.rope_scaling) # 应输出: {'type': 'linear', 'factor': 8.0}
|
44 |
-
|
45 |
# 2. 加载你的适配器
|
46 |
# model.load_adapter("yxccai/ds-ai-app") # 替换为你的仓库名
|
47 |
|
|
|
1 |
+
from transformers import LlamaConfig
|
2 |
+
|
3 |
+
class CustomLlamaConfig(LlamaConfig):
    """LlamaConfig subclass with RoPE-scaling validation disabled.

    The stock validator rejects the extended ``rope_scaling`` dict that
    ships with DeepSeek-R1-Distill-Llama-8B checkpoints, so this override
    turns validation into a no-op and accepts any value.
    """

    def _rope_scaling_validation(self):
        # Intentionally do nothing. The original wrote
        #   if self.rope_scaling is None: return
        #   return
        # — both branches just return, so the guard was dead code.
        return
|
9 |
from transformers import AutoTokenizer, AutoModelForSequenceClassification
|
10 |
import gradio as gr
|
11 |
import torch
|
|
|
14 |
# 加载医学诊断模型
|
15 |
# 修改后(正确)
|
16 |
# from transformers import LlamaForSequenceClassification, LlamaTokenizer
|
17 |
+
from transformers import AutoModelForSequenceClassification, AutoTokenizer
|
18 |
import torch
|
19 |
|
20 |
+
# --- Model setup -----------------------------------------------------------
# Step 1: fetch the base configuration through the validation-free subclass.
config = CustomLlamaConfig.from_pretrained("unsloth/DeepSeek-R1-Distill-Llama-8B")

# Step 2: keep only the required RoPE fields, dropping conflicting ones.
# NOTE(review): this assignment appears to be dead — rope_scaling is popped
# from config_dict below before the model is built; confirm intent.
config.rope_scaling = {
    "type": "linear",  # required field
    "factor": 8.0,  # carried over from the original config
}
config_dict = config.to_dict()
config_dict.pop("rope_scaling", None)  # strip the original RoPE settings entirely

# Step 3: build the model, bypassing RoPE validation via CustomLlamaConfig.
model = AutoModelForSequenceClassification.from_pretrained(
    "unsloth/DeepSeek-R1-Distill-Llama-8B",
    config=CustomLlamaConfig(**config_dict),
    trust_remote_code=True,
    ignore_mismatched_sizes=True,  # key parameter: tolerate head-size mismatch
)

# Step 4: attach the fine-tuned adapter weights.
model.load_adapter("yxccai/ds-ai-app")

# Step 5: load the matching tokenizer from the same repository.
tokenizer = AutoTokenizer.from_pretrained("yxccai/ds-ai-app")
|
44 |
|
|
|
|
|
|
|
45 |
# 2. 加载你的适配器
|
46 |
# model.load_adapter("yxccai/ds-ai-app") # 替换为你的仓库名
|
47 |
|