Update app.py
app.py CHANGED
@@ -22,7 +22,7 @@ def generate_response(user_input):
     global chat_history
     # Add the user input to the chat history
     chat_history.append({"role": "user", "content": user_input})
-
+
     # Apply the chat template
     prompt = tokenizer.apply_chat_template(chat_history, tokenize=False)
     full_prompt = f"<s>{SYS_PROMPT} [INST] {prompt} [/INST]"
@@ -36,26 +36,40 @@ def generate_response(user_input):
         top_k=50,
         repetition_penalty=1.1,
         temperature=0.7,
+        do_sample=True,  # enable sampling-based generation
+        attention_mask=inputs["attention_mask"],  # pass the attention mask explicitly
     )
 
     # Decode the output
     generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
-
+
     # Add the generated text to the chat history
     chat_history.append({"role": "assistant", "content": generated_text})
-
+
+    # Keep only the latest five question/answer exchanges
+    if len(chat_history) > 10:
+        chat_history = chat_history[-10:]
+
     return generated_text
 
 # Create the Gradio interface
-[9 lines removed: the previous Gradio interface definition; its content is not shown in this view]
+def chat_interface(user_input, history):
+    global chat_history
+    response = generate_response(user_input)
+    history.append((user_input, response))
+    return "", history
+
+iface = gr.Blocks()
+
+with iface:
+    gr.Markdown("# 醫療問答助手\n這是一個基於 MediaTek-Research/Breeze-7B-Instruct-v1_0 模型的醫療問答助手。")
+    chatbot = gr.Chatbot()
+    with gr.Row():
+        txt = gr.Textbox(
+            show_label=False,
+            placeholder="請輸入你的問題...",
+        ).style(container=False)
+    txt.submit(chat_interface, [txt, chatbot], [txt, chatbot])
 
 # Launch the Gradio interface with a public share link
 iface.launch(share=True)
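
The hunks above only show the inside of generate_response and the new Gradio wiring; the model/tokenizer setup, SYS_PROMPT, and the inputs variable come from parts of app.py the commit does not touch. Below is a minimal, self-contained sketch of how the newly added do_sample=True and attention_mask arguments fit into a complete generation call. The loading code, the SYS_PROMPT text, max_new_tokens=512, and the slice that strips the echoed prompt are illustrative assumptions, not code from this repository.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Assumed setup; the actual Space loads the model elsewhere in app.py.
MODEL_NAME = "MediaTek-Research/Breeze-7B-Instruct-v1_0"
SYS_PROMPT = "You are a helpful medical assistant."  # placeholder system prompt

tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForCausalLM.from_pretrained(
    MODEL_NAME, torch_dtype=torch.float16, device_map="auto"
)

chat_history = []

def generate_response(user_input):
    global chat_history
    chat_history.append({"role": "user", "content": user_input})

    # Build the prompt from the running history, as in the diff.
    prompt = tokenizer.apply_chat_template(chat_history, tokenize=False)
    full_prompt = f"<s>{SYS_PROMPT} [INST] {prompt} [/INST]"

    # Tokenizing with return_tensors="pt" yields both input_ids and attention_mask.
    inputs = tokenizer(full_prompt, return_tensors="pt").to(model.device)

    outputs = model.generate(
        inputs["input_ids"],
        attention_mask=inputs["attention_mask"],  # explicit mask, as added in the commit
        max_new_tokens=512,                       # assumed; not visible in the hunks
        top_k=50,
        repetition_penalty=1.1,
        temperature=0.7,
        do_sample=True,                           # sampling mode, so temperature/top_k take effect
    )

    # Decode only the newly generated tokens; slicing off the prompt is an
    # adjustment for readability, the commit itself decodes the full sequence.
    new_tokens = outputs[0][inputs["input_ids"].shape[-1]:]
    generated_text = tokenizer.decode(new_tokens, skip_special_tokens=True)

    chat_history.append({"role": "assistant", "content": generated_text})

    # Keep only the last five question/answer pairs (10 messages), as in the commit.
    if len(chat_history) > 10:
        chat_history = chat_history[-10:]

    return generated_text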
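
A few notes on the design choices in this commit. do_sample=True is what makes temperature and top_k actually influence decoding; without it transformers uses greedy search and warns that those arguments are ignored. Passing attention_mask explicitly avoids the warning about the mask being inferred when the pad and eos tokens coincide. Trimming chat_history to its last 10 entries keeps five user/assistant pairs, which bounds the prompt length across turns. Textbox(...).style(container=False) is the Gradio 3.x styling API; in Gradio 4.x the .style() method was removed, so this Space presumably pins an older gradio release.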