Update app.py
Browse files
app.py
CHANGED
|
@@ -6,7 +6,7 @@ import gradio as gr
|
|
| 6 |
import google.generativeai as genai
|
| 7 |
from huggingface_hub import HfApi
|
| 8 |
from langdetect import detect
|
| 9 |
-
from
|
| 10 |
|
| 11 |
# ==== 設定 ====
|
| 12 |
GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")
|
|
@@ -23,6 +23,7 @@ if not HF_USERNAME:
|
|
| 23 |
|
| 24 |
genai.configure(api_key=GOOGLE_API_KEY)
|
| 25 |
model = genai.GenerativeModel(model_name='gemini-2.0-flash')
|
|
|
|
| 26 |
api = HfApi()
|
| 27 |
|
| 28 |
memory_list = []
|
|
@@ -31,6 +32,7 @@ BASE_SYSTEM_PROMPT = (
|
|
| 31 |
"ユーザーの要求に応じてテキストと画像に対応し、メモリを適切に管理してください。"
|
| 32 |
)
|
| 33 |
|
|
|
|
| 34 |
def save_to_database(history, memory):
|
| 35 |
data = {
|
| 36 |
"timestamp": time.time(),
|
|
@@ -49,15 +51,15 @@ def save_to_database(history, memory):
|
|
| 49 |
)
|
| 50 |
os.remove(file_name)
|
| 51 |
|
|
|
|
| 52 |
def generate_response(message, history, temperature, top_p, top_k, max_output_tokens, image, lang, use_memories):
|
| 53 |
global memory_list
|
| 54 |
|
| 55 |
detected_lang = detect(message)
|
| 56 |
-
# deep-translator に置換
|
| 57 |
if lang == "English" and detected_lang != "en":
|
| 58 |
-
message =
|
| 59 |
elif lang == "Chinese" and detected_lang != "zh-cn":
|
| 60 |
-
message =
|
| 61 |
|
| 62 |
active_memories = [m for m in memory_list if m not in use_memories]
|
| 63 |
|
|
@@ -91,29 +93,32 @@ def generate_response(message, history, temperature, top_p, top_k, max_output_to
|
|
| 91 |
save_to_database(history, memory_list)
|
| 92 |
return "", history, history, memory_list, None # 画像欄を空に
|
| 93 |
|
|
|
|
| 94 |
def add_memory(new_mem):
    """Append *new_mem* (whitespace-stripped) to the global memory list.

    Blank or whitespace-only input is ignored. Returns the (possibly
    updated) global ``memory_list`` so the caller's UI can refresh.
    """
    entry = new_mem.strip()
    if entry:
        memory_list.append(entry)
    return memory_list
| 98 |
|
| 99 |
def edit_memory(memories):
    """Replace the global memory list with the cleaned entries of *memories*.

    Each item is whitespace-stripped; empty results are dropped. Returns
    the new global ``memory_list``.
    """
    global memory_list
    cleaned = []
    for item in memories:
        item = item.strip()
        if item:
            cleaned.append(item)
    memory_list = cleaned
    return memory_list
|
| 103 |
|
| 104 |
def delete_selected_memory(selected):
    """Remove every memory that appears in *selected* from the global list.

    Preserves the relative order of the surviving entries and returns the
    updated global ``memory_list``.
    """
    global memory_list
    remaining = []
    for mem in memory_list:
        if mem not in selected:
            remaining.append(mem)
    memory_list = remaining
    return memory_list
|
| 108 |
|
| 109 |
-
|
|
|
|
| 110 |
gr.Markdown("## Gemini Chatbot - Image + Memory UI + HF Save")
|
| 111 |
|
| 112 |
with gr.Tabs():
|
| 113 |
with gr.Tab("Chat"):
|
| 114 |
-
chatbot = gr.Chatbot()
|
| 115 |
msg = gr.Textbox(placeholder="メッセージを入力...")
|
| 116 |
-
|
|
|
|
| 117 |
state = gr.State([])
|
| 118 |
mem_state = gr.State([])
|
| 119 |
|
|
@@ -161,4 +166,4 @@ demo.launch(
|
|
| 161 |
server_name="0.0.0.0",
|
| 162 |
server_port=int(os.environ.get("PORT", 7860)),
|
| 163 |
share=False
|
| 164 |
-
)
|
|
|
|
| 6 |
import google.generativeai as genai
|
| 7 |
from huggingface_hub import HfApi
|
| 8 |
from langdetect import detect
|
| 9 |
+
from googletrans import Translator
|
| 10 |
|
| 11 |
# ==== 設定 ====
|
| 12 |
GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")
|
|
|
|
| 23 |
|
| 24 |
genai.configure(api_key=GOOGLE_API_KEY)
|
| 25 |
model = genai.GenerativeModel(model_name='gemini-2.0-flash')
|
| 26 |
+
translator = Translator()
|
| 27 |
api = HfApi()
|
| 28 |
|
| 29 |
memory_list = []
|
|
|
|
| 32 |
"ユーザーの要求に応じてテキストと画像に対応し、メモリを適切に管理してください。"
|
| 33 |
)
|
| 34 |
|
| 35 |
+
# ==== データ保存 ====
|
| 36 |
def save_to_database(history, memory):
|
| 37 |
data = {
|
| 38 |
"timestamp": time.time(),
|
|
|
|
| 51 |
)
|
| 52 |
os.remove(file_name)
|
| 53 |
|
| 54 |
+
# ==== 生成関数 ====
|
| 55 |
def generate_response(message, history, temperature, top_p, top_k, max_output_tokens, image, lang, use_memories):
|
| 56 |
global memory_list
|
| 57 |
|
| 58 |
detected_lang = detect(message)
|
|
|
|
| 59 |
if lang == "English" and detected_lang != "en":
|
| 60 |
+
message = translator.translate(message, dest="en").text
|
| 61 |
elif lang == "Chinese" and detected_lang != "zh-cn":
|
| 62 |
+
message = translator.translate(message, dest="zh-cn").text
|
| 63 |
|
| 64 |
active_memories = [m for m in memory_list if m not in use_memories]
|
| 65 |
|
|
|
|
| 93 |
save_to_database(history, memory_list)
|
| 94 |
return "", history, history, memory_list, None # 画像欄を空に
|
| 95 |
|
| 96 |
+
# ==== メモリ操作 ====
|
| 97 |
def add_memory(new_mem):
    """Append *new_mem* (whitespace-stripped) to the global memory list.

    Blank or whitespace-only input is ignored. Returns all memories joined
    with newlines, ready for display in a textbox-style component.
    """
    entry = new_mem.strip()
    if entry:
        memory_list.append(entry)
    return "\n".join(memory_list)
|
| 101 |
|
| 102 |
def edit_memory(memories):
    """Rebuild the global memory list from the cleaned entries of *memories*.

    Each item is whitespace-stripped; empty results are dropped. Returns
    the new memories joined with newlines for display.
    """
    global memory_list
    cleaned = []
    for item in memories:
        item = item.strip()
        if item:
            cleaned.append(item)
    memory_list = cleaned
    return "\n".join(memory_list)
|
| 106 |
|
| 107 |
def delete_selected_memory(selected):
    """Remove every memory that appears in *selected* from the global list.

    Preserves the relative order of the surviving entries and returns them
    joined with newlines for display.
    """
    global memory_list
    remaining = []
    for mem in memory_list:
        if mem not in selected:
            remaining.append(mem)
    memory_list = remaining
    return "\n".join(memory_list)
|
| 111 |
|
| 112 |
+
# ==== UI構築 ====
|
| 113 |
+
with gr.Blocks() as demo:
|
| 114 |
gr.Markdown("## Gemini Chatbot - Image + Memory UI + HF Save")
|
| 115 |
|
| 116 |
with gr.Tabs():
|
| 117 |
with gr.Tab("Chat"):
|
| 118 |
+
chatbot = gr.Chatbot(type="messages") # type指定で警告回避
|
| 119 |
msg = gr.Textbox(placeholder="メッセージを入力...")
|
| 120 |
+
# 編集機能なしで画像アップロードのみ
|
| 121 |
+
img_upload = gr.Image(type="filepath", label="画像アップロード (プレビュー可)")
|
| 122 |
state = gr.State([])
|
| 123 |
mem_state = gr.State([])
|
| 124 |
|
|
|
|
| 166 |
server_name="0.0.0.0",
|
| 167 |
server_port=int(os.environ.get("PORT", 7860)),
|
| 168 |
share=False
|
| 169 |
+
)
|