Spaces:
Sleeping
Sleeping
Create township_chatbot.py
Browse files- township_chatbot.py +48 -0
township_chatbot.py
ADDED
@@ -0,0 +1,48 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import subprocess
|
2 |
+
import gradio as gr
|
3 |
+
|
4 |
+
def chat_with_ollama(user_message, chat_history):
    """
    Send the running conversation plus the new user message to the Ollama
    model, capture its reply, and append the turn to the history.

    Args:
        user_message: The text the user just submitted.
        chat_history: List of (user, bot) string pairs from earlier turns;
            mutated in place by appending the new turn.

    Returns:
        A (chatbot_pairs, chat_history) tuple — both are the updated list
        of (user, bot) tuples.  The first feeds the gr.Chatbot component,
        the second the gr.State that persists the history.
    """
    # Flatten prior turns into one prompt string so the model sees context;
    # end with a bare "Assistant:" to cue the model to reply.
    turns = [f"User: {user}\nAssistant: {bot}\n" for user, bot in chat_history]
    prompt = "".join(turns) + f"User: {user_message}\nAssistant:"

    # Invoke the Ollama CLI with an argv list (shell=False), so the prompt
    # cannot be interpreted by a shell.  30 s guard against a hung model.
    try:
        result = subprocess.run(
            ["ollama", "run", "township_business_growth_coach", prompt],
            capture_output=True,
            text=True,
            timeout=30,
        )
        if result.returncode == 0:
            bot_reply = result.stdout.strip()
        else:
            bot_reply = f"Error from Ollama: {result.stderr.strip()}"
    except Exception as e:
        # Surface CLI failures (missing binary, timeout, ...) as the bot's
        # reply instead of crashing the UI callback.
        bot_reply = f"Exception: {str(e)}"

    # Record the completed turn.
    chat_history.append((user_message, bot_reply))

    # BUG FIX: gr.Chatbot expects a list of (user, bot) pairs, not
    # [{"User": ..., "Bot": ...}] dicts — the original dict format did not
    # match the component's accepted data shape, so the conversation never
    # rendered correctly.  Return the pair list for both outputs.
    return chat_history, chat_history
|
37 |
+
|
38 |
+
# --- Gradio front-end ------------------------------------------------------
# Wires a minimal chat UI to chat_with_ollama: a Chatbot pane for display,
# a Textbox for input, and a State that carries the (user, bot) history
# between callback invocations.
with gr.Blocks(title="Township Business Growth Coach Chatbot") as demo:
    gr.Markdown("## 💬 Township Business Growth Coach Chatbot")
    chatbox = gr.Chatbot(label="Chat History")
    msg = gr.Textbox(placeholder="Ask your township business growth question here...")
    state = gr.State([])  # stores history as list of (user, bot) tuples
    submit = gr.Button("Send")

    # A turn is triggered either by clicking Send or by pressing Enter in
    # the textbox; both routes run the exact same callback wiring.
    for trigger in (submit.click, msg.submit):
        trigger(chat_with_ollama, inputs=[msg, state], outputs=[chatbox, state])

# Bind to all interfaces on port 7860 so the app is reachable when run
# inside a container / hosted Space.
demo.launch(server_name="0.0.0.0", server_port=7860)
|