import requests
import streamlit as st
from openai import OpenAI

from config import config
from prompts import SYSTEM_PROMPT
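
# The local `config` and `prompts` modules are not shown here; from the way they are
# used below, `config` must expose an OLLAMA_SERVER URL and `prompts` a default
# SYSTEM_PROMPT string. A hypothetical minimal version of each could look like:
#
#   # config.py
#   class Config:
#       OLLAMA_SERVER = "http://localhost:11434"  # Ollama's default port
#   config = Config()
#
#   # prompts.py
#   SYSTEM_PROMPT = "You are a helpful assistant."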


def setup_page():
    """Set up the Streamlit app's page configuration."""
    st.set_page_config(
        page_title="Ollama Chatbot",
        page_icon="🤖",
        layout="wide",
        initial_sidebar_state="auto",
    )


def initialize_session_state():
    """Initialize session state variables."""
    if "messages" not in st.session_state:
        st.session_state.messages = []
    if "ollama_server" not in st.session_state:
        st.session_state.ollama_server = config.OLLAMA_SERVER


def get_ollama_client():
    """Initialize the OpenAI client for Ollama."""
    base_url = f"{st.session_state.ollama_server}/v1/"
    # Ollama's OpenAI-compatible endpoint does not check the key,
    # but the OpenAI client requires a non-empty value.
    return OpenAI(base_url=base_url, api_key="ollama")


def create_message(role, content):
    """Create a chat message dictionary."""
    return {"role": role, "content": content}


def fetch_chat_response(client, messages, model):
    """Fetch a response from the OpenAI client."""
    try:
        response = client.chat.completions.create(messages=messages, model=model)
        return response.choices[0].message.content
    except Exception as e:
        st.error(f"Error generating response: {e}")
        return None


def fetch_available_models(server_url):
    """Fetch available models from the Ollama server."""
    # Ollama's native API lists locally available models at /api/tags.
    url = f"{server_url}/api/tags"
    try:
        # A timeout keeps the UI from hanging if the server is unreachable.
        response = requests.get(url, timeout=10)
        response.raise_for_status()
        return [item["model"] for item in response.json().get("models", [])]
    except requests.RequestException as e:
        st.error(f"Error fetching models: {e}")
        return []


def render_chat_messages():
    """Display chat messages in the UI."""
    for message in st.session_state.messages:
        role = message["role"]
        content = message["content"]
        # Only user and assistant turns are rendered; the system prompt is not shown.
        if role == "user":
            st.chat_message("user").write(content)
        elif role == "assistant":
            st.chat_message("assistant").write(content)


def handle_user_input(client, model):
    """Handle user input and process responses."""
    if user_input := st.chat_input("Say something"):
        # Add system prompt if it's the first message
        if not st.session_state.messages:
            st.session_state.messages.append(create_message("system", SYSTEM_PROMPT))

        # Add user input to chat
        st.session_state.messages.append(create_message("user", user_input))
        st.chat_message("user").write(user_input)

        # Get assistant response
        response = fetch_chat_response(client, st.session_state.messages, model)
        if response:
            st.session_state.messages.append(create_message("assistant", response))
            st.chat_message("assistant").write(response)


def clear_chat():
    """Clear all chat messages."""
    st.session_state.messages = []


def sanitize_url(url):
    """Remove any trailing slashes from a URL."""
    return url.rstrip("/")


def main():
    """Main function to run the chatbot application."""
    # Set up the page and session state
    setup_page()
    initialize_session_state()

    # Initialize client
    client = get_ollama_client()

    # App title
    st.title("Ollama Chatbot")
    st.sidebar.title("Settings")

    # Sidebar: server and models
    ollama_server = st.sidebar.text_input("Ollama Server", value=st.session_state.ollama_server)
    # Sanitize before querying so a trailing slash in the input doesn't produce a malformed URL.
    models = fetch_available_models(sanitize_url(ollama_server))
    if not models:
        st.error("No models available. Please check the server connection.")
        return
    model = st.sidebar.selectbox("Select a Model", models)

    # Sidebar: system prompt
    global SYSTEM_PROMPT
    updated_prompt = st.sidebar.text_area("System Prompt", value=SYSTEM_PROMPT)
    if st.sidebar.button("Update settings", use_container_width=True):
        st.session_state.ollama_server = sanitize_url(ollama_server)
        SYSTEM_PROMPT = updated_prompt
        # Update the system message if it exists, otherwise add it
        if st.session_state.messages and st.session_state.messages[0]["role"] == "system":
            st.session_state.messages[0]["content"] = SYSTEM_PROMPT
        elif not st.session_state.messages:
            st.session_state.messages.insert(0, create_message("system", SYSTEM_PROMPT))

    if st.sidebar.button("Clear Chat", use_container_width=True):
        clear_chat()

    # Main chat interface
    render_chat_messages()
    handle_user_input(client, model)


if __name__ == "__main__":
    main()
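
# To launch this app locally, assuming the file is saved as app.py (the usual
# entry point for a Streamlit Space) with config.py and prompts.py alongside it:
#
#   streamlit run app.py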