augini / app.py
from __future__ import annotations
import gradio as gr
import pandas as pd
import os
from typing import Optional, Iterable
from gradio.themes.base import Base
from gradio.themes.utils import colors, fonts, sizes
from augini import Augini


# Create custom dark theme
class AuginiDarkTheme(Base):
def __init__(
self,
*,
primary_hue: colors.Color | str = colors.indigo,
secondary_hue: colors.Color | str = colors.indigo,
neutral_hue: colors.Color | str = colors.gray,
spacing_size: sizes.Size | str = sizes.spacing_md,
radius_size: sizes.Size | str = sizes.radius_lg,
text_size: sizes.Size | str = sizes.text_md,
font: fonts.Font | str | Iterable[fonts.Font | str] = (
fonts.GoogleFont("Inter"),
"ui-sans-serif",
"sans-serif",
),
):
super().__init__(
primary_hue=primary_hue,
secondary_hue=secondary_hue,
neutral_hue=neutral_hue,
spacing_size=spacing_size,
radius_size=radius_size,
text_size=text_size,
font=font,
)
self.name = "augini_dark"
self.set(
# Dark theme colors
body_background_fill="*neutral_950",
body_text_color="*neutral_200",
background_fill_primary="*neutral_900",
background_fill_secondary="*neutral_800",
border_color_primary="*neutral_700",
# Components
block_background_fill="*neutral_900",
block_border_color="*neutral_700",
block_border_width="1px",
block_label_background_fill="*neutral_900",
block_label_text_color="*neutral_200",
block_title_text_color="*neutral_200",
# Buttons
button_primary_background_fill="*primary_600",
button_primary_background_fill_hover="*primary_500",
button_primary_text_color="white",
button_secondary_background_fill="*neutral_700",
button_secondary_background_fill_hover="*neutral_600",
button_secondary_text_color="*neutral_200",
# Inputs
input_background_fill="*neutral_800",
input_background_fill_focus="*neutral_800",
input_border_color="*neutral_700",
input_border_color_focus="*primary_500",
input_placeholder_color="*neutral_500",
# Shadows and effects
shadow_spread="1px",
block_shadow="0 1px 2px 0 rgb(0 0 0 / 0.05)",
button_shadow="0 1px 2px 0 rgb(0 0 0 / 0.05)",
        )


class AuginiChat:
def __init__(self, model: str, temperature: float = 0.7):
self.df: Optional[pd.DataFrame] = None
self.model = model
self.temperature = temperature
# Initialize Augini with the API key directly
self.augini = Augini(
api_key=os.environ.get('OPENROUTER_TOKEN'),
use_openrouter=True,
model=self.model,
temperature=self.temperature,
max_tokens=1500,
)
def upload_file(self, file) -> str:
"""Handle file upload and return preview"""
try:
if file is None:
return "Please upload a file"
file_path = file.name
file_extension = os.path.splitext(file_path)[1].lower()
# Read the file based on its extension
if file_extension == '.csv':
self.df = pd.read_csv(file_path)
elif file_extension in ['.xlsx', '.xls']:
self.df = pd.read_excel(file_path)
else:
return "❌ Unsupported file format. Please upload a CSV or Excel file."
return "βœ… File uploaded successfully!"
except Exception as e:
return f"❌ Error uploading file: {str(e)}"
def chat_with_data(self, message: str, history: list) -> tuple[str, list]:
"""Process chat messages and return responses"""
try:
if not message or message.strip() == "":
return "", history
if self.df is None:
return "", history + [(message, "⚠️ Please upload a CSV file first.")]
# Get response from Augini
response = self.augini.chat(message, self.df)
# Update history and clear the message
new_history = history + [(message, response)]
return "", new_history
except Exception as e:
error_msg = f"❌ Error processing message: {str(e)}"
return "", history + [(message, error_msg)]
def update_model_settings(self, model_name: str, temperature: float) -> None:
"""Update the model settings and reinitialize Augini."""
self.model = model_name
self.temperature = temperature
        self.augini = Augini(
            api_key=os.environ.get('OPENROUTER_TOKEN'),
            use_openrouter=True,
            model=self.model,
            temperature=self.temperature,
            max_tokens=1500,
        )
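# Hedged standalone sketch (not wired into the app): it exercises the same Augini calls
# that AuginiChat wraps above. It assumes OPENROUTER_TOKEN is set and that a CSV exists
# at the (hypothetical) path passed in; nothing here runs at import time.
def _augini_smoke_test(csv_path: str = "data.csv") -> str:
    """Load a CSV and ask Augini a single question, returning the reply text."""
    aug = Augini(
        api_key=os.environ.get('OPENROUTER_TOKEN'),
        use_openrouter=True,
        model='openai/gpt-4o-mini',
        temperature=0.7,
        max_tokens=1500,
    )
    df = pd.read_csv(csv_path)
    return aug.chat("What are the key patterns in this dataset?", df)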
def create_app():
# Initialize the chat handler with default settings
chat_handler = AuginiChat(model='openai/gpt-4o-mini', temperature=0.7)
# JavaScript to force dark theme - added to head
dark_mode_script = """
<script>
function setDarkTheme() {
const url = new URL(window.location);
if (url.searchParams.get('__theme') !== 'dark') {
url.searchParams.set('__theme', 'dark');
window.location.href = url.href;
}
}
document.addEventListener('DOMContentLoaded', setDarkTheme);
window.addEventListener('load', setDarkTheme);
// Also try to set it immediately
setDarkTheme();
</script>
"""
available_models = [
"mistralai/mistral-nemo",
"meta-llama/llama-3.3-70b-instruct",
"qwen/qwen-2.5-72b-instruct",
"openai/gpt-4o-mini",
"meta-llama/llama-3.2-3b-instruct",
]
    # Create the Gradio interface with the custom dark theme and the dark-mode head script
    with gr.Blocks(theme=AuginiDarkTheme(), head=dark_mode_script) as app:
gr.Markdown("""
        # 🤖 **augini** - your tabular AI data analysis assistant
**augini** is an agentic AI system designed to help you analyze and understand your data through natural conversation.
Upload your data file and start chatting to uncover insights!
        > 💡 **Tip**: Ask questions about patterns, relationships, or any aspect of your data. **augini** will provide detailed, evidence-based answers.
""", elem_classes=["center-content"])
with gr.Accordion("βš™οΈ Model Settings", open=False):
model_dropdown = gr.Dropdown(
label="Select Model",
choices=available_models,
value="openai/gpt-4o-mini"
)
temperature_slider = gr.Slider(
label="Temperature",
minimum=0.0,
maximum=1.0,
value=0.7,
step=0.05
)
def update_settings(model_name, temperature):
chat_handler.update_model_settings(model_name, temperature)
return f"Model settings updated: {model_name}, Temperature: {temperature}"
update_button = gr.Button("Update Model Settings")
update_status = gr.Textbox(label="Update Status", interactive=False)
update_button.click(
update_settings,
inputs=[model_dropdown, temperature_slider],
outputs=[update_status]
)
with gr.Row(elem_classes=["container"]):
# Left sidebar for file upload
with gr.Column(scale=1, elem_classes=["sidebar"]):
gr.Markdown("### πŸ“ upload your data")
file_upload = gr.File(
label="Upload Data File",
file_types=[".csv", ".xlsx", ".xls"],
elem_classes=["file-upload"]
)
file_status = gr.Textbox(
label="Upload Status",
interactive=False,
elem_classes=["status-box"]
)
# Main chat area
with gr.Column(scale=3, elem_classes=["main-content"]):
chatbot = gr.Chatbot(
label="Chat History",
height=500,
elem_classes=["chat-window"]
)
with gr.Row():
msg = gr.Textbox(
label="your question",
placeholder="ask me anything about your data...",
lines=2,
scale=4,
elem_classes=["question-input"]
)
submit_btn = gr.Button("send πŸ“€", scale=1, elem_classes=["submit-btn"])
clear = gr.Button("clear chat πŸ—‘οΈ", elem_classes=["clear-btn"])
# Examples and Documentation in a collapsible section
with gr.Accordion("πŸ“š examples & features", open=False, elem_classes=["docs-section"]):
with gr.Row():
with gr.Column(scale=1):
gr.Markdown("""
### 🎯 example questions
**data overview**
- "what are the key patterns in this dataset?"
- "give me a summary of the main statistics"
**data quality**
- "are there any missing values?"
- "how clean is this dataset?"
**relationships**
- "show me the correlations between columns"
- "what variables are most related?"
**deep analysis**
- "what insights can you find about [column]?"
- "is this a synthetic dataset?"
""")
with gr.Column(scale=1):
gr.Markdown("""
### ✨ features
**smart analysis**
- advanced statistical analysis
- pattern recognition
- anomaly detection
**data support**
- csv files
- excel files (.xlsx, .xls)
- automatic type detection
**ai capabilities**
- natural language understanding
- context-aware responses
- evidence-based insights
""")
# Add powered by link
gr.Markdown("""
<div class="powered-by">
powered by <a href="https://tabularis.ai" target="_blank">tabularis.ai</a>
</div>
""", elem_classes=["footer"])
# Set up event handlers
file_upload.upload(
chat_handler.upload_file,
inputs=[file_upload],
outputs=[file_status]
)
# Add both message submission methods
msg.submit(
chat_handler.chat_with_data,
inputs=[msg, chatbot],
outputs=[msg, chatbot]
)
submit_btn.click(
chat_handler.chat_with_data,
inputs=[msg, chatbot],
outputs=[msg, chatbot]
)
clear.click(lambda: ([], None), None, [chatbot, msg], queue=False)
    return app


if __name__ == "__main__":
app = create_app()
app.launch(share=True)
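# Hedged usage note (assumes a local run rather than a managed host): AuginiChat reads
# the OpenRouter key from the environment, so set it before starting the app, e.g.
#
#     export OPENROUTER_TOKEN=...   # key consumed via os.environ.get('OPENROUTER_TOKEN')
#     python app.py                 # launches the Gradio UI; share=True also prints a public link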