# fair-plai / app.py
# Author: Cédric KACZMAREK — "après hackathon" (commit 861919a)
import os
import json
import pandas as pd
import gradio as gr
from llama_index.core import (
VectorStoreIndex,
download_loader,
StorageContext
)
import logging
from dotenv import load_dotenv, find_dotenv
from pathlib import Path
# from llama_index.llms.mistralai import MistralAI
from mistralai.client import MistralClient
from mistralai.models.chat_completion import ChatMessage
# from llama_index.embeddings.mistralai import MistralAIEmbedding
from src.utils_fct import *
# --- Application constants -------------------------------------------------
TITLE = "RIZOA-AUCHAN Chatbot Demo"
DESCRIPTION = "Example of an assistant with Gradio, coupling with function callings and Mistral AI via its API"
# Placeholder shown in the empty textbox (grammar fixed: "poser" — infinitive
# after "pouvez").
PLACEHOLDER = (
    "Vous pouvez me poser une question, appuyer sur Entrée pour valider"
)
EXAMPLES = ["Comment fait on pour produire du maïs ?", "Rédige moi une lettre pour faire un stage dans une exploitation agricole", "Comment reprendre une exploitation agricole ?"]
MODEL = "mistral-large-latest"

# Project root resolved from this file's location. Restored from commented-out
# code: BASE_PATH is referenced further down (logo image path) and the app
# raised NameError at import time without it.
FILE = Path(__file__).resolve()
BASE_PATH = FILE.parents[0]

# Load MISTRAL_API_KEY from a .env file if present; falls back to the
# process environment. ENV_API_KEY is None when the variable is unset.
load_dotenv()
ENV_API_KEY = os.environ.get("MISTRAL_API_KEY")

# Historical cereal-price data loading, kept disabled as in the original:
# HISTORY = pd.read_csv(os.path.join(BASE_PATH, "data/cereal_price.csv"), encoding="latin-1")
# HISTORY = HISTORY[[HISTORY["memberStateName"]=="France"]]
# HISTORY['price'] = HISTORY['price'].str.replace(",", ".").astype('float64')

# Shared Mistral API client used by the chat handler below.
CLIENT = MistralClient(api_key=ENV_API_KEY)
with gr.Blocks() as demo:
    # Header row: logo on the left (1/5 width), title/description on the right.
    with gr.Row():
        with gr.Column(scale=1):
            gr.Image(
                # Build the logo path relative to this file so the app works
                # regardless of the current working directory (the original
                # referenced BASE_PATH, whose definition was commented out).
                value=os.path.join(Path(__file__).resolve().parent, "img/logo_rizoa_auchan.jpg"),
                height=250,
                width=250,
                container=False,
                show_download_button=False
            )
        with gr.Column(scale=4):
            gr.Markdown(
                """
                # Bienvenue au Chatbot FAIR-PLAI

                Ce chatbot est un assistant numérique, médiateur des vendeurs-acheteurs
                """
            )

    gr.Markdown(f""" ### {DESCRIPTION} """)

    # Chat widgets: conversation display, user input box, and a clear button
    # that resets both.
    chatbot = gr.Chatbot()
    msg = gr.Textbox(placeholder=PLACEHOLDER)
    clear = gr.ClearButton([msg, chatbot])

    def respond(message, chat_history):
        """Handle one user turn.

        Wraps the user's text in a ChatMessage, runs it through the
        `forecast` tool (imported from src.utils_fct), then asks the Mistral
        LLM to produce the final wording.

        Returns a pair: "" (clears the textbox) and the new chat history.
        """
        messages = [ChatMessage(role="user", content=message)]
        # `forecast` is expected to return a message list suitable for
        # CLIENT.chat -- TODO confirm its contract (wildcard import hides it).
        response = forecast(messages)
        final_response = (
            CLIENT.chat(model=MODEL, messages=response)
            .choices[0]
            .message.content
        )
        # NOTE(review): the incoming chat_history is intentionally discarded
        # and replaced with a fresh two-row history showing only the answer.
        return "", [[None, None],
                    [None, str(final_response)]]

    msg.submit(respond, [msg, chatbot], [msg, chatbot])
# demo.title = TITLE
# Entry point: start the Gradio web server only when this file is run
# directly (not when imported as a module).
if __name__ == "__main__":
    demo.launch()