Spaces:
Sleeping
Sleeping
Commit
·
03cac6f
1
Parent(s):
df3d747
Implement LLM chat application
Browse files
- Dockerfile +13 -2
- app.py +24 -5
- chainlit.md +11 -0
- requirements.txt +5 -2
Dockerfile
CHANGED
@@ -1,13 +1,24 @@
|
|
1 |
-
FROM python:3.
|
2 |
|
|
|
3 |
RUN useradd -m -u 1000 user
|
4 |
USER user
|
5 |
ENV PATH="/home/user/.local/bin:$PATH"
|
6 |
|
|
|
7 |
WORKDIR /app
|
8 |
|
|
|
9 |
COPY --chown=user ./requirements.txt requirements.txt
|
|
|
|
|
10 |
RUN pip install --no-cache-dir --upgrade -r requirements.txt
|
11 |
|
|
|
12 |
COPY --chown=user . /app
|
13 |
-
|
|
|
|
|
|
|
|
|
|
|
|
FROM python:3.10-slim

# Run the app as an unprivileged user (required by HF Spaces conventions)
RUN useradd -m -u 1000 user
USER user
ENV PATH="/home/user/.local/bin:$PATH"

# All subsequent commands operate from the app directory
WORKDIR /app

# Copy only the requirements first so the pip layer is cached
# across source-code changes
COPY --chown=user ./requirements.txt requirements.txt
RUN pip install --no-cache-dir --upgrade -r requirements.txt

# Copy the application source into the image
COPY --chown=user . /app

# Chainlit serves on this port
EXPOSE 7860

# Launch the Chainlit application
CMD ["chainlit", "run", "app.py", "--host", "0.0.0.0", "--port", "7860"]
app.py
CHANGED
@@ -1,7 +1,26 @@
|
|
1 |
-
|
|
|
|
|
2 |
|
3 |
-
|
|
|
4 |
|
5 |
-
@
|
6 |
-
def
|
7 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import os

import chainlit as cl
from openai import AsyncOpenAI

# Use the async client: the handler below runs on Chainlit's asyncio event
# loop, and the synchronous OpenAI client would block the loop (and every
# other connected user) for the full duration of each API round-trip.
client = AsyncOpenAI(api_key=os.environ.get("OPENAI_API_KEY"))


@cl.on_message
async def main(message: cl.Message):
    """Handle one incoming chat message: forward it to the LLM and reply.

    Args:
        message: The Chainlit message object; only its text content is used.
    """
    user_message = message.content

    # Call the OpenAI chat-completions API without blocking the event loop.
    response = await client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": user_message},
        ],
        temperature=0.7,
    )

    # Send the model's first completion back to the user.
    await cl.Message(
        content=response.choices[0].message.content,
    ).send()
chainlit.md
ADDED
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Welcome to My LLM App! 👋
|
2 |
+
|
3 |
+
This is a simple LLM application built with Chainlit.
|
4 |
+
|
5 |
+
## How to use
|
6 |
+
|
7 |
+
1. Type your message in the chat box below
|
8 |
+
2. Press Enter or click the Send button
|
9 |
+
3. Wait for the AI to respond
|
10 |
+
|
11 |
+
Feel free to ask me anything!
|
requirements.txt
CHANGED
@@ -1,2 +1,5 @@
|
|
1 |
-
|
2 |
-
|
|
|
|
|
|
|
|
1 |
+
chainlit==0.7.501
|
2 |
+
openai
|
3 |
+
tiktoken
|
4 |
+
langchain
|
5 |
+
python-dotenv
|