L3-Evil-Hermes-API / Dockerfile
FROM python:3.9-slim
# Install build dependencies: compiler toolchain (build-essential), git, cmake, and the OpenBLAS/LAPACK development libraries
RUN apt-get update && apt-get install -y \
build-essential \
git \
cmake \
libopenblas-dev \
liblapack-dev \
pkg-config \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /app
# Clone and build llama.cpp (unpinned, so this tracks current master; --recursive pulls in any git submodules)
RUN git clone --recursive https://github.com/ggerganov/llama.cpp.git && \
cd llama.cpp && \
mkdir build && \
cd build && \
# Current llama.cpp uses GGML_BLAS/GGML_BLAS_VENDOR; the older LLAMA_OPENBLAS flag has been removed
cmake .. -DGGML_BLAS=ON -DGGML_BLAS_VENDOR=OpenBLAS && \
cmake --build . --config Release -j $(nproc)
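# The compiled binaries (e.g. llama-cli, llama-server) end up under /app/llama.cpp/build/bin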
# Download the GGUF model weights via huggingface-hub ("*.gguf" matches every quantization in the repo; narrow the pattern to a single quant to keep the image smaller)
RUN pip install huggingface-hub && \
huggingface-cli download DavidAU/L3.1-Evil-Reasoning-Dark-Planet-Hermes-R1-Uncensored-8B-GGUF --local-dir . --include "*.gguf"
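# The downloaded .gguf files land directly in /app (the WORKDIR), assuming they sit at the root of the model repo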
# Install Python API dependencies
COPY requirements.txt .
RUN pip install -r requirements.txt
COPY app.py .
CMD ["python", "app.py"]