# syntax=docker/dockerfile:1
# Dockerfile for a RAG Chainlit app (Hugging Face Spaces runtime, port 7860)
# Use an official Python runtime as a parent image
FROM python:3.10-slim

# Set environment variables (grouped in one instruction for readability).
# PATH gains ~/.local/bin because pip installs run with --user.
ENV PYTHONUNBUFFERED=1 \
    HOME=/home/appuser \
    APP_HOME=/home/appuser/app \
    PATH="/home/appuser/.local/bin:${PATH}" \
    HF_HOME="/home/appuser/.cache/huggingface"

# Install system dependencies in a single layer; --no-install-recommends
# keeps the image slim and the apt lists are removed in the same RUN so
# they never persist in a layer (packages sorted alphabetically).
RUN apt-get update && apt-get install -y --no-install-recommends \
        build-essential \
        cmake \
        curl \
        git \
        wget \
    && rm -rf /var/lib/apt/lists/*

# Create a non-root user (stable UID 1000) and hand it its home tree
RUN useradd -m -u 1000 appuser && \
    mkdir -p $APP_HOME && \
    chown -R appuser:appuser /home/appuser

# All remaining steps run unprivileged
USER appuser
WORKDIR $APP_HOME

# Create the directories the app writes to at runtime
RUN mkdir -p \
        $APP_HOME/.chainlit/files \
        $APP_HOME/mydb \
        $APP_HOME/models \
        $HF_HOME

# Download the model with huggingface-cli. Done before copying the app
# source so routine code changes never invalidate this multi-GB layer.
RUN pip install --user --no-cache-dir huggingface_hub && \
    huggingface-cli download bartowski/Meta-Llama-3.1-8B-Instruct-GGUF \
        Meta-Llama-3.1-8B-Instruct-IQ2_M.gguf \
        --local-dir $APP_HOME/models/ && \
    mv $APP_HOME/models/Meta-Llama-3.1-8B-Instruct-IQ2_M.gguf $APP_HOME/models/llama-model.gguf

# Install Python dependencies from the manifest alone so this layer stays
# cached until requirements.txt itself changes
COPY --chown=appuser:appuser requirements.txt $APP_HOME/
RUN pip install --user --no-cache-dir -r requirements.txt

# Copy the application files last — the most frequently changing layer
COPY --chown=appuser:appuser . $APP_HOME/

# Create the chainlit.md landing page. printf '%b' expands the \n escapes
# portably (echo's escape handling is shell-dependent).
# NOTE(review): the "π" looks like a mangled emoji from the original — confirm intended glyph.
RUN printf '%b\n' "# Welcome to RAG Chainlit Application! π\n\nThis is a Retrieval-Augmented Generation application using Llama.cpp." > $APP_HOME/chainlit.md

# Expose port 7860 (documentation only; Spaces routes traffic to it)
EXPOSE 7860

# Start the Chainlit app headless (-h) on the exposed port (exec form,
# so chainlit is PID 1 and receives SIGTERM from docker stop)
CMD ["chainlit", "run", "app.py", "--port", "7860", "-h"]