# Use an official Python runtime as a parent image
FROM python:3.10-slim

# Set environment variables
ENV PYTHONUNBUFFERED=1
ENV HOME=/home/appuser
ENV APP_HOME=/home/appuser/app
ENV PATH="/home/appuser/.local/bin:${PATH}"
ENV HF_HOME="/home/appuser/.cache/huggingface"
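# The PATH entry above lets console scripts installed with `pip install --user`
# (such as huggingface-cli) resolve for appuser, and HF_HOME keeps the
# Hugging Face download cache inside the non-root user's home directory.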

# Install system dependencies
RUN apt-get update && apt-get install -y \
    build-essential \
    cmake \
    git \
    wget \
    curl \
    && rm -rf /var/lib/apt/lists/*
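# Assumption: build-essential and cmake are included so a package such as
# llama-cpp-python (expected in requirements.txt) can be compiled from source
# when no prebuilt wheel is available; git/wget/curl cover fetching sources.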

# Create a non-root user
RUN useradd -m -u 1000 appuser && \
    mkdir -p $APP_HOME && \
    chown -R appuser:appuser /home/appuser
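# UID 1000 matches the user ID Hugging Face Spaces runs Docker containers with;
# owning /home/appuser up front avoids permission errors at runtime.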

# Switch to the non-root user
USER appuser
WORKDIR $APP_HOME

# Copy the application files
COPY --chown=appuser:appuser . $APP_HOME/
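# Everything in the build context is copied here; a .dockerignore entry for any
# large local files (e.g. previously downloaded models) keeps this layer small.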

# Create necessary directories
RUN mkdir -p $APP_HOME/.chainlit/files && \
    mkdir -p $APP_HOME/mydb && \
    mkdir -p $APP_HOME/models && \
    mkdir -p $HF_HOME

# Download the model using huggingface-cli
RUN pip install --user --no-cache-dir huggingface_hub && \
    huggingface-cli download bartowski/Meta-Llama-3.1-8B-Instruct-GGUF Meta-Llama-3.1-8B-Instruct-IQ2_M.gguf --local-dir $APP_HOME/models/ && \
    mv $APP_HOME/models/Meta-Llama-3.1-8B-Instruct-IQ2_M.gguf $APP_HOME/models/llama-model.gguf
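# The GGUF file is renamed to llama-model.gguf, which is assumed to be the fixed
# path app.py loads with Llama.cpp, so the application code does not need to
# know the exact quantization filename.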

# Install Python dependencies
RUN pip install --user --no-cache-dir -r requirements.txt
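# Dependencies go into the user site (~/.local) because the build runs as
# appuser; the matching ~/.local/bin is already on PATH via the ENV block above.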

# Create a chainlit.md file
RUN echo "# Welcome to RAG Chainlit Application! 👋\n\nThis is a Retrieval-Augmented Generation application using Llama.cpp." > $APP_HOME/chainlit.md

# Expose port 7860
EXPOSE 7860
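# 7860 is the default app_port for Hugging Face Docker Spaces, which this image
# appears to target.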

# Start the Chainlit app on port 7860; -h (--headless) skips opening a browser
CMD ["chainlit", "run", "app.py", "--port", "7860", "-h"]