AI-RESEARCHER-2024 committed on
Commit
45331d7
β€’
1 Parent(s): 76ccd36

Update Dockerfile

Browse files
Files changed (1) hide show
  1. Dockerfile +36 -18
Dockerfile CHANGED
@@ -1,37 +1,55 @@
1
  # Use an official Python runtime as a parent image
2
  FROM python:3.10-slim
3
 
 
 
 
 
 
 
4
  # Install system dependencies
5
  RUN apt-get update && apt-get install -y \
6
  build-essential \
 
 
 
7
  curl \
8
  && rm -rf /var/lib/apt/lists/*
9
 
10
- # Install Ollama
11
- RUN curl -fsSL https://ollama.ai/install.sh | sh
 
 
 
 
 
 
12
 
13
- # Set working directory
14
- WORKDIR /app
15
 
16
- # Copy the current directory contents into the container
17
- COPY . .
 
18
 
19
  # Install Python dependencies
20
- RUN pip install --no-cache-dir \
21
  chainlit \
22
  langchain \
23
- langchain_community \
24
- langchain_core \
25
  chromadb \
26
- pydantic \
27
- typing-extensions
 
28
 
29
- # Make port 8000 available to the world outside this container
30
- EXPOSE 8000
 
31
 
32
- # Create a script to start both Ollama and the Chainlit app
33
- RUN echo '#!/bin/bash\nollama serve &\nsleep 5\nollama pull llama3.2\nollama pull llama3.2\nchainlit run app.py --host 0.0.0.0 --port 8000' > start.sh
34
- RUN chmod +x start.sh
 
 
35
 
36
- # Run the script when the container launches
37
- CMD ["./start.sh"]
 
1
  # Use an official Python runtime as a parent image
2
  FROM python:3.10-slim
3
 
4
+ # Set environment variables
5
+ ENV PYTHONUNBUFFERED=1
6
+ ENV HOME=/home/appuser
7
+ ENV APP_HOME=/home/appuser/app
8
+ ENV PATH="/home/appuser/.local/bin:${PATH}"
9
+
10
  # Install system dependencies
11
  RUN apt-get update && apt-get install -y \
12
  build-essential \
13
+ cmake \
14
+ git \
15
+ wget \
16
  curl \
17
  && rm -rf /var/lib/apt/lists/*
18
 
19
+ # Create a non-root user
20
+ RUN useradd -m -u 1000 appuser && \
21
+ mkdir -p $APP_HOME && \
22
+ chown -R appuser:appuser /home/appuser
23
+
24
+ # Switch to the non-root user
25
+ USER appuser
26
+ WORKDIR $APP_HOME
27
 
28
+ # Copy the application files
29
+ COPY --chown=appuser:appuser . $APP_HOME/
30
 
31
+ # Download the model
32
+ RUN mkdir -p $APP_HOME/models && \
33
+ wget -O $APP_HOME/models/llama-model.gguf https://huggingface.co/bartowski/Meta-Llama-3.1-8B-Instruct-GGUF/resolve/main/Meta-Llama-3.1-8B-Instruct-IQ2_M.gguf
34
 
35
  # Install Python dependencies
36
+ RUN pip install --user --no-cache-dir \
37
  chainlit \
38
  langchain \
 
 
39
  chromadb \
40
+ sentence-transformers \
41
+ llama-cpp-python \
42
+ --extra-index-url https://download.pytorch.org/whl/cpu
43
 
44
+ # Create necessary directories with correct permissions
45
+ RUN mkdir -p $APP_HOME/.chainlit/files && \
46
+ mkdir -p $APP_HOME/mydb
47
 
48
+ # Create a chainlit.md file
49
+ RUN echo "# Welcome to RAG Chainlit Application! πŸ‘‹\n\nThis is a Retrieval-Augmented Generation application using Llama.cpp." > $APP_HOME/chainlit.md
50
+
51
+ # Expose port 8000
52
+ EXPOSE 8000
53
 
54
+ # Start the Chainlit app
55
+ CMD ["chainlit", "run", "app.py", "--host", "0.0.0.0", "--port", "8000"]