AI-RESEARCHER-2024 committed on
Commit dc2f415 • 1 Parent(s): 0292d64

Update Dockerfile

Files changed (1)
  1. Dockerfile +11 -7
Dockerfile CHANGED
@@ -6,6 +6,7 @@ ENV PYTHONUNBUFFERED=1
 ENV HOME=/home/appuser
 ENV APP_HOME=/home/appuser/app
 ENV PATH="/home/appuser/.local/bin:${PATH}"
+ENV HF_HOME="/home/appuser/.cache/huggingface"
 
 # Install system dependencies
 RUN apt-get update && apt-get install -y \
@@ -28,17 +29,20 @@ WORKDIR $APP_HOME
 # Copy the application files
 COPY --chown=appuser:appuser . $APP_HOME/
 
-# Download the model
-RUN mkdir -p $APP_HOME/models && \
-    wget -O $APP_HOME/models/llama-model.gguf https://huggingface.co/bartowski/Meta-Llama-3.1-8B-Instruct-GGUF/resolve/main/Meta-Llama-3.1-8B-Instruct-IQ2_M.gguf
+# Create necessary directories
+RUN mkdir -p $APP_HOME/.chainlit/files && \
+    mkdir -p $APP_HOME/mydb && \
+    mkdir -p $APP_HOME/models && \
+    mkdir -p $HF_HOME
+
+# Download the model using huggingface-cli
+RUN pip install --user --no-cache-dir huggingface_hub && \
+    huggingface-cli download bartowski/Meta-Llama-3.1-8B-Instruct-GGUF Meta-Llama-3.1-8B-Instruct-IQ2_M.gguf --local-dir $APP_HOME/models/ && \
+    mv $APP_HOME/models/Meta-Llama-3.1-8B-Instruct-IQ2_M.gguf $APP_HOME/models/llama-model.gguf
 
 # Install Python dependencies
 RUN pip install --user --no-cache-dir -r requirements.txt
 
-# Create necessary directories with correct permissions
-RUN mkdir -p $APP_HOME/.chainlit/files && \
-    mkdir -p $APP_HOME/mydb
-
 # Create a chainlit.md file
 RUN echo "# Welcome to RAG Chainlit Application! 👋\n\nThis is a Retrieval-Augmented Generation application using Llama.cpp." > $APP_HOME/chainlit.md
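For context, `huggingface-cli download` is backed by the `huggingface_hub` Python API, so the same fetch-and-rename step can be done programmatically. Below is a minimal sketch, not part of this commit: the local `models_dir` path and the final rename are illustrative assumptions mirroring what the Dockerfile does with `--local-dir` and `mv`.

```python
# Hypothetical sketch of the equivalent download via the huggingface_hub API.
import os
import shutil

from huggingface_hub import hf_hub_download

# Assumed target directory, mirroring $APP_HOME/models in the Dockerfile.
models_dir = os.path.expanduser("~/app/models")
os.makedirs(models_dir, exist_ok=True)

# hf_hub_download caches under HF_HOME (set via ENV in the Dockerfile) and,
# with local_dir, places the file in models_dir, returning its path.
downloaded_path = hf_hub_download(
    repo_id="bartowski/Meta-Llama-3.1-8B-Instruct-GGUF",
    filename="Meta-Llama-3.1-8B-Instruct-IQ2_M.gguf",
    local_dir=models_dir,
)

# Rename to the generic filename the application expects, as the `mv` step does.
shutil.move(downloaded_path, os.path.join(models_dir, "llama-model.gguf"))
```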