amaye15 committed
Commit: 2859561 · Parent(s): 82079c3
test

Dockerfile CHANGED (+145 -5)
@@ -1,3 +1,140 @@
+# # syntax=docker/dockerfile:1.4
+
+# ARG TARGETPLATFORM
+# ARG BUILDPLATFORM
+
+# # Other build arguments
+# ARG PYTHON_VERSION=3.10
+
+# # Base stage with system dependencies
+# FROM python:${PYTHON_VERSION}-slim as base
+
+# # Declare ARG variables again within the build stage
+# ARG INSTALL_TYPE=basic
+# ARG ENABLE_GPU=false
+
+# # Platform-specific labels
+# LABEL maintainer="unclecode"
+# LABEL description="🔥🕷️ Crawl4AI: Open-source LLM Friendly Web Crawler & scraper"
+# LABEL version="1.0"
+
+# # Environment setup
+# ENV PYTHONUNBUFFERED=1 \
+# PYTHONDONTWRITEBYTECODE=1 \
+# PIP_NO_CACHE_DIR=1 \
+# PIP_DISABLE_PIP_VERSION_CHECK=1 \
+# PIP_DEFAULT_TIMEOUT=100 \
+# DEBIAN_FRONTEND=noninteractive
+
+# # Install system dependencies
+# RUN apt-get update && apt-get install -y --no-install-recommends \
+# build-essential \
+# curl \
+# wget \
+# gnupg \
+# git \
+# cmake \
+# pkg-config \
+# python3-dev \
+# libjpeg-dev \
+# libpng-dev \
+# && rm -rf /var/lib/apt/lists/*
+
+# # Playwright system dependencies for Linux
+# RUN apt-get update && apt-get install -y --no-install-recommends \
+# libglib2.0-0 \
+# libnss3 \
+# libnspr4 \
+# libatk1.0-0 \
+# libatk-bridge2.0-0 \
+# libcups2 \
+# libdrm2 \
+# libdbus-1-3 \
+# libxcb1 \
+# libxkbcommon0 \
+# libx11-6 \
+# libxcomposite1 \
+# libxdamage1 \
+# libxext6 \
+# libxfixes3 \
+# libxrandr2 \
+# libgbm1 \
+# libpango-1.0-0 \
+# libcairo2 \
+# libasound2 \
+# libatspi2.0-0 \
+# && rm -rf /var/lib/apt/lists/*
+
+# # GPU support if enabled and architecture is supported
+# RUN if [ "$ENABLE_GPU" = "true" ] && [ "$TARGETPLATFORM" = "linux/amd64" ] ; then \
+# apt-get update && apt-get install -y --no-install-recommends \
+# nvidia-cuda-toolkit \
+# && rm -rf /var/lib/apt/lists/* ; \
+# else \
+# echo "Skipping NVIDIA CUDA Toolkit installation (unsupported platform or GPU disabled)"; \
+# fi
+
+# # Create and set working directory
+# WORKDIR /app
+
+# # Copy the entire project
+# COPY . .
+
+# # Install base requirements
+# RUN pip install --no-cache-dir -r requirements.txt
+
+# # Install required library for FastAPI
+# RUN pip install fastapi uvicorn psutil
+
+# # Install ML dependencies first for better layer caching
+# RUN if [ "$INSTALL_TYPE" = "all" ] ; then \
+# pip install --no-cache-dir \
+# torch \
+# torchvision \
+# torchaudio \
+# scikit-learn \
+# nltk \
+# transformers \
+# tokenizers && \
+# python -m nltk.downloader punkt stopwords ; \
+# fi
+
+# # Install the package
+# RUN if [ "$INSTALL_TYPE" = "all" ] ; then \
+# pip install ".[all]" && \
+# python -m crawl4ai.model_loader ; \
+# elif [ "$INSTALL_TYPE" = "torch" ] ; then \
+# pip install ".[torch]" ; \
+# elif [ "$INSTALL_TYPE" = "transformer" ] ; then \
+# pip install ".[transformer]" && \
+# python -m crawl4ai.model_loader ; \
+# else \
+# pip install "." ; \
+# fi
+
+# # Install MkDocs and required plugins
+# RUN pip install --no-cache-dir \
+# mkdocs \
+# mkdocs-material \
+# mkdocs-terminal \
+# pymdown-extensions
+
+# # Build MkDocs documentation
+# RUN mkdocs build
+
+# # Install Playwright and browsers
+# RUN if [ "$TARGETPLATFORM" = "linux/amd64" ]; then \
+# playwright install chromium; \
+# elif [ "$TARGETPLATFORM" = "linux/arm64" ]; then \
+# playwright install chromium; \
+# fi
+
+# # Expose port
+# EXPOSE 8000 11235 9222 8080
+
+# # Start the FastAPI server
+# CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "11235"]
+
 # syntax=docker/dockerfile:1.4
 
 ARG TARGETPLATFORM
@@ -13,6 +150,11 @@ FROM python:${PYTHON_VERSION}-slim as base
 ARG INSTALL_TYPE=basic
 ARG ENABLE_GPU=false
 
+# Add a non-root user
+RUN useradd -m -u 1000 user
+USER user
+ENV PATH="/home/user/.local/bin:$PATH"
+
 # Platform-specific labels
 LABEL maintainer="unclecode"
 LABEL description="🔥🕷️ Crawl4AI: Open-source LLM Friendly Web Crawler & scraper"
@@ -77,8 +219,8 @@ fi
 # Create and set working directory
 WORKDIR /app
 
-# Copy the entire project
-COPY . .
+# Copy the entire project with correct ownership
+COPY --chown=user . .
 
 # Install base requirements
 RUN pip install --no-cache-dir -r requirements.txt
@@ -112,7 +254,7 @@ RUN if [ "$INSTALL_TYPE" = "all" ] ; then \
 pip install "." ; \
 fi
 
-
+# Install MkDocs and required plugins
 RUN pip install --no-cache-dir \
 mkdocs \
 mkdocs-material \
@@ -132,7 +274,5 @@ RUN if [ "$TARGETPLATFORM" = "linux/amd64" ]; then \
 # Expose port
 EXPOSE 8000 11235 9222 8080
 
-RUN python setup.py install
-
 # Start the FastAPI server
 CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "11235"]
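For anyone trying the change locally, here is a minimal sketch of how an image from this Dockerfile might be built and run. The `crawl4ai:test` tag and the build-argument values are hypothetical examples; the `INSTALL_TYPE` and `ENABLE_GPU` arguments, the `TARGETPLATFORM` handling via buildx, and port 11235 come from the Dockerfile above.

  # Build for one platform with buildx; buildx sets TARGETPLATFORM/BUILDPLATFORM automatically.
  docker buildx build --platform linux/amd64 \
    --build-arg INSTALL_TYPE=all \
    --build-arg ENABLE_GPU=false \
    -t crawl4ai:test --load .

  # Run the FastAPI server declared in CMD and publish its port.
  docker run --rm -p 11235:11235 crawl4ai:test

Since the image now switches to the non-root `user` (UID 1000) before any `pip install` runs, pip falls back to user-level installs under `/home/user/.local`, which is presumably why the new stage also prepends `/home/user/.local/bin` to `PATH` and copies the project with `--chown=user`.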