Update Dockerfile
Dockerfile CHANGED (+70 -41)
@@ -1,79 +1,108 @@
-FROM

# Set environment variables
ENV DEBIAN_FRONTEND=noninteractive
ENV HOME=/root
-# CUDA paths for when running on GPU (RunPod)
ENV CUDA_HOME=/usr/local/cuda
ENV PATH=${CUDA_HOME}/bin:${PATH}
ENV LD_LIBRARY_PATH=${CUDA_HOME}/lib64:${LD_LIBRARY_PATH}

-# Install dependencies
RUN apt-get update && \
-    apt-get install -y
-
-
    apt-get update && \
-    apt-get install -y python3.
-
-    update-alternatives --
-
-    # Install pip for Python 3.12
-    curl -sS https://bootstrap.pypa.io/get-pip.py | python3.11 && \
-    # Upgrade pip
-    python3 -m pip install --upgrade pip setuptools wheel && \
-    # Install Node.js 22.x from NodeSource
-    curl -fsSL https://deb.nodesource.com/setup_22.x | bash - && \
-    apt-get install -y nodejs && \
-    # Install code-server
-    curl -fsSL https://code-server.dev/install.sh | sh && \
-    apt-get clean && \
-    rm -rf /var/lib/apt/lists/*

-# Install
-RUN

-# Create
RUN mkdir -p /workspace

-#
-
-
-
-

-#
-RUN

-#
-RUN

-#
RUN curl -fsSL https://ollama.com/install.sh | sh || true

-#
RUN code-server --install-extension ms-python.python && \
    code-server --install-extension ritwickdey.LiveServer && \
    code-server --install-extension ms-toolsai.jupyter

-# Create
RUN echo '#!/bin/bash\n\
# Start Ollama in the background\n\
/usr/local/bin/ollama serve &\n\
\n\
# Give Ollama a moment to start\n\
sleep 2\n\
\n\
-# Start code-server
exec code-server --disable-telemetry --bind-addr 0.0.0.0:8443 /workspace\n\
' > /start.sh && \
    chmod +x /start.sh

-# Expose ports
EXPOSE 8443 11434

-#
-

-# Start both services
CMD ["/start.sh"]
+FROM nvidia/cuda:12.8.0-devel-ubuntu22.04

# Set environment variables
ENV DEBIAN_FRONTEND=noninteractive
ENV HOME=/root
ENV CUDA_HOME=/usr/local/cuda
ENV PATH=${CUDA_HOME}/bin:${PATH}
ENV LD_LIBRARY_PATH=${CUDA_HOME}/lib64:${LD_LIBRARY_PATH}
+ENV TORCH_CUDA_ARCH_LIST="12.0"

+# Install system dependencies and build tools
RUN apt-get update && \
+    apt-get install -y \
+    curl wget gpg apt-transport-https git software-properties-common \
+    build-essential cmake ninja-build \
+    libopenblas-dev libomp-dev \
+    && rm -rf /var/lib/apt/lists/*
+
+# Install Python 3.12 (as recommended)
+RUN add-apt-repository ppa:deadsnakes/ppa && \
    apt-get update && \
+    apt-get install -y python3.12 python3.12-venv python3.12-dev && \
+    update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.12 1 && \
+    update-alternatives --set python3 /usr/bin/python3.12 && \
+    curl -sS https://bootstrap.pypa.io/get-pip.py | python3.12

+# Install uv for faster dependency management
+RUN curl -LsSf https://astral.sh/uv/install.sh | sh && \
+    echo 'export PATH="$HOME/.local/bin:$PATH"' >> ~/.bashrc

+# Create workspace
RUN mkdir -p /workspace
+WORKDIR /workspace
+
+# CRITICAL: Install in the EXACT order specified in the README
+# Step 1: Create venv
+RUN python3 -m venv /opt/unsloth-env
+ENV PATH="/opt/unsloth-env/bin:$PATH"
+
+# Step 2: Install vllm with cu128 (MUST be first)
+RUN /opt/unsloth-env/bin/pip install --upgrade pip setuptools wheel && \
+    /opt/unsloth-env/bin/pip install -U vllm --extra-index-url https://wheels.vllm.ai/nightly
+
+# Step 3: Install unsloth dependencies
+RUN /opt/unsloth-env/bin/pip install unsloth unsloth_zoo bitsandbytes

+# Step 4: Build xformers from source (no Blackwell wheels exist yet)
+RUN git clone --depth=1 https://github.com/facebookresearch/xformers --recursive /tmp/xformers && \
+    cd /tmp/xformers && \
+    /opt/unsloth-env/bin/pip uninstall -y xformers && \
+    /opt/unsloth-env/bin/python setup.py install && \
+    rm -rf /tmp/xformers

+# Step 5: Update triton to >=3.3.1 for Blackwell
+RUN /opt/unsloth-env/bin/pip install -U "triton>=3.3.1"

+# Step 6: Pin transformers to avoid gradient checkpointing bug
+RUN /opt/unsloth-env/bin/pip install -U "transformers==4.52.4"

+# Step 7: Might need to downgrade numpy
+RUN /opt/unsloth-env/bin/pip install "numpy<=2.2"
+
+# Now install your dev tools (code-server, Node.js, etc)
+RUN curl -fsSL https://deb.nodesource.com/setup_22.x | bash - && \
+    apt-get install -y nodejs && \
+    curl -fsSL https://code-server.dev/install.sh | sh && \
+    npm install -g @anthropic-ai/claude-code @anthropic-ai/dxt && \
+    apt-get clean && \
+    rm -rf /var/lib/apt/lists/*
+
+# Install Ollama
RUN curl -fsSL https://ollama.com/install.sh | sh || true

+# Configure code-server
+RUN mkdir -p /root/.config/code-server /root/.ollama && \
+    echo "bind-addr: 0.0.0.0:8443\nauth: none\ncert: false" > /root/.config/code-server/config.yaml
+
+# Install VS Code extensions
RUN code-server --install-extension ms-python.python && \
    code-server --install-extension ritwickdey.LiveServer && \
    code-server --install-extension ms-toolsai.jupyter

+# Create startup script with proper env activation
RUN echo '#!/bin/bash\n\
+# Activate the unsloth environment\n\
+source /opt/unsloth-env/bin/activate\n\
+\n\
# Start Ollama in the background\n\
/usr/local/bin/ollama serve &\n\
\n\
# Give Ollama a moment to start\n\
sleep 2\n\
\n\
+# Start code-server with the activated environment\n\
exec code-server --disable-telemetry --bind-addr 0.0.0.0:8443 /workspace\n\
' > /start.sh && \
    chmod +x /start.sh

+# Expose ports
EXPOSE 8443 11434

+# Copy any user requirements AFTER base installation
+COPY requirements.txt* /workspace/
+RUN if [ -f /workspace/requirements.txt ]; then \
+    /opt/unsloth-env/bin/pip install -r /workspace/requirements.txt; \
+    fi

CMD ["/start.sh"]
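
For local testing, a minimal build-and-run sketch; the image tag, volume path, and sanity-check command are illustrative, and --gpus all assumes the NVIDIA Container Toolkit is available on the host:

# Build the image (tag is illustrative)
docker build -t unsloth-blackwell-dev .

# Run with GPU access, exposing code-server (8443) and Ollama (11434);
# mounting a host directory keeps /workspace contents across restarts
docker run --gpus all -p 8443:8443 -p 11434:11434 \
    -v "$(pwd)/workspace:/workspace" \
    unsloth-blackwell-dev

# Optional sanity check of the CUDA stack inside the running container
# (torch is pulled in as a dependency of vllm/unsloth)
docker exec -it <container-id> /opt/unsloth-env/bin/python -c \
    "import torch; print(torch.__version__, torch.cuda.is_available())"

With the config.yaml written above (auth: none, bind-addr 0.0.0.0:8443), code-server should then be reachable at http://localhost:8443 without a password.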