Update Dockerfile
Dockerfile  CHANGED  (+81 -1)
@@ -6,4 +6,84 @@ ENV HOME=/root
 # CUDA paths for when running on GPU (RunPod)
 ENV CUDA_HOME=/usr/local/cuda
 ENV PATH=${CUDA_HOME}/bin:${PATH}
-ENV LD_LIBRARY_PATH=${CUDA_HOME}/lib64:${LD_LIBRARY_PATH}
+ENV LD_LIBRARY_PATH=${CUDA_HOME}/lib64:${LD_LIBRARY_PATH}
+
+# Install dependencies, add deadsnakes PPA for Python 3.12
+RUN apt-get update && \
+    apt-get install -y software-properties-common && \
+    add-apt-repository ppa:deadsnakes/ppa && \
+    apt-get update && \
+    apt-get install -y \
+        curl \
+        wget \
+        gpg \
+        apt-transport-https \
+        git \
+        python3.12 \
+        python3.12-venv \
+        python3.12-dev \
+        python3-pip && \
+    # Install Node.js 22.x from NodeSource
+    curl -fsSL https://deb.nodesource.com/setup_22.x | bash - && \
+    apt-get install -y nodejs && \
+    # Make Python 3.12 the default
+    update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.12 1 && \
+    update-alternatives --set python3 /usr/bin/python3.12 && \
+    # Install pip for Python 3.12
+    curl -sS https://bootstrap.pypa.io/get-pip.py | python3.12 && \
+    # Upgrade pip to latest version
+    python3 -m pip install --no-cache-dir --upgrade pip setuptools wheel && \
+    # Install code-server
+    curl -fsSL https://code-server.dev/install.sh | sh && \
+    apt-get clean && \
+    rm -rf /var/lib/apt/lists/*
+
+# Install global npm packages
+RUN npm install -g @anthropic-ai/claude-code @anthropic-ai/dxt
+
+# Create a directory for the workspace
+RUN mkdir -p /workspace
+
+# Copy requirements file (if it exists)
+COPY requirements.txt* /workspace/
+
+# Install Python packages if requirements.txt exists
+RUN if [ -f /workspace/requirements.txt ]; then \
+        pip3 install --no-cache-dir -r /workspace/requirements.txt; \
+    fi
+
+# Create configuration directory for code-server and Ollama
+RUN mkdir -p /root/.config/code-server /root/.ollama
+
+# Configure code-server to run on port 8443
+RUN echo "bind-addr: 0.0.0.0:8443\nauth: none\ncert: false" > /root/.config/code-server/config.yaml
+
+# Install Ollama after code-server is set up
+RUN curl -fsSL https://ollama.com/install.sh | sh || true
+
+# Install some useful VS Code extensions
+RUN code-server --install-extension ms-python.python && \
+    code-server --install-extension ritwickdey.LiveServer && \
+    code-server --install-extension ms-toolsai.jupyter
+
+# Create a startup script
+RUN echo '#!/bin/bash\n\
+# Start Ollama in the background\n\
+/usr/local/bin/ollama serve &\n\
+\n\
+# Give Ollama a moment to start\n\
+sleep 2\n\
+\n\
+# Start code-server in the foreground\n\
+exec code-server --disable-telemetry --bind-addr 0.0.0.0:8443 /workspace\n\
+' > /start.sh && \
+    chmod +x /start.sh
+
+# Expose ports for both services
+EXPOSE 8443 11434
+
+# Set the workspace as working directory
+WORKDIR /workspace
+
+# Start both services
+CMD ["/start.sh"]
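
For reference, the two RUN echo instructions above generate a code-server config file and the container's startup script. Docker's trailing-backslash line continuations join the multi-line echo into a single command, and the \n escapes are expanded by the image's /bin/sh (assumed here to be dash, the Ubuntu default, whose builtin echo interprets them), so /root/.config/code-server/config.yaml should come out as:

bind-addr: 0.0.0.0:8443
auth: none
cert: false

and /start.sh as:

#!/bin/bash
# Start Ollama in the background
/usr/local/bin/ollama serve &

# Give Ollama a moment to start
sleep 2

# Start code-server in the foreground
exec code-server --disable-telemetry --bind-addr 0.0.0.0:8443 /workspace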
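
A minimal sketch of how the resulting image might be built and run, based on the EXPOSE and CMD lines above; the image tag dev-space is illustrative, and GPU passthrough only applies on a host with the NVIDIA container toolkit (e.g. RunPod):

# Build from the directory containing this Dockerfile (and, optionally, requirements.txt)
docker build -t dev-space .

# Publish code-server (8443) and the Ollama API (11434); add --gpus all on a GPU host
docker run -d -p 8443:8443 -p 11434:11434 dev-space

Because config.yaml sets auth: none, code-server is then reachable without a password at http://localhost:8443.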