miike-ai committed on
Commit 2a578a9 · verified · 1 Parent(s): fcfcf5d

Update Dockerfile

Files changed (1)
  1. Dockerfile +9 -19
Dockerfile CHANGED
@@ -14,12 +14,12 @@ RUN apt-get update && \
     # Add Python 3.12
    add-apt-repository ppa:deadsnakes/ppa && \
    apt-get update && \
-   apt-get install -y python3.12 python3.12-venv python3.12-dev && \
+   apt-get install -y python3.11 python3.11-venv python3.11-dev && \
    # Make Python 3.12 the default
    update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.12 1 && \
-   update-alternatives --set python3 /usr/bin/python3.12 && \
+   update-alternatives --set python3 /usr/bin/python3.11 && \
    # Install pip for Python 3.12
-   curl -sS https://bootstrap.pypa.io/get-pip.py | python3.12 && \
+   curl -sS https://bootstrap.pypa.io/get-pip.py | python3.11 && \
    # Upgrade pip
    python3 -m pip install --upgrade pip setuptools wheel && \
    # Install Node.js 22.x from NodeSource
@@ -36,15 +36,11 @@ RUN npm install -g @anthropic-ai/claude-code @anthropic-ai/dxt
 # Create a directory for the workspace
 RUN mkdir -p /workspace
 
-# Copy requirements files (if they exist)
-COPY requirements*.txt /workspace/
-
-# Create a requirements-cpu.txt without vllm for build time
-RUN if [ -f /workspace/requirements.txt ]; then \
-        grep -v "vllm" /workspace/requirements.txt | grep -v "xformers==.*+.*" > /workspace/requirements-cpu.txt || true; \
-        echo "xformers==0.0.32" >> /workspace/requirements-cpu.txt; \
-        pip3 install --no-cache-dir -r /workspace/requirements-cpu.txt || true; \
-    fi
+# Copy and install requirements - fail loudly if not found
+COPY requirements.txt /workspace/requirements.txt
+RUN echo "Installing Python requirements..." && \
+    pip3 install --no-cache-dir -r /workspace/requirements.txt && \
+    echo "Requirements installed successfully!"
 
 # Create configuration directory for code-server and Ollama
 RUN mkdir -p /root/.config/code-server /root/.ollama
@@ -60,14 +56,8 @@ RUN code-server --install-extension ms-python.python && \
    code-server --install-extension ritwickdey.LiveServer && \
    code-server --install-extension ms-toolsai.jupyter
 
-# Create a startup script that handles vllm installation on GPU
+# Create a startup script
 RUN echo '#!/bin/bash\n\
-# Check if running on GPU and install vllm if needed\n\
-if [ -d "/usr/local/cuda" ] && ! python3 -c "import vllm" 2>/dev/null; then\n\
-    echo "GPU detected, installing vllm for Blackwell support..."\n\
-    pip3 install git+https://github.com/vllm-project/vllm.git@04e1642e3 || echo "vllm installation failed, continuing..."\n\
-fi\n\
-\n\
 # Start Ollama in the background\n\
 /usr/local/bin/ollama serve &\n\
 \n\
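
A quick way to sanity-check the rebuilt image is to confirm which interpreter ends up as the default python3 and that the requirements step really ran. This is a minimal sketch, not part of the commit: the image tag workspace:dev is hypothetical, and it assumes a requirements.txt sits next to the Dockerfile in the build context (the new COPY/RUN fails the build otherwise).

    # Build the image; the tag is hypothetical.
    docker build -t workspace:dev .

    # update-alternatives now points python3 at python3.11, so this should report Python 3.11.x.
    docker run --rm workspace:dev python3 --version

    # The requirements install no longer swallows errors, so a successful build
    # implies every package in requirements.txt was installed; list them to confirm.
    docker run --rm workspace:dev python3 -m pip list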