# syntax=docker/dockerfile:1
# GPU-enabled image running an Ollama server plus a Streamlit front-end
# (accent-identification app pulling a SpeechBrain interface from Hugging Face).
FROM nvidia/cuda:11.8.0-cudnn8-runtime-ubuntu22.04

# PYTHONDONTWRITEBYTECODE: skip .pyc files; PYTHONUNBUFFERED: stream logs
# straight to the container's stdout/stderr.
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1

# System dependencies (as root). DEBIAN_FRONTEND is scoped to this layer only
# so the noninteractive setting does not leak into the runtime environment.
RUN DEBIAN_FRONTEND=noninteractive apt-get update \
    && apt-get install -y --no-install-recommends \
        ca-certificates \
        curl \
        ffmpeg \
        git \
        python3 \
        python3-pip \
        sudo \
        wget \
    && rm -rf /var/lib/apt/lists/*

# Provide a `python` alias and a current pip — done exactly once, while root.
# (The original repeated both steps after USER ollama; the second `ln -s`
# fails with "File exists" and breaks the build.)
RUN ln -s /usr/bin/python3 /usr/bin/python \
    && pip install --no-cache-dir --upgrade pip

# Create the non-root user BEFORE the Ollama installer runs, so the installer
# reuses it instead of creating a conflicting system account.
RUN useradd -ms /bin/bash ollama

# Install Ollama while still root: install.sh writes to /usr/local and invokes
# sudo when run unprivileged, which would fail for a user not in sudoers.
# NOTE(review): curl | sh is unpinned — consider verifying a pinned release.
RUN curl -fsSL https://ollama.com/install.sh | sh

# Everything below runs as the unprivileged user.
USER ollama
WORKDIR /home/ollama/app

# Copy the dependency manifest first so the pip layer stays cached while the
# application source changes. --chown so the non-root user owns the files.
COPY --chown=ollama:ollama requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

COPY --chown=ollama:ollama . .

# Fetch the Hugging Face custom interface file. Destination spelling
# "custome_interface.py" is presumably what the app imports — TODO confirm
# before renaming. (Without --chown on COPY above, this write into src/
# would fail with a permission error.)
RUN wget -O src/custome_interface.py \
    https://huggingface.co/Jzuluaga/accent-id-commonaccent_xlsr-en-english/resolve/main/custom_interface.py

# Streamlit's default port (documentation only; publish with -p at run time).
EXPOSE 8501

# Streamlit ships a built-in health endpoint; generous start period because
# the first boot also pulls the gemma3 model.
HEALTHCHECK --interval=30s --timeout=5s --start-period=120s --retries=3 \
    CMD curl -fsS http://localhost:8501/_stcore/health || exit 1

# Start the Ollama server in the background, pull the model, then launch the
# app. Exec form (JSON array) so bash is PID 1 and receives docker stop's
# SIGTERM directly instead of being wrapped in an extra /bin/sh -c.
CMD ["bash", "-c", "ollama serve & sleep 5 && ollama pull gemma3 && streamlit run streamlit_app.py --server.port=8501 --server.address=0.0.0.0"]