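# Earlier version of this Dockerfile, kept commented out for reference: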
# # Use a base image with Python
# FROM python:3.10
# # Set the working directory
# WORKDIR /app
# # Create a non-root user
# RUN useradd -m -u 1000 appuser
# # Install system dependencies
# RUN apt update && apt install -y curl && \
#     curl -fsSL https://ollama.ai/install.sh | sh
# # Create .ollama directory and set permissions
# RUN mkdir -p /home/appuser/.ollama && \
#     chown -R appuser:appuser /home/appuser/.ollama
# # Install Python dependencies
# COPY requirements.txt requirements.txt
# RUN pip install --no-cache-dir -r requirements.txt
# # Copy application files
# COPY . .
# # Create a more robust startup script
# RUN echo '#!/bin/bash\n\
# echo "Starting Ollama server..."\n\
# ollama serve &\n\
# \n\
# # Wait for Ollama server to be ready\n\
# until curl -s http://localhost:11434/api/tags >/dev/null; do\n\
# echo "Waiting for Ollama server to be ready..."\n\
# sleep 2\n\
# done\n\
# \n\
# echo "Pulling Mistral model..."\n\
# ollama pull mistral\n\
# \n\
# echo "Starting FastAPI application..."\n\
# uvicorn main:app --host 0.0.0.0 --port 7860' > start.sh && \
#     chmod +x start.sh
# # Set ownership of the application files
# RUN chown -R appuser:appuser /app
# # Switch to non-root user
# USER appuser
# # Expose the port FastAPI will run on
# EXPOSE 7860
# # Set the HOME environment variable
# ENV HOME=/home/appuser
# # Run the startup script
# CMD ["./start.sh"]
# First Dockerfile for a Hugging Face AI Agent with Ollama and FastAPI
# Use a base image with Python
FROM python:3.10
# Set the working directory
WORKDIR /app
# Create a non-root user
RUN useradd -m -u 1000 appuser
# Install system dependencies
RUN apt-get update && apt-get install -y --no-install-recommends curl && \
    curl -fsSL https://ollama.ai/install.sh | sh && \
    rm -rf /var/lib/apt/lists/*
# Create .ollama directory and set permissions
RUN mkdir -p /home/appuser/.ollama && \
    chown -R appuser:appuser /home/appuser/.ollama
# Install Python dependencies
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
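# The requirements.txt copied above is assumed to list at least the web stack
# launched by the startup script below (an illustrative guess; the real file
# may differ):
#   fastapi
#   uvicorn
#   requests   # or httpx, for calling the local Ollama HTTP API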
# Copy application files
COPY --chown=appuser:appuser . .
# Create a more robust startup script
RUN echo '#!/bin/bash\n\
set -e\n\
\n\
# Start Ollama server in background\n\
echo "Starting Ollama server..."\n\
ollama serve &\n\
\n\
# Wait for Ollama server to be ready\n\
echo "Waiting for Ollama server to be ready..."\n\
while ! curl -s http://localhost:11434 >/dev/null; do\n\
sleep 2\n\
done\n\
\n\
echo "Pulling gemma3 model..."\n\
ollama pull gemma3\n\
\n\
echo "Starting FastAPI application..."\n\
exec uvicorn main:app --host 0.0.0.0 --port 7860' > start.sh && \
    chmod +x start.sh
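# Note: the \n escapes in the echo string above are expanded by the Debian
# /bin/sh (dash) echo builtin used in this base image; printf '%b' would be a
# more portable way to write start.sh.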
# Set ownership of the application files
RUN chown -R appuser:appuser /app
# Switch to non-root user
USER appuser
# Expose both Ollama and FastAPI ports
EXPOSE 7860 11434
# Set environment variables
ENV HOME=/home/appuser
ENV OLLAMA_HOST=0.0.0.0
ENV PYTHONUNBUFFERED=1
# Run the startup script
CMD ["./start.sh"]
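# For reference, a minimal sketch of the main.py that `uvicorn main:app` above
# assumes. The real application is copied in by `COPY . .` and may differ; the
# /generate endpoint and Prompt model here are illustrative only. It forwards
# prompts to the local Ollama HTTP API started by start.sh:
#
#   import requests
#   from fastapi import FastAPI
#   from pydantic import BaseModel
#
#   app = FastAPI()
#
#   class Prompt(BaseModel):
#       prompt: str
#
#   @app.post("/generate")
#   def generate(req: Prompt):
#       # Ollama serves its REST API on port 11434 inside the container
#       r = requests.post(
#           "http://localhost:11434/api/generate",
#           json={"model": "gemma3", "prompt": req.prompt, "stream": False},
#           timeout=300,
#       )
#       r.raise_for_status()
#       return {"response": r.json()["response"]}
#
# To build and run locally (the image tag is arbitrary):
#   docker build -t ollama-fastapi-agent .
#   docker run -p 7860:7860 -p 11434:11434 ollama-fastapi-agent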