Commit 7918532
Parent(s): 627a9fc

Update docker file

Dockerfile CHANGED (+24 -7)
@@ -105,6 +105,14 @@
 # Use a base image with Python
 FROM python:3.10
 
+# Set environment variables
+ENV PYTHONUNBUFFERED=1
+ENV PYTHONPATH=/app
+ENV OLLAMA_HOST=0.0.0.0
+ENV TRANSFORMERS_CACHE=/home/appuser/.cache/huggingface
+ENV HF_HOME=/home/appuser/.cache/huggingface
+ENV API_BASE_URL=http://localhost:8000
+
 # Set the working directory
 WORKDIR /app
 
@@ -115,9 +123,11 @@ RUN useradd -m -u 1000 appuser
 RUN apt update && apt install -y curl && \
     curl -fsSL https://ollama.ai/install.sh | sh
 
-# Create
+# Create necessary directories and set permissions
 RUN mkdir -p /home/appuser/.ollama && \
-
+    mkdir -p /home/appuser/.cache/huggingface && \
+    chown -R appuser:appuser /home/appuser/.ollama && \
+    chown -R appuser:appuser /home/appuser/.cache
 
 # Install Python dependencies
 COPY requirements.txt requirements.txt
@@ -128,12 +138,19 @@ COPY . .
 
 # Create a more robust startup script
 RUN echo '#!/bin/bash\n\
+set -e\n\
+\n\
+echo "Starting Healthcare AI Assistant..."\n\
+\n\
+# Start Ollama server\n\
 echo "Starting Ollama server..."\n\
 ollama serve &\n\
+OLLAMA_PID=$!\n\
 \n\
 # Wait for Ollama server to be ready\n\
+echo "Waiting for Ollama server to be ready..."\n\
 until curl -s http://localhost:11434/api/tags >/dev/null; do\n\
-    echo "Waiting for Ollama server
+    echo "Waiting for Ollama server..."\n\
     sleep 2\n\
 done\n\
 \n\
@@ -141,8 +158,8 @@ echo "Pulling Mistral model..."\n\
 ollama pull mistral\n\
 \n\
 echo "Starting FastAPI application..."\n\
-uvicorn main:app --host 0.0.0.0 --port 7860
-
+exec uvicorn main:app --host 0.0.0.0 --port 7860 --log-level info\n\
+' > start.sh && chmod +x start.sh
 
 # Set ownership of the application files
 RUN chown -R appuser:appuser /app
@@ -150,8 +167,8 @@ RUN chown -R appuser:appuser /app
 # Switch to non-root user
 USER appuser
 
-# Expose
-EXPOSE 7860
+# Expose both ports
+EXPOSE 7860 11434
 
 # Set the HOME environment variable
 ENV HOME=/home/appuser