Markos Gogoulos 2025-09-17 10:09:48 +03:00
parent 8d982ace92
commit 78b8eb0bad
5 changed files with 53 additions and 2 deletions

View File

@@ -38,6 +38,7 @@ ENV PATH="$VIRTUAL_ENV/bin:$PATH"
RUN apt-get update -y && \
    apt-get -y upgrade && \
    apt-get install --no-install-recommends -y \
        curl \
        supervisor \
        nginx \
        imagemagick \
@@ -95,7 +96,8 @@ ENV ENABLE_UWSGI='yes' \
    ENABLE_CELERY_BEAT='yes' \
    ENABLE_CELERY_SHORT='yes' \
    ENABLE_CELERY_LONG='yes' \
    ENABLE_MIGRATIONS='yes'
    ENABLE_MIGRATIONS='yes' \
    ENABLE_OLLAMA='no'

EXPOSE 9000 80
@@ -106,8 +108,12 @@ CMD ["./deploy/docker/start.sh"]
############ FULL IMAGE ############
FROM base AS full
ENV ENABLE_OLLAMA='yes'
COPY requirements-full.txt ./
RUN mkdir -p /root/.cache/ && \
    chmod go+rwx /root/ && \
    chmod go+rwx /root/.cache/
RUN uv pip install -r requirements-full.txt
RUN curl -fsSL https://ollama.com/install.sh | sh
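
The base stage now defaults to ENABLE_OLLAMA='no', while the full stage overrides it to 'yes' and installs the Ollama binary via the upstream install script. The variable can still be overridden at container start; a minimal sketch (the container name and port mapping are illustrative assumptions, not part of this commit):

# Sketch only: run the full image but keep the bundled Ollama service disabled
docker run --name mediacms -e ENABLE_OLLAMA=no -p 80:80 mediacms/mediacms:full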

View File

@@ -17,3 +17,8 @@ build-frontend:
test:
	docker compose -f docker-compose-dev.yaml exec --env TESTING=True -T web pytest

build-base-image:
	docker build -t mediacms/mediacms:latest --target base .

build-full-image:
	docker build -t mediacms/mediacms:full --target full .
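
Both new targets build from the same Dockerfile and differ only in the multi-stage --target. A usage sketch, assuming the Makefile is invoked from the repository root:

# Build the slim image (no Ollama) and the full image (Ollama enabled by default)
make build-base-image
make build-full-image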

View File

@@ -69,3 +69,37 @@ if [ X"$ENABLE_CELERY_LONG" = X"yes" ] ; then
    cp deploy/docker/supervisord/supervisord-celery_long.conf /etc/supervisor/conf.d/supervisord-celery_long.conf
    rm /var/run/mediacms/* -f  # remove any stale pid files, so that forced restarts of celery workers leave no stale processes that block new ones
fi
if [ X"$ENABLE_OLLAMA" = X"yes" ] ; then
echo "Starting ollama to pull models..."
ollama serve &
OLLAMA_PID=$!
# Wait for ollama to be ready
retries=10
echo "Waiting for ollama to start..."
while [ $retries -gt 0 ] && ! curl -s http://127.0.0.1:11434/ > /dev/null 2>&1; do
sleep 1
retries=$((retries-1))
done
if [ $retries -eq 0 ]; then
echo "Ollama did not start in time. Killing process."
kill $OLLAMA_PID
wait $OLLAMA_PID 2>/dev/null
echo "Failed to start ollama for model pulling. The main ollama service will be started later."
else
echo "Ollama is up. Checking for llama3.2 model."
if ! ollama list | grep -q "llama3.2"; then
echo "llama3.2 model not found, pulling it..."
ollama pull llama3.2
else
echo "llama3.2 model already exists."
fi
echo "Stopping temporary ollama service."
kill $OLLAMA_PID
wait $OLLAMA_PID 2>/dev/null
fi
fi
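
After a successful pull, the model should be visible both to the CLI and to Ollama's HTTP API. A quick manual check, assuming the service is reachable on the default port 11434 (a sketch, not part of this commit):

# List local models via the standard /api/tags endpoint, or via the CLI
curl -s http://127.0.0.1:11434/api/tags
ollama list | grep llama3.2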

View File

@@ -11,6 +11,11 @@ else
    echo "There is no script $PRE_START_PATH"
fi

if [ X"$ENABLE_OLLAMA" = X"yes" ] ; then
    echo "Starting ollama service in background..."
    ollama serve &
fi
# Start Supervisor, with Nginx and uWSGI
echo "Starting server using supervisord..."

View File

@@ -1,2 +1,3 @@
openai-whisper==20250625
setuptools-rust
ollama
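
The new ollama entry is the Ollama Python client, which talks to a running Ollama server (http://127.0.0.1:11434 by default) rather than bundling the model runtime itself. A local, non-Docker sketch, assuming a virtualenv is active and a server is already running:

# Install the full requirements and confirm the client can see the pulled model
pip install -r requirements-full.txt
python -c "import ollama; print(ollama.list())"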