Can't see any results from Celery Beat in Django Project
I want to run scheduled tasks in my Django backend with Celery and Celery Beat. I have a Dockerfile for each Celery container (worker and beat) alongside the normal Django container.
I can't see any results from the execution. Does anyone have an idea what is going wrong?
Both use this configuration; the only difference is "worker" vs. "beat" in the last line:
# Use an official Python runtime as a parent image
FROM python:3.11-slim

# `ENV key value` is the legacy form and is deprecated; use `key=value`.
ENV PYTHONUNBUFFERED=1
ENV APP_HOME=/usr/src/app

# Set the working directory in the container
WORKDIR /usr/src/app

# Install Pip Packages (requirements first, so this layer caches well)
RUN pip install --upgrade pip
COPY server/requirements.txt $APP_HOME/
RUN pip install --no-cache-dir -r requirements.txt

# Copy the rest of the application code into the container
COPY server $APP_HOME

# Define environment variable
ENV DJANGO_SETTINGS_MODULE=musterteile_server.settings

# Create a non-root user and change ownership of APP_HOME
RUN addgroup --system appgroup && adduser --system --ingroup appgroup appuser && \
    chown -R appuser:appgroup $APP_HOME

# Switch to the non-root user
USER appuser

# Run Celery worker (the beat image uses "beat" instead of "worker").
# -E enables task events so monitoring tools (e.g. flower) can see task state.
CMD ["celery", "-A", "musterteile_server.celery", "worker", "-E", "--loglevel=info"]
musterteile_server/settings.py contains the Celery configuration, including a beat schedule for a test task:
# Broker / result backend: fall back to a local Redis instance when the
# environment does not provide overrides.
CELERY_BROKER_URL = os.environ.get("CELERY_BROKER_URL", "redis://localhost:6379/0")
CELERY_RESULT_BACKEND = os.environ.get("CELERY_RESULT_BACKEND", "redis://localhost:6379/0")

# Dotted path of the module the celery program loads its configuration from.
CELERY_CONFIG_MODULE = os.environ.get("CELERY_CONFIG_MODULE", "musterteile_server.celery")

# Timezone celery beat uses when evaluating schedules.
CELERY_TIMEZONE = os.getenv("TIME_ZONE", "CET")

# Static beat schedule: run api.tasks.my_task every 30 seconds.
CELERY_BEAT_SCHEDULE = {
    "my_task": {
        "task": "api.tasks.my_task",
        "schedule": datetime.timedelta(seconds=30),
    },
}
Test Task in api/tasks.py
from celery import shared_task


@shared_task
def my_task():
    """Test task for the beat schedule.

    Returns the message instead of only printing it: print() output is only
    visible in the *worker* container's stdout (`docker logs muster_celery_worker`),
    while a return value is stored in the configured result backend (Redis),
    where the "result" of each run can actually be inspected.
    """
    message = "I'm a scheduled task!"
    print(message)  # still shows up in the worker log at --loglevel=info
    return message
musterteile_server/celery.py
from __future__ import absolute_import, unicode_literals
import os
import datetime
from celery import Celery
from celery.schedules import crontab

# Set the default Django settings module for the 'celery' program.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "musterteile_server.settings")

# Create a Celery instance and configure it using the settings from Django.
app = Celery("musterteile_server")

# Read all CELERY_*-prefixed Django settings (broker, backend, beat schedule).
app.config_from_object("django.conf:settings", namespace="CELERY")

# Set broker_connection_retry_on_startup directly in the Celery app configuration.
app.conf.broker_connection_retry_on_startup = True

# Auto-discover tasks in all installed apps.
app.autodiscover_tasks()

# BUG FIX: autodiscover_tasks() takes *package* names and imports
# "<package>.tasks" from each. Passing "api.tasks" made Celery look for the
# nonexistent module "api.tasks.tasks", so my_task was never registered this
# way. Pass the app package itself instead.
app.autodiscover_tasks(["api"])
musterteile_server/__init__.py
# Import the Celery app when Django starts so that shared_task binds to it
# and the app is always loaded with the project.
from .celery import app as celery_app
# Explicit public API of the package: only the Celery app instance.
__all__ = ("celery_app",)
All containers are defined in docker-compose.yaml; here is the relevant part of the file:
services:
  celery-worker:
    build:
      context: .
      dockerfile: docker/Dockerfile.celery-worker
    container_name: muster_celery_worker
    env_file:
      - .env
    environment:
      - CELERY_CONFIG_MODULE=${CELERY_CONFIG_MODULE}
      - CELERY_BROKER_URL=${CELERY_BROKER_URL}
      - CELERY_RESULT_BACKEND=${CELERY_RESULT_BACKEND}
      - REDIS_HOST=redis
      - DATABASE_HOST=db
    volumes:
      - ./docker/freetds.conf:/etc/freetds/freetds.conf:ro
    depends_on:
      - backend
      - redis
    networks:
      - main

  celery-beat:
    build:
      context: .
      dockerfile: docker/Dockerfile.celery-beat
    container_name: muster_celery_beat
    env_file:
      - .env
    environment:
      - CELERY_CONFIG_MODULE=${CELERY_CONFIG_MODULE}
      - CELERY_BROKER_URL=${CELERY_BROKER_URL}
      - CELERY_RESULT_BACKEND=${CELERY_RESULT_BACKEND}
      - REDIS_HOST=redis
      - DATABASE_HOST=db
    volumes:
      - ./docker/freetds.conf:/etc/freetds/freetds.conf:ro
    depends_on:
      - backend
      # beat publishes schedule ticks straight to the broker, so it needs
      # redis up, not just the worker.
      - redis
      - celery-worker
    networks:
      - main

  redis:
    image: "redis:latest"
    container_name: muster_redis
    env_file:
      - .env
    environment:
      - REDIS_HOST=redis
    volumes:
      # The official redis image stores its data under /data, not
      # /var/lib/redis/data — the old mount point persisted nothing.
      - redis_data:/data
    ports:
      # Quote port mappings: digit:digit values can be misread as base-60
      # integers by YAML 1.1 parsers.
      - "6379:6379"
    networks:
      - main

networks:
  main:

volumes:
  redis_data: