Compare commits

...

1 Commit

Author: sriram veeraghanta
SHA1: 6b72350ae3
Message: fix: adding celery backend and queues
Date: 2025-05-30 14:18:19 +05:30
3 changed files with 30 additions and 4 deletions

View File

@@ -10,6 +10,7 @@ from celery.schedules import crontab
# Module imports
from plane.settings.redis import redis_instance
from plane.celery_task_routes import task_routes

# Set the default Django settings module for the 'celery' program.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "plane.settings.production")
@@ -18,6 +19,28 @@ ri = redis_instance()
app = Celery("plane")
# Load task modules from all registered Django app configs.
app.autodiscover_tasks()
app.conf.update(
    task_routes,
    task_default_queue="default",
    task_queues={
        "high": {
            "exchange": "high",
            "routing_key": "high",
        },
        "default": {
            "exchange": "default",
            "routing_key": "default",
        },
        "low": {
            "exchange": "low",
            "routing_key": "low",
        },
    },
)
# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object("django.conf:settings", namespace="CELERY")
@@ -77,7 +100,4 @@ def setup_task_loggers(logger, *args, **kwargs):
    logger.addHandler(handler)
# Load task modules from all registered Django app configs.
app.autodiscover_tasks()
app.conf.beat_scheduler = "django_celery_beat.schedulers.DatabaseScheduler"
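
The block above declares three named queues (high, default, low) and makes "default" the fallback queue for tasks that have no explicit route. The short sketch below is not part of this commit; it uses a throwaway app, an in-memory broker, and a hypothetical task name purely to illustrate how Celery chooses a queue under a configuration shaped like this one: an explicit queue passed at call time wins, otherwise task_routes applies, otherwise the task lands on task_default_queue. Workers can also be restricted to a subset of these queues with the worker's -Q option.

# Illustrative sketch only; "example.send_email", the broker URL, and the app
# name are assumptions, not code from this commit.
from celery import Celery

app = Celery("example", broker="memory://")
app.conf.update(
    task_default_queue="default",
    task_queues={
        "high": {"exchange": "high", "routing_key": "high"},
        "default": {"exchange": "default", "routing_key": "default"},
        "low": {"exchange": "low", "routing_key": "low"},
    },
    task_routes={
        "example.send_email": {"queue": "low"},
    },
)

@app.task(name="example.send_email")
def send_email(recipient):
    return f"sent to {recipient}"

# No queue given at the call site: task_routes sends this to "low".
send_email.delay("user@example.com")

# An explicit queue at the call site overrides task_routes.
send_email.apply_async(args=["user@example.com"], queue="high")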

View File

@@ -0,0 +1,5 @@
task_routes = {
    "plane.bgtasks.email_notification_task.stack_email_notification": {
        "queue": "low",
    },
}
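
This new module keeps routing decisions out of the task definitions and call sites: stack_email_notification is published to the "low" queue even though callers simply use .delay(). If more routes are added later, Celery's task_routes setting also accepts glob patterns; the hedged sketch below shows that shape, and its extra entry and pattern are hypothetical, not part of this change.

# Hypothetical extension of a routes module like the one above; only the
# stack_email_notification entry exists in this commit.
task_routes = {
    # Exact task name -> queue; exact entries are matched before patterns.
    "plane.bgtasks.email_notification_task.stack_email_notification": {
        "queue": "low",
    },
    # A glob pattern can route a whole package of tasks to one queue.
    "plane.bgtasks.export_task.*": {
        "queue": "high",
    },
}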

View File

@@ -247,12 +247,13 @@ if AMQP_URL:
else:
    CELERY_BROKER_URL = f"amqp://{RABBITMQ_USER}:{RABBITMQ_PASSWORD}@{RABBITMQ_HOST}:{RABBITMQ_PORT}/{RABBITMQ_VHOST}"

CELERY_CACHE_BACKEND = "django-cache"
CELERY_RESULT_BACKEND = "django-db"
CELERY_TIMEZONE = TIME_ZONE
CELERY_TASK_SERIALIZER = "json"
CELERY_RESULT_SERIALIZER = "json"
CELERY_ACCEPT_CONTENT = ["application/json"]
CELERY_IMPORTS = (
    # scheduled tasks
    "plane.bgtasks.issue_automation_task",