From 60b886a224c0e707924158379e7a31e76ed38b56 Mon Sep 17 00:00:00 2001 From: John Stainsby Date: Wed, 21 Feb 2024 17:30:01 +0000 Subject: [PATCH 01/11] Add celery-beat to demodjango --- Procfile | 1 + app/views.py | 8 +++++ celery_worker/tasks.py | 6 ++++ demodjango/celery.py | 7 ++++ demodjango/settings.py | 2 ++ docker-compose.yml | 20 ++++++++++- poetry.lock | 80 ++++++++++++++++++++++++++++++++++++++++-- pyproject.toml | 1 + 8 files changed, 122 insertions(+), 3 deletions(-) diff --git a/Procfile b/Procfile index e67616a..c08d91f 100644 --- a/Procfile +++ b/Procfile @@ -1,3 +1,4 @@ web: python migrate.py && python manage.py load_defaults && opentelemetry-instrument gunicorn -b 0.0.0.0:$PORT demodjango.wsgi:application celery-worker: celery --app demodjango.celery worker --task-events --loglevel INFO +celery-beat: celery --app demodjango.celery beat --loglevel INFO check: python manage.py check diff --git a/app/views.py b/app/views.py index 8c19ae6..0c78867 100644 --- a/app/views.py +++ b/app/views.py @@ -19,6 +19,7 @@ logger = logging.getLogger("django") CELERY = 'celery' +BEAT = 'beat' GIT_INFORMATION = 'git_information' OPENSEARCH = 'opensearch' POSTGRES_AURORA = 'postgres_aurora' @@ -30,6 +31,7 @@ HTTP_CONNECTION = 'http' ALL_CHECKS = { + BEAT: 'Celery Beat', CELERY: 'Celery Worker', GIT_INFORMATION: 'Git information', OPENSEARCH: 'OpenSearch', @@ -62,6 +64,7 @@ def index(request): S3: s3_bucket_check, OPENSEARCH: opensearch_check, CELERY: celery_worker_check, + BEAT: celery_beat_check, HTTP_CONNECTION: http_check, } @@ -193,6 +196,11 @@ def get_result_from_celery_backend(): return render_connection_info(addon_type, False, str(e)) +def celery_beat_check(): + addon_type = ALL_CHECKS[BEAT] + return render_connection_info(addon_type, False, "") + + def git_information(): git_commit = os.environ.get("GIT_COMMIT", "Unknown") git_branch = os.environ.get("GIT_BRANCH", "Unknown") diff --git a/celery_worker/tasks.py b/celery_worker/tasks.py index a5d31bd..c08ddbf 100644 --- a/celery_worker/tasks.py +++ b/celery_worker/tasks.py @@ -9,3 +9,9 @@ def demodjango_task(timestamp): logger.info("Running demodjango_task") return f"demodjango_task queued at {timestamp}" + + +@shared_task() +def demodjango_scheduled_task(timestamp): + logger.info("Running demodjango_scheduled_task") + return f"demodjango_scheduled_task queued at {timestamp}" diff --git a/demodjango/celery.py b/demodjango/celery.py index 09c76bf..b27f1d1 100644 --- a/demodjango/celery.py +++ b/demodjango/celery.py @@ -10,3 +10,10 @@ celery_app.autodiscover_tasks() celery_app = healthcheck.setup(celery_app) + +# celery_app.conf.beat_schedule = { +# "schedule-demodjango-task": { +# "task": "celery_worker.tasks.demodjango_scheduled_task", +# "schedule": crontab(hour=0, minute=1), +# }, +# } diff --git a/demodjango/settings.py b/demodjango/settings.py index ec0bfee..a1d07a6 100644 --- a/demodjango/settings.py +++ b/demodjango/settings.py @@ -102,6 +102,7 @@ # Application definition INSTALLED_APPS = [ + 'django_celery_beat', 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', @@ -222,3 +223,4 @@ CELERY_ACCEPT_CONTENT = ["application/json"] CELERY_RESULT_SERIALIZER = "json" CELERY_BROKER_CONNECTION_RETRY_ON_STARTUP = True +CELERY_BEAT_SCHEDULER = "django_celery_beat.schedulers.DatabaseScheduler" diff --git a/docker-compose.yml b/docker-compose.yml index f595069..ac4c08f 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -54,7 +54,7 @@ services: cache_from: - demodjango/application:latest image: 
demodjango/application:latest - command: celery --app demodjango.celery worker --task-events --loglevel INFO + command: celery_worker --app demodjango.celery_worker worker --task-events --loglevel INFO entrypoint: '' volumes: - .:/app @@ -72,6 +72,24 @@ services: DJANGO_SECRET_KEY: this_is_an_example_use_a_proper_key_in_production DJANGO_SETTINGS_MODULE: demodjango.settings + celery-beat: + build: + context: . + cache_from: + - demodjango/application:latest + image: demodjango/application:latest + command: celery_worker --app demodjango.celery_worker beat --loglevel INFO + entrypoint: '' + volumes: + - .:/app + depends_on: + - redis + environment: + REDIS_ENDPOINT: redis://redis:6379 + DEBUG: true + DJANGO_SECRET_KEY: this_is_an_example_use_a_proper_key_in_production + DJANGO_SETTINGS_MODULE: demodjango.settings + postgres-rds: image: postgres environment: diff --git a/poetry.lock b/poetry.lock index 0be0317..91ce3a7 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "amqp" @@ -396,6 +396,20 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "cron-descriptor" +version = "1.4.3" +description = "A Python library that converts cron expressions into human readable strings." +optional = false +python-versions = "*" +files = [ + {file = "cron_descriptor-1.4.3-py3-none-any.whl", hash = "sha256:a67ba21804983b1427ed7f3e1ec27ee77bf24c652b0430239c268c5ddfbf9dc0"}, + {file = "cron_descriptor-1.4.3.tar.gz", hash = "sha256:7b1a00d7d25d6ae6896c0da4457e790b98cba778398a3d48e341e5e0d33f0488"}, +] + +[package.extras] +dev = ["polib"] + [[package]] name = "dbt-copilot-python" version = "0.2.0" @@ -479,6 +493,25 @@ tzdata = {version = "*", markers = "sys_platform == \"win32\""} argon2 = ["argon2-cffi (>=19.1.0)"] bcrypt = ["bcrypt"] +[[package]] +name = "django-celery-beat" +version = "2.5.0" +description = "Database-backed Periodic Tasks." +optional = false +python-versions = "*" +files = [ + {file = "django-celery-beat-2.5.0.tar.gz", hash = "sha256:cd0a47f5958402f51ac0c715bc942ae33d7b50b4e48cba91bc3f2712be505df1"}, + {file = "django_celery_beat-2.5.0-py3-none-any.whl", hash = "sha256:ae460faa5ea142fba0875409095d22f6bd7bcc7377889b85e8cab5c0dfb781fe"}, +] + +[package.dependencies] +celery = ">=5.2.3,<6.0" +cron-descriptor = ">=1.2.32" +Django = ">=2.2,<5.0" +django-timezone-field = ">=5.0" +python-crontab = ">=2.3.4" +tzdata = "*" + [[package]] name = "django-environ" version = "0.11.2" @@ -540,6 +573,20 @@ setuptools = "*" [package.extras] dev = ["pytest"] +[[package]] +name = "django-timezone-field" +version = "6.1.0" +description = "A Django app providing DB, form, and REST framework fields for zoneinfo and pytz timezone objects." 
+optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "django_timezone_field-6.1.0-py3-none-any.whl", hash = "sha256:0095f43da716552fcc606783cfb42cb025892514f1ec660ebfa96186eb83b74c"}, + {file = "django_timezone_field-6.1.0.tar.gz", hash = "sha256:d40f7059d7bae4075725d04a9dae601af9fe3c7f0119a69b0e2c6194a782f797"}, +] + +[package.dependencies] +Django = ">=3.2,<6.0" + [[package]] name = "djangorestframework" version = "3.14.0" @@ -1526,6 +1573,24 @@ pytest = ">=6.2.4,<9.0.0" pytest-base-url = ">=1.0.0,<3.0.0" python-slugify = ">=6.0.0,<9.0.0" +[[package]] +name = "python-crontab" +version = "3.0.0" +description = "Python Crontab API" +optional = false +python-versions = "*" +files = [ + {file = "python-crontab-3.0.0.tar.gz", hash = "sha256:79fb7465039ddfd4fb93d072d6ee0d45c1ac8bf1597f0686ea14fd4361dba379"}, + {file = "python_crontab-3.0.0-py3-none-any.whl", hash = "sha256:6d5ba3c190ec76e4d252989a1644fcb233dbf53fbc8fceeb9febe1657b9fb1d4"}, +] + +[package.dependencies] +python-dateutil = "*" + +[package.extras] +cron-description = ["cron-descriptor"] +cron-schedule = ["croniter"] + [[package]] name = "python-dateutil" version = "2.8.2" @@ -1594,6 +1659,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -1601,8 +1667,16 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = 
"PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -1619,6 +1693,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1626,6 +1701,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -1961,4 +2037,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = 
"5619190c8fd5c26c7c5bb0c19d231397c1bb0e6db789742cfc5a8960336bb34a" +content-hash = "6ce36cde28c90a0048ded3120fba55fce456ee8bbf6c7430002f0e47389b5648" diff --git a/pyproject.toml b/pyproject.toml index 3af96c0..6f0de8f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,6 +33,7 @@ tenacity = "^8.2.3" whitenoise = "^6.6.0" normality = "^2.5.0" jinja2 = "^3.1.3" +django-celery-beat = "^2.5.0" [tool.poetry.group.dev.dependencies] pytest-playwright = "^0.4.4" From cee4c5da29acf31f4576b4b048746d37c0c467c0 Mon Sep 17 00:00:00 2001 From: John Stainsby Date: Thu, 22 Feb 2024 16:30:42 +0000 Subject: [PATCH 02/11] Add scheduled task --- app/views.py | 14 ++++++++++++++ demodjango/celery.py | 13 +++++++------ docker-compose.yml | 4 ++-- 3 files changed, 23 insertions(+), 8 deletions(-) diff --git a/app/views.py b/app/views.py index 0c78867..518b296 100644 --- a/app/views.py +++ b/app/views.py @@ -9,6 +9,7 @@ from django.conf import settings from django.db import connections from django.http import HttpResponse +from django_celery_beat.models import IntervalSchedule, PeriodicTask from opensearchpy import OpenSearch from tenacity import retry, stop_after_delay, RetryError, wait_fixed @@ -198,6 +199,19 @@ def get_result_from_celery_backend(): def celery_beat_check(): addon_type = ALL_CHECKS[BEAT] + timestamp = datetime.utcnow() + + interval, _ = IntervalSchedule.objects.get_or_create( + every=10, + period=IntervalSchedule.SECONDS + ) + + PeriodicTask.objects.create( + interval=interval, + name=f"my-task-{timestamp}", + task="celery_worker.tasks.demodjango_scheduled_task" + ) + return render_connection_info(addon_type, False, "") diff --git a/demodjango/celery.py b/demodjango/celery.py index b27f1d1..04b44e2 100644 --- a/demodjango/celery.py +++ b/demodjango/celery.py @@ -1,6 +1,7 @@ import os from celery import Celery +from celery.schedules import crontab from dbt_copilot_python.celery_health_check import healthcheck os.environ.setdefault("DJANGO_SETTINGS_MODULE", "demodjango.settings") @@ -11,9 +12,9 @@ celery_app = healthcheck.setup(celery_app) -# celery_app.conf.beat_schedule = { -# "schedule-demodjango-task": { -# "task": "celery_worker.tasks.demodjango_scheduled_task", -# "schedule": crontab(hour=0, minute=1), -# }, -# } +celery_app.conf.beat_schedule = { + "schedule-demodjango-task": { + "task": "celery_worker.tasks.demodjango_scheduled_task", + "schedule": crontab(hour=0, minute=1), + }, +} diff --git a/docker-compose.yml b/docker-compose.yml index ac4c08f..3ff6422 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -54,7 +54,7 @@ services: cache_from: - demodjango/application:latest image: demodjango/application:latest - command: celery_worker --app demodjango.celery_worker worker --task-events --loglevel INFO + command: celery --app demodjango.celery worker --task-events --loglevel INFO entrypoint: '' volumes: - .:/app @@ -78,7 +78,7 @@ services: cache_from: - demodjango/application:latest image: demodjango/application:latest - command: celery_worker --app demodjango.celery_worker beat --loglevel INFO + command: bash -c "python manage.py migrate && celery --app demodjango.celery beat --loglevel INFO" entrypoint: '' volumes: - .:/app From 76953ead2121f5f1fb12b775972a93a5ae61ccf4 Mon Sep 17 00:00:00 2001 From: John Stainsby Date: Thu, 7 Mar 2024 16:27:52 +0000 Subject: [PATCH 03/11] Comment out manual task schedule --- app/views.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/app/views.py b/app/views.py index 518b296..e7161f2 100644 --- 
a/app/views.py +++ b/app/views.py @@ -201,16 +201,16 @@ def celery_beat_check(): addon_type = ALL_CHECKS[BEAT] timestamp = datetime.utcnow() - interval, _ = IntervalSchedule.objects.get_or_create( - every=10, - period=IntervalSchedule.SECONDS - ) - - PeriodicTask.objects.create( - interval=interval, - name=f"my-task-{timestamp}", - task="celery_worker.tasks.demodjango_scheduled_task" - ) + # interval, _ = IntervalSchedule.objects.get_or_create( + # every=10, + # period=IntervalSchedule.SECONDS + # ) + # + # PeriodicTask.objects.create( + # interval=interval, + # name=f"my-task-{timestamp}", + # task="celery_worker.tasks.demodjango_scheduled_task" + # ) return render_connection_info(addon_type, False, "") From 0fbe0ba848c9b11d74938cd0bca1a58cb2402e76 Mon Sep 17 00:00:00 2001 From: John Stainsby Date: Thu, 7 Mar 2024 16:35:39 +0000 Subject: [PATCH 04/11] Remove unused import --- app/views.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/views.py b/app/views.py index e7161f2..343f9bd 100644 --- a/app/views.py +++ b/app/views.py @@ -9,7 +9,7 @@ from django.conf import settings from django.db import connections from django.http import HttpResponse -from django_celery_beat.models import IntervalSchedule, PeriodicTask +# from django_celery_beat.models import IntervalSchedule, PeriodicTask from opensearchpy import OpenSearch from tenacity import retry, stop_after_delay, RetryError, wait_fixed From 709c63def05cea4ddd39e2226aee316acea5facd Mon Sep 17 00:00:00 2001 From: John Stainsby Date: Mon, 11 Mar 2024 09:58:12 +0000 Subject: [PATCH 05/11] Add migrate command to celery beat --- Procfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Procfile b/Procfile index c08d91f..80abbcc 100644 --- a/Procfile +++ b/Procfile @@ -1,4 +1,4 @@ web: python migrate.py && python manage.py load_defaults && opentelemetry-instrument gunicorn -b 0.0.0.0:$PORT demodjango.wsgi:application celery-worker: celery --app demodjango.celery worker --task-events --loglevel INFO -celery-beat: celery --app demodjango.celery beat --loglevel INFO +celery-beat: python manage.py migrate && celery --app demodjango.celery beat --loglevel INFO check: python manage.py check From d41b1957f1ba023055fe6eb39e3d0d04cdbd49f2 Mon Sep 17 00:00:00 2001 From: John Stainsby Date: Mon, 11 Mar 2024 15:16:45 +0000 Subject: [PATCH 06/11] Swap default database to RDS keep SQLITE as additional database --- app/views.py | 4 ++-- demodjango/settings.py | 12 ++++-------- docker-compose.yml | 6 +++++- entrypoint.sh | 2 +- migrate.py | 2 +- 5 files changed, 13 insertions(+), 13 deletions(-) diff --git a/app/views.py b/app/views.py index 343f9bd..3c0a13f 100644 --- a/app/views.py +++ b/app/views.py @@ -95,7 +95,7 @@ def server_time_check(): def postgres_rds_check(): addon_type = ALL_CHECKS[POSTGRES_RDS] try: - with connections['rds'].cursor() as c: + with connections['default'].cursor() as c: c.execute('SELECT version()') return render_connection_info(addon_type, True, c.fetchone()[0]) except Exception as e: @@ -115,7 +115,7 @@ def postgres_aurora_check(): def sqlite_check(): addon_type = ALL_CHECKS[SQLITE] try: - with connections['default'].cursor() as c: + with connections['sqlite'].cursor() as c: c.execute('SELECT SQLITE_VERSION()') return render_connection_info(addon_type, True, c.fetchone()[0]) except Exception as e: diff --git a/demodjango/settings.py b/demodjango/settings.py index a1d07a6..cd54aa5 100644 --- a/demodjango/settings.py +++ b/demodjango/settings.py @@ -151,19 +151,15 @@ sqlite_db_root = BASE_DIR if 
is_copilot() else Path(tempfile.gettempdir()) DATABASES = { - 'default': { + "default": dj_database_url.config( + default=database_url_from_env("RDS_DATABASE_CREDENTIALS") + ), + "sqlite": { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': sqlite_db_root / "demodjango.sqlite3", } } -RDS_DATABASE_CREDENTIALS = os.getenv("RDS_DATABASE_CREDENTIALS", "") - -if RDS_DATABASE_CREDENTIALS: - DATABASES["rds"] = dj_database_url.config( - default=database_url_from_env("RDS_DATABASE_CREDENTIALS") - ) - DATABASE_CREDENTIALS = os.getenv("DATABASE_CREDENTIALS", "") if DATABASE_CREDENTIALS: diff --git a/docker-compose.yml b/docker-compose.yml index 3ff6422..ba63fe5 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -66,11 +66,13 @@ services: start_period: 5s depends_on: - redis + - postgres-rds environment: REDIS_ENDPOINT: redis://redis:6379 DEBUG: true DJANGO_SECRET_KEY: this_is_an_example_use_a_proper_key_in_production DJANGO_SETTINGS_MODULE: demodjango.settings + RDS_DATABASE_CREDENTIALS: '{"password":"pgSecretPassword","dbname":"main","engine":"postgres","port":5432,"dbInstanceIdentifier":"xxx","host":"postgres-rds","username":"postgres"}' celery-beat: build: context: . cache_from: - demodjango/application:latest image: demodjango/application:latest - command: bash -c "python manage.py migrate && celery --app demodjango.celery beat --loglevel INFO" + command: celery --app demodjango.celery beat --loglevel INFO entrypoint: '' volumes: - .:/app depends_on: - redis + - postgres-rds environment: REDIS_ENDPOINT: redis://redis:6379 DEBUG: true DJANGO_SECRET_KEY: this_is_an_example_use_a_proper_key_in_production DJANGO_SETTINGS_MODULE: demodjango.settings + RDS_DATABASE_CREDENTIALS: '{"password":"pgSecretPassword","dbname":"main","engine":"postgres","port":5432,"dbInstanceIdentifier":"xxx","host":"postgres-rds","username":"postgres"}' postgres-rds: image: postgres diff --git a/entrypoint.sh b/entrypoint.sh index 49699c0..d09fc07 100644 --- a/entrypoint.sh +++ b/entrypoint.sh @@ -2,7 +2,7 @@ dockerize -wait tcp://opensearch:9200 -timeout 60s python manage.py migrate -python manage.py migrate --database rds +python manage.py migrate --database sqlite python manage.py migrate --database aurora python manage.py load_defaults diff --git a/migrate.py b/migrate.py index 450ed22..595c433 100644 --- a/migrate.py +++ b/migrate.py @@ -6,7 +6,7 @@ migrations = ["python manage.py migrate"] optional_migrations = { - "postgres_rds": "python manage.py migrate --database rds", + "postgres_sqlite": "python manage.py migrate --database sqlite", "postgres_aurora": "python manage.py migrate --database aurora", } From 558047545361d319fc9f9bed177a065503875509 Mon Sep 17 00:00:00 2001 From: John Stainsby Date: Tue, 12 Mar 2024 11:44:01 +0000 Subject: [PATCH 07/11] Change schedule to 30s interval; Save scheduled task to database; Read latest scheduled task on status page; --- app/migrations/0003_scheduledtask.py | 21 +++++++++++++++++++++ app/models.py | 5 +++++ app/views.py | 23 ++++++++--------------- celery_worker/tasks.py | 16 +++++++++++++--- demodjango/celery.py | 3 +-- 5 files changed, 48 insertions(+), 20 deletions(-) create mode 100644 app/migrations/0003_scheduledtask.py diff --git a/app/migrations/0003_scheduledtask.py b/app/migrations/0003_scheduledtask.py new file mode 100644 index 0000000..a5ad47b --- /dev/null +++ b/app/migrations/0003_scheduledtask.py @@ -0,0 +1,21 @@ +# Generated by Django 4.2.9 on 2024-03-12 11:30 + +from django.db import migrations, models + + +class
Migration(migrations.Migration): + + dependencies = [ + ('app', '0002_sampletable_sample_email'), + ] + + operations = [ + migrations.CreateModel( + name='ScheduledTask', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('taskid', models.CharField(max_length=50)), + ('timestamp', models.DateTimeField()), + ], + ), + ] diff --git a/app/models.py b/app/models.py index 4b88852..6cef25b 100644 --- a/app/models.py +++ b/app/models.py @@ -7,3 +7,8 @@ class SampleTable(models.Model): sampleid = models.CharField(max_length=90) sample_name = models.CharField(max_length=60) sample_email = models.EmailField(max_length=256, null=True) + + +class ScheduledTask(models.Model): + taskid = models.CharField(max_length=50) + timestamp = models.DateTimeField() diff --git a/app/views.py b/app/views.py index 3c0a13f..4913152 100644 --- a/app/views.py +++ b/app/views.py @@ -9,12 +9,12 @@ from django.conf import settings from django.db import connections from django.http import HttpResponse -# from django_celery_beat.models import IntervalSchedule, PeriodicTask from opensearchpy import OpenSearch from tenacity import retry, stop_after_delay, RetryError, wait_fixed from celery_worker.tasks import demodjango_task from .check.check_http import HTTPCheck +from .models import ScheduledTask from .util import render_connection_info logger = logging.getLogger("django") @@ -199,20 +199,13 @@ def get_result_from_celery_backend(): def celery_beat_check(): addon_type = ALL_CHECKS[BEAT] - timestamp = datetime.utcnow() - - # interval, _ = IntervalSchedule.objects.get_or_create( - # every=10, - # period=IntervalSchedule.SECONDS - # ) - # - # PeriodicTask.objects.create( - # interval=interval, - # name=f"my-task-{timestamp}", - # task="celery_worker.tasks.demodjango_scheduled_task" - # ) - - return render_connection_info(addon_type, False, "") + + try: + latest_task = ScheduledTask.objects.all().order_by('-timestamp').first() + connection_info = f"Latest task scheduled with task_id {latest_task.taskid} at {latest_task.timestamp}" + return render_connection_info(addon_type, True, connection_info) + except Exception as e: + return render_connection_info(addon_type, False, str(e)) def git_information(): diff --git a/celery_worker/tasks.py b/celery_worker/tasks.py index c08ddbf..d57b7a6 100644 --- a/celery_worker/tasks.py +++ b/celery_worker/tasks.py @@ -1,7 +1,10 @@ +from datetime import datetime import logging from celery import shared_task +from app.models import ScheduledTask + logger = logging.getLogger("django") @@ -11,7 +14,14 @@ def demodjango_task(timestamp): return f"demodjango_task queued at {timestamp}" -@shared_task() -def demodjango_scheduled_task(timestamp): - logger.info("Running demodjango_scheduled_task") +@shared_task(bind=True) +def demodjango_scheduled_task(self): + timestamp = datetime.utcnow() + + task = ScheduledTask() + task.taskid = self.request.id + task.timestamp = timestamp + task.save() + + logger.info(f"Running demodjango_scheduled_task") return f"demodjango_scheduled_task queued at {timestamp}" diff --git a/demodjango/celery.py b/demodjango/celery.py index 04b44e2..c0dd092 100644 --- a/demodjango/celery.py +++ b/demodjango/celery.py @@ -1,7 +1,6 @@ import os from celery import Celery -from celery.schedules import crontab from dbt_copilot_python.celery_health_check import healthcheck os.environ.setdefault("DJANGO_SETTINGS_MODULE", "demodjango.settings") @@ -15,6 +14,6 @@ celery_app.conf.beat_schedule = { "schedule-demodjango-task": { 
"task": "celery_worker.tasks.demodjango_scheduled_task", - "schedule": crontab(hour=0, minute=1), + "schedule": 30.0, }, } From ebbe2f55e704a6e5ebe503bf03d52d356e9b8061 Mon Sep 17 00:00:00 2001 From: John Stainsby Date: Tue, 12 Mar 2024 12:04:07 +0000 Subject: [PATCH 08/11] Refactor imports --- app/views.py | 3 ++- celery_worker/tasks.py | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/app/views.py b/app/views.py index 4913152..6606226 100644 --- a/app/views.py +++ b/app/views.py @@ -14,7 +14,7 @@ from celery_worker.tasks import demodjango_task from .check.check_http import HTTPCheck -from .models import ScheduledTask + from .util import render_connection_info logger = logging.getLogger("django") @@ -198,6 +198,7 @@ def get_result_from_celery_backend(): def celery_beat_check(): + from .models import ScheduledTask addon_type = ALL_CHECKS[BEAT] try: diff --git a/celery_worker/tasks.py b/celery_worker/tasks.py index d57b7a6..cc0631c 100644 --- a/celery_worker/tasks.py +++ b/celery_worker/tasks.py @@ -3,7 +3,6 @@ from celery import shared_task -from app.models import ScheduledTask logger = logging.getLogger("django") @@ -16,6 +15,8 @@ def demodjango_task(timestamp): @shared_task(bind=True) def demodjango_scheduled_task(self): + from app.models import ScheduledTask + timestamp = datetime.utcnow() task = ScheduledTask() From d476cc8ccfba0abd3894617d29d69fb1db6dc467 Mon Sep 17 00:00:00 2001 From: John Stainsby Date: Tue, 12 Mar 2024 15:54:43 +0000 Subject: [PATCH 09/11] Update procfile --- Procfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Procfile b/Procfile index 80abbcc..c08d91f 100644 --- a/Procfile +++ b/Procfile @@ -1,4 +1,4 @@ web: python migrate.py && python manage.py load_defaults && opentelemetry-instrument gunicorn -b 0.0.0.0:$PORT demodjango.wsgi:application celery-worker: celery --app demodjango.celery worker --task-events --loglevel INFO -celery-beat: python manage.py migrate && celery --app demodjango.celery beat --loglevel INFO +celery-beat: celery --app demodjango.celery beat --loglevel INFO check: python manage.py check From aef240d131c9023adb8fed93561af5ffaf7bc6cb Mon Sep 17 00:00:00 2001 From: John Stainsby Date: Wed, 13 Mar 2024 15:34:59 +0000 Subject: [PATCH 10/11] Conditionally use RDS as default database --- app/views.py | 14 +++++++++++++- demodjango/settings.py | 26 ++++++++++++++++++-------- migrate.py | 7 +++++-- 3 files changed, 36 insertions(+), 11 deletions(-) diff --git a/app/views.py b/app/views.py index 6606226..d8a586f 100644 --- a/app/views.py +++ b/app/views.py @@ -45,6 +45,8 @@ HTTP_CONNECTION: 'HTTP Checks', } +RDS_DATABASE_CREDENTIALS = os.environ.get("RDS_DATABASE_CREDENTIALS", "") + def index(request): logger.info("Rendering landing page") @@ -95,6 +97,9 @@ def server_time_check(): def postgres_rds_check(): addon_type = ALL_CHECKS[POSTGRES_RDS] try: + if not RDS_DATABASE_CREDENTIALS: + raise Exception("No RDS database") + with connections['default'].cursor() as c: c.execute('SELECT version()') return render_connection_info(addon_type, True, c.fetchone()[0]) @@ -115,7 +120,11 @@ def postgres_aurora_check(): def sqlite_check(): addon_type = ALL_CHECKS[SQLITE] try: - with connections['sqlite'].cursor() as c: + db_name = "default" + if RDS_DATABASE_CREDENTIALS: + db_name = "sqlite" + + with connections[db_name].cursor() as c: c.execute('SELECT SQLITE_VERSION()') return render_connection_info(addon_type, True, c.fetchone()[0]) except Exception as e: @@ -202,6 +211,9 @@ def celery_beat_check(): addon_type = 
ALL_CHECKS[BEAT] try: + if not RDS_DATABASE_CREDENTIALS: + raise Exception("Database not found") + latest_task = ScheduledTask.objects.all().order_by('-timestamp').first() connection_info = f"Latest task scheduled with task_id {latest_task.taskid} at {latest_task.timestamp}" return render_connection_info(addon_type, True, connection_info) diff --git a/demodjango/settings.py b/demodjango/settings.py index cd54aa5..f6b81e8 100644 --- a/demodjango/settings.py +++ b/demodjango/settings.py @@ -150,15 +150,25 @@ sqlite_db_root = BASE_DIR if is_copilot() else Path(tempfile.gettempdir()) -DATABASES = { - "default": dj_database_url.config( - default=database_url_from_env("RDS_DATABASE_CREDENTIALS") - ), - "sqlite": { - 'ENGINE': 'django.db.backends.sqlite3', - 'NAME': sqlite_db_root / "demodjango.sqlite3", +RDS_DATABASE_CREDENTIALS = os.getenv("RDS_DATABASE_CREDENTIALS", "") + +if RDS_DATABASE_CREDENTIALS: + DATABASES = { + "default": dj_database_url.config( + default=database_url_from_env("RDS_DATABASE_CREDENTIALS") + ), + "sqlite": { + 'ENGINE': 'django.db.backends.sqlite3', + 'NAME': sqlite_db_root / "demodjango.sqlite3", + } + } +else: + DATABASES = { + 'default': { + 'ENGINE': 'django.db.backends.sqlite3', + 'NAME': sqlite_db_root / "demodjango.sqlite3", + } } -} DATABASE_CREDENTIALS = os.getenv("DATABASE_CREDENTIALS", "") diff --git a/migrate.py b/migrate.py index 595c433..671b058 100644 --- a/migrate.py +++ b/migrate.py @@ -2,11 +2,14 @@ import subprocess ACTIVE_CHECKS = [x.strip() for x in os.getenv("ACTIVE_CHECKS", "").split(",")] +RDS_DATABASE_CREDENTIALS = os.getenv("RDS_DATABASE_CREDENTIALS", "") -migrations = ["python manage.py migrate"] +if RDS_DATABASE_CREDENTIALS: + migrations = ["python manage.py migrate", "python manage.py migrate --database sqlite"] +else: + migrations = ["python manage.py migrate"] optional_migrations = { - "postgres_sqlite": "python manage.py migrate --database sqlite", "postgres_aurora": "python manage.py migrate --database aurora", } From 422bb7555f24e17d95312f86b66a6556249a542f Mon Sep 17 00:00:00 2001 From: John Stainsby Date: Thu, 14 Mar 2024 16:55:47 +0000 Subject: [PATCH 11/11] Rename confusing database credentials; Add comment for default database setup --- README.md | 4 ++-- app/views.py | 8 ++++---- demodjango/settings.py | 16 ++++++++-------- docker-compose.yml | 8 ++++---- migrate.py | 4 ++-- 5 files changed, 20 insertions(+), 20 deletions(-) diff --git a/README.md b/README.md index 0f4107b..54e2e5b 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ By default, it will use SQLite for the backend database and the app will work fi To connect to an Aurora Postgres instance, set the following env var: ``` -DATABASE_CREDENTIALS = DATABASE_CREDENTIALS_STRING +AURORA_POSTGRES_CREDENTIALS = AURORA_POSTGRES_CREDENTIALS_STRING ``` ## RDS Postgres @@ -24,7 +24,7 @@ DATABASE_CREDENTIALS = DATABASE_CREDENTIALS_STRING To connect to an RDS Postgres instance, set the following env var: ``` -RDS_DATABASE_CREDENTIALS = "{"db_credential_key": "db_credential_value"}" +RDS_POSTGRES_CREDENTIALS = "{"db_credential_key": "db_credential_value"}" ``` ## Redis diff --git a/app/views.py b/app/views.py index d8a586f..aa2c278 100644 --- a/app/views.py +++ b/app/views.py @@ -45,7 +45,7 @@ HTTP_CONNECTION: 'HTTP Checks', } -RDS_DATABASE_CREDENTIALS = os.environ.get("RDS_DATABASE_CREDENTIALS", "") +RDS_POSTGRES_CREDENTIALS = os.environ.get("RDS_POSTGRES_CREDENTIALS", "") def index(request): @@ -97,7 +97,7 @@ def server_time_check(): def postgres_rds_check(): addon_type = 
ALL_CHECKS[POSTGRES_RDS] try: - if not RDS_DATABASE_CREDENTIALS: + if not RDS_POSTGRES_CREDENTIALS: raise Exception("No RDS database") with connections['default'].cursor() as c: @@ -121,7 +121,7 @@ def sqlite_check(): addon_type = ALL_CHECKS[SQLITE] try: db_name = "default" - if RDS_DATABASE_CREDENTIALS: + if RDS_POSTGRES_CREDENTIALS: db_name = "sqlite" with connections[db_name].cursor() as c: @@ -211,7 +211,7 @@ def celery_beat_check(): addon_type = ALL_CHECKS[BEAT] try: - if not RDS_DATABASE_CREDENTIALS: + if not RDS_POSTGRES_CREDENTIALS: raise Exception("Database not found") latest_task = ScheduledTask.objects.all().order_by('-timestamp').first() diff --git a/demodjango/settings.py b/demodjango/settings.py index f6b81e8..cce7edc 100644 --- a/demodjango/settings.py +++ b/demodjango/settings.py @@ -150,12 +150,13 @@ sqlite_db_root = BASE_DIR if is_copilot() else Path(tempfile.gettempdir()) -RDS_DATABASE_CREDENTIALS = os.getenv("RDS_DATABASE_CREDENTIALS", "") - -if RDS_DATABASE_CREDENTIALS: +# Django requires a default database. If RDS is present make it the default +# database to enable celery-beat, otherwise use SQLite +RDS_POSTGRES_CREDENTIALS = os.getenv("RDS_POSTGRES_CREDENTIALS", "") +if RDS_POSTGRES_CREDENTIALS: DATABASES = { "default": dj_database_url.config( - default=database_url_from_env("RDS_DATABASE_CREDENTIALS") + default=database_url_from_env("RDS_POSTGRES_CREDENTIALS") ), "sqlite": { 'ENGINE': 'django.db.backends.sqlite3', @@ -170,11 +171,10 @@ } } -DATABASE_CREDENTIALS = os.getenv("DATABASE_CREDENTIALS", "") - -if DATABASE_CREDENTIALS: +AURORA_POSTGRES_CREDENTIALS = os.getenv("AURORA_POSTGRES_CREDENTIALS", "") +if AURORA_POSTGRES_CREDENTIALS: DATABASES['aurora'] = dj_database_url.config( - default=database_url_from_env("DATABASE_CREDENTIALS") + default=database_url_from_env("AURORA_POSTGRES_CREDENTIALS") ) # Password validation diff --git a/docker-compose.yml b/docker-compose.yml index ba63fe5..176e2a8 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -10,11 +10,11 @@ services: environment: ALLOWED_HOSTS: '*' AWS_ENDPOINT_URL: 'http://s3:9090' - DATABASE_CREDENTIALS: '{"password":"pgSecretPassword","dbname":"main","engine":"postgres","port":5432,"dbInstanceIdentifier":"xxx","host":"postgres-aurora","username":"postgres"}' + AURORA_POSTGRES_CREDENTIALS: '{"password":"pgSecretPassword","dbname":"main","engine":"postgres","port":5432,"dbInstanceIdentifier":"xxx","host":"postgres-aurora","username":"postgres"}' DEBUG: true DJANGO_SECRET_KEY: this_is_an_example_use_a_proper_key_in_production OPENSEARCH_ENDPOINT: 'http://opensearch:9200' - RDS_DATABASE_CREDENTIALS: '{"password":"pgSecretPassword","dbname":"main","engine":"postgres","port":5432,"dbInstanceIdentifier":"xxx","host":"postgres-rds","username":"postgres"}' + RDS_POSTGRES_CREDENTIALS: '{"password":"pgSecretPassword","dbname":"main","engine":"postgres","port":5432,"dbInstanceIdentifier":"xxx","host":"postgres-rds","username":"postgres"}' REDIS_ENDPOINT: 'redis://redis:6379' S3_BUCKET_NAME: test-bucket AWS_ACCESS_KEY_ID: access-key-id @@ -72,7 +72,7 @@ services: DEBUG: true DJANGO_SECRET_KEY: this_is_an_example_use_a_proper_key_in_production DJANGO_SETTINGS_MODULE: demodjango.settings - RDS_DATABASE_CREDENTIALS: '{"password":"pgSecretPassword","dbname":"main","engine":"postgres","port":5432,"dbInstanceIdentifier":"xxx","host":"postgres-rds","username":"postgres"}' + RDS_POSTGRES_CREDENTIALS: 
'{"password":"pgSecretPassword","dbname":"main","engine":"postgres","port":5432,"dbInstanceIdentifier":"xxx","host":"postgres-rds","username":"postgres"}' celery-beat: build: @@ -92,7 +92,7 @@ services: DEBUG: true DJANGO_SECRET_KEY: this_is_an_example_use_a_proper_key_in_production DJANGO_SETTINGS_MODULE: demodjango.settings - RDS_DATABASE_CREDENTIALS: '{"password":"pgSecretPassword","dbname":"main","engine":"postgres","port":5432,"dbInstanceIdentifier":"xxx","host":"postgres-rds","username":"postgres"}' + RDS_POSTGRES_CREDENTIALS: '{"password":"pgSecretPassword","dbname":"main","engine":"postgres","port":5432,"dbInstanceIdentifier":"xxx","host":"postgres-rds","username":"postgres"}' postgres-rds: image: postgres diff --git a/migrate.py b/migrate.py index 671b058..849459f 100644 --- a/migrate.py +++ b/migrate.py @@ -2,9 +2,9 @@ import subprocess ACTIVE_CHECKS = [x.strip() for x in os.getenv("ACTIVE_CHECKS", "").split(",")] -RDS_DATABASE_CREDENTIALS = os.getenv("RDS_DATABASE_CREDENTIALS", "") +RDS_POSTGRES_CREDENTIALS = os.getenv("RDS_POSTGRES_CREDENTIALS", "") -if RDS_DATABASE_CREDENTIALS: +if RDS_POSTGRES_CREDENTIALS: migrations = ["python manage.py migrate", "python manage.py migrate --database sqlite"] else: migrations = ["python manage.py migrate"]