2 Commits

Author SHA1 Message Date
342cf151c0 updating 2024-08-13 00:55:26 +03:00
2d88f3c0c5 adding: celery 2024-08-13 00:45:14 +03:00
27 changed files with 197 additions and 280 deletions

View File

@@ -1,87 +0,0 @@
# Git
.git
.gitignore
.gitattributes
# CI
.codeclimate.yml
.travis.yml
.taskcluster.yml
# Docker
docker-compose.yml
docker/Dockerfile
.docker
.dockerignore
# Byte-compiled / optimized / DLL files
**/__pycache__/
**/*.py[cod]
# C extensions
*.so
# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.cache
nosetests.xml
coverage.xml
# Translations
*.mo
*.pot
# Django stuff:
*.log
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# PyCharm
.idea
# Python mode for VIM
.ropeproject
**/.ropeproject
# Vim swap files
**/*.swp
# VS Code
.vscode/
.flake8
.pre-commit-config.yaml

View File

@@ -13,10 +13,9 @@ REDIS_HOST=redis
REDIS_PORT=6379 REDIS_PORT=6379
REDIS_PASSWORD=redis REDIS_PASSWORD=redis
BROKER_HOST=redis RABBIT_HOST=rabbitmq
BROKER_PORT=6379 RABBIT_VHOST=/
BROKER_PASSWORD=redis RABBIT_PORT=5672
BROKER_DB=1 RABBIT_PORT_API=15672
RABBIT_LOGIN=admin
AUTH_HTTP_USER=admin RABBIT_PASSWORD=admin
AUTH_HTTP_PASSWORD=adminpass

View File

@@ -1,5 +0,0 @@
[flake8]
ignore = E203, E266, E501, W503, F403, F401, E402
max-line-length = 119
max-complexity = 18
select = B,C,E,F,W,T4,B9

View File

@@ -1,32 +0,0 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.6.0
hooks:
- id: check-merge-conflict
- repo: https://github.com/PyCQA/autoflake
rev: v2.3.1
hooks:
- id: autoflake
args:
- "--in-place"
- "--remove-duplicate-keys"
- "--remove-unused-variables"
- "--remove-all-unused-imports"
- repo: https://github.com/pre-commit/mirrors-isort
rev: v5.10.1
hooks:
- id: isort
args: ["--profile", "black"]
- repo: https://github.com/ambv/black
rev: 24.4.2
hooks:
- id: black
args:
- "--line-length=119"
- repo: https://github.com/PyCQA/flake8
rev: 7.1.0
hooks:
- id: flake8
default_language_version:
python: python3.12

View File

@@ -1,16 +1,16 @@
FROM python:3.12.0-slim FROM python:3.11.6-slim
WORKDIR /app WORKDIR /app
ENV PYTHONDONTWRITEBYTECODE=1 ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED=1 ENV PYTHONUNBUFFERED 1
RUN apt-get update && \ RUN apt-get update && \
apt-get install -y python3-dev gcc libc-dev libffi-dev && \ apt-get install -y python3-dev gcc libc-dev libffi-dev && \
apt-get -y install libpq-dev gcc apt-get -y install libpq-dev gcc
COPY ../src/requirements.txt . COPY src/requirements.txt .
RUN pip install --upgrade pip && \ RUN pip install --upgrade pip && \
pip install -r requirements.txt pip install -r requirements.txt
COPY ../src . COPY src/. .

View File

@@ -1,6 +1,4 @@
# simplest Django and django-rq # simplest django(uvicorn)+postgresql+fastapi+redis+nginx docker-compose (ready for production and dev)
## How to use
To run: To run:
`docker-compose up -d` `docker-compose up -d`
@@ -9,7 +7,7 @@ Site available on 8000 port.
You can make any changes in code, they will appear automatically. If you want to execute something with manage.py use: You can make any changes in code, they will appear automatically. If you want to execute something with manage.py use:
```sh ```
docker-compose exec app python3 manage.py migrate docker-compose exec app python3 manage.py migrate
docker-compose exec app python3 manage.py makemigrations docker-compose exec app python3 manage.py makemigrations
docker-compose exec app python3 manage.py update_admin admin adminpass # create superuser docker-compose exec app python3 manage.py update_admin admin adminpass # create superuser
@@ -17,31 +15,8 @@ docker-compose exec app python3 manage.py update_admin admin adminpass # create
and so on. and so on.
## Install formatting Example task [task_example.py](src/application/tasks/task_example.py)
**Features** Example register task [__init__.py](src/application/tasks/__init__.py)
- check for unsolved merge conflicts Example send task [send_task.py](src/application/management/commands/send_task.py)
- black formatting
- sort imports
- remove unused variables, imports, duplicates
- flake8 verification
It executes on **every** commit
```sh
pip install pre-commit flake8 black
pre-commit install
```
Apply for all files in current directory:
```sh
pre-commit run --all-files
```
If there is PEP8 errors, commit will be forbidden. To force commit use flag --no-verify:
```sh
git commit --no-verify ...
```

View File

@@ -14,8 +14,7 @@ services:
command: nginx -g "daemon off;" command: nginx -g "daemon off;"
depends_on: depends_on:
- app - app
- redis_commander - api
- rq_dashboard
image: nginx:alpine image: nginx:alpine
restart: on-failure restart: on-failure
volumes: volumes:
@@ -27,7 +26,7 @@ services:
app: app:
build: build:
context: . context: .
dockerfile: docker/Dockerfile dockerfile: Dockerfile
command: bash -c 'while !</dev/tcp/db/5432; do sleep 1; done; python3 manage.py collectstatic --no-input; python3 manage.py migrate; uvicorn core.asgi:application --port 8000 --host 0.0.0.0' command: bash -c 'while !</dev/tcp/db/5432; do sleep 1; done; python3 manage.py collectstatic --no-input; python3 manage.py migrate; uvicorn core.asgi:application --port 8000 --host 0.0.0.0'
volumes: volumes:
- ./src/:/app/ - ./src/:/app/
@@ -39,6 +38,20 @@ services:
env_file: env_file:
- .env - .env
api:
build:
context: .
dockerfile: Dockerfile
command: bash -c 'uvicorn core.asgi:fastapp --port 8000 --host 0.0.0.0'
volumes:
- ./src/:/app/
depends_on:
- db
- redis
restart: on-failure
env_file:
- .env
redis: redis:
image: redis:latest image: redis:latest
command: redis-server --requirepass ${REDIS_PASSWORD} command: redis-server --requirepass ${REDIS_PASSWORD}
@@ -46,45 +59,56 @@ services:
- redis-data:/data - redis-data:/data
restart: on-failure restart: on-failure
redis_commander: rabbitmq:
image: rediscommander/redis-commander:latest image: rabbitmq:3.13.2-management
environment:
RABBITMQ_DEFAULT_USER: ${RABBIT_LOGIN}
RABBITMQ_DEFAULT_PASS: ${RABBIT_PASSWORD}
RABBITMQ_DEFAULT_PORT: ${RABBIT_PORT}
RABBITMQ_DEFAULT_VHOST: ${RABBIT_VHOST}
restart: always restart: always
volumes:
- rabbitmq_data:/var/lib/rabbitmq
ports:
- "5672:5672"
flower:
image: mher/flower
hostname: flower
command: [
"celery",
"--broker=amqp://admin:admin@rabbitmq:5672//",
"flower",
"--broker-api=http://admin:admin@rabbitmq:15672/api/",
"--url_prefix=/flower"
]
depends_on: depends_on:
- redis - redis
environment: - app
URL_PREFIX: /redis_admin - celery_worker
REDIS_HOST: ${REDIS_HOST} - rabbitmq
REDIS_PASSWORD: ${REDIS_PASSWORD} volumes:
REDIS_PORT: ${REDIS_PORT} - flower_data:/data
HTTP_USER: ${AUTH_HTTP_USER} restart: on-failure
HTTP_PASSWORD: ${AUTH_HTTP_PASSWORD}
rq_dashboard: celery_worker:
image: kudaw/rq-dashboard
depends_on:
- redis
environment:
RQ_DASHBOARD_REDIS_URL: redis://:${BROKER_PASSWORD}@${BROKER_HOST}:${BROKER_PORT}/${BROKER_DB}
RQ_DASHBOARD_USERNAME: ${AUTH_HTTP_USER}
RQ_DASHBOARD_PASSWORD: ${AUTH_HTTP_PASSWORD}
RQ_DASHBOARD_URL_PREFIX: /dashboard
worker:
build: build:
context: . context: .
dockerfile: docker/DockerfileWorker command: celery -A core.celery_app worker --loglevel=info
command: bash -c 'supervisord -c /etc/supervisor/conf.d/supervisord.ini'
volumes: volumes:
- ./src/:/app/ - ./src/:/app/
- ./src/supervisord.ini:/etc/supervisor/conf.d/supervisord.ini
depends_on: depends_on:
- app
- redis - redis
restart: on-failure - rabbitmq
- db
env_file: env_file:
- .env - .env
volumes: volumes:
flower_data:
postgresql-data: postgresql-data:
rabbitmq_data:
static: static:
redis-data: redis-data:
external: false external: false

View File

@@ -1,19 +0,0 @@
FROM python:3.12.0-slim
WORKDIR /app
# set env variables
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
RUN apt-get update && \
apt-get install -y python3-dev gcc libc-dev libffi-dev supervisor && \
apt-get -y install libpq-dev gcc
# install dependencies
COPY ../src/requirements.txt .
RUN pip install --upgrade pip && pip install -r requirements.txt
# copy project
COPY ../src .
COPY ../src/supervisord.ini /etc/supervisor/conf.d/supervisord.ini

View File

@@ -23,12 +23,12 @@ http {
server app:8000; server app:8000;
} }
upstream redis { upstream api {
server redis_commander:8081; server api:8000;
} }
upstream rq_dashboard { upstream flower {
server rq_dashboard:9181; server flower:5555;
} }
server { server {
@@ -42,7 +42,7 @@ http {
alias /var/www/app/static/; alias /var/www/app/static/;
} }
location ~ ^/(django|django-rq)/ { location /django/ {
proxy_redirect off; proxy_redirect off;
proxy_set_header Host app; proxy_set_header Host app;
proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Real-IP $remote_addr;
@@ -51,20 +51,22 @@ http {
proxy_pass http://app; proxy_pass http://app;
} }
location /dashboard { location /flower/ {
proxy_redirect off;
proxy_set_header Host app;
proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Host $server_name; proxy_set_header X-Forwarded-Host $server_name;
proxy_pass http://rq_dashboard/dashboard; proxy_pass http://flower;
} }
location /redis_admin { location / {
proxy_redirect off; proxy_redirect off;
proxy_set_header Host redis; proxy_set_header Host app;
proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Host $server_name; proxy_set_header X-Forwarded-Host $server_name;
proxy_pass http://redis/redis_admin; proxy_pass http://api;
} }
} }
} }

View File

@@ -1,13 +0,0 @@
import time
from django.core.management import BaseCommand
from rq.job import Job
from application.tasks.example import example_task
class Command(BaseCommand):
    """One-off management command: enqueue the example RQ job and print its result."""

    def handle(self, *args, **options):
        # Enqueue example_task(1, 1) on the default RQ queue.
        task: Job = example_task.delay(1, 1)
        # Wait for a worker to pick up and finish the job.
        # NOTE(review): a fixed 1s sleep is racy — the job may not be done yet.
        time.sleep(1)
        print(task.return_value())

View File

@@ -0,0 +1,15 @@
from django.core.management.base import BaseCommand
from core import celery_app
from helpers.const.tasks import EXAMPLE_TASK_NAME
class Command(BaseCommand):
    """Management command that dispatches the example Celery task by name."""

    def handle(self, *args, **options):
        # Send by registered task name through the broker; no need to
        # import the task class itself here.
        payload = {"x": 2, "y": 3}
        celery_app.send_task(name=EXAMPLE_TASK_NAME, kwargs=payload)

View File

@@ -0,0 +1,3 @@
from fastapi import APIRouter
# Shared FastAPI router; endpoint modules attach routes to it and
# core.asgi includes it on the FastAPI app (fastapp.include_router).
router = APIRouter()

View File

@@ -0,0 +1,14 @@
from starlette import status
from starlette.responses import JSONResponse
from . import router
@router.get("/example")
async def example():
    """Health-check style endpoint: always replies 200 with {"status": "OK"}."""
    body = {"status": "OK"}
    response = JSONResponse(status_code=status.HTTP_200_OK, content=body)
    return response

View File

@@ -0,0 +1,4 @@
from core import celery_app
from .task_example import Example
# Module import side effect: instantiate the Example task and register it
# with the shared Celery app so workers can resolve it by name.
celery_app.register_task(Example())

View File

@@ -0,0 +1,13 @@
from abc import ABC, abstractmethod
from celery import Task
class ProcessingQueue(ABC, Task):
    """Abstract base for Celery tasks in this project.

    Mixes abc.ABC into celery.Task so concrete subclasses behave as normal
    Celery tasks while being forced to implement processed_task().
    """

    @abstractmethod
    def processed_task(self, *args, **kwargs):
        """Do the task's actual work; subclasses must override."""
        pass

    def run(self, *args, **kwargs):
        # Celery calls run(); delegate to the subclass implementation and
        # propagate its return value as the task result.
        return self.processed_task(*args, **kwargs)

View File

@@ -1,6 +0,0 @@
from django_rq import job


@job("default")
def example_task(a: int, b: int) -> float:
    """RQ job on the "default" queue: return a / b.

    Raises ZeroDivisionError when b == 0.
    """
    return a / b

View File

@@ -0,0 +1,12 @@
from application.tasks.abstract.base_task import ProcessingQueue
from helpers.const.tasks import EXAMPLE_TASK_NAME
class Example(ProcessingQueue):
    """Concrete example task: adds two numbers.

    Registered under EXAMPLE_TASK_NAME; callers dispatch it by name with
    kwargs {"x": ..., "y": ...} (see the send_task management command).
    """

    name = EXAMPLE_TASK_NAME
    # Retry policy: up to 5 attempts, 5 seconds apart, for any Exception.
    max_retries = 5
    default_retry_delay = 5
    autoretry_for = (Exception,)

    def processed_task(self, x: int | float, y: int | float) -> int | float:
        """Return the sum of x and y."""
        return x + y

View File

@@ -0,0 +1,3 @@
from .celery_app import app as celery_app
__all__ = ('celery_app',)

View File

@@ -10,6 +10,22 @@ https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
import os import os
from django.core.asgi import get_asgi_application from django.core.asgi import get_asgi_application
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "core.settings") os.environ.setdefault("DJANGO_SETTINGS_MODULE", "core.settings")
application = get_asgi_application() application = get_asgi_application()
# Imported after get_asgi_application() so Django is initialized before
# application modules load — TODO confirm this ordering is required.
from application.routers.api import router

# Separate FastAPI ASGI app (served by the "api" container in
# docker-compose, alongside the Django "application" above).
fastapp = FastAPI()
fastapp.include_router(router)

# Fully open CORS policy.
# NOTE(review): allow_origins=["*"] combined with allow_credentials=True is
# rejected by browsers for credentialed requests — verify this is intended.
fastapp.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

14
src/core/celery_app.py Normal file
View File

@@ -0,0 +1,14 @@
import os
from celery import Celery
from django.conf import settings
# Make Django settings importable before the Celery app reads them.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'core.settings')

app = Celery(
    'core',
    broker=settings.CELERY_BROKER_URL,
)

# Pull all CELERY_*-prefixed values from Django settings.
# NOTE(review): this also supplies CELERY_BROKER_URL, so the explicit
# broker= argument above is presumably redundant — confirm before removing.
app.config_from_object('django.conf:settings', namespace='CELERY')

# Auto-discover task modules in all installed Django apps.
app.autodiscover_tasks()

View File

@@ -12,15 +12,6 @@ ALLOWED_HOSTS = os.getenv("ALLOWED_HOSTS", "*").split(" ")
CSRF_TRUSTED_ORIGINS = os.getenv("CSRF_TRUSTED_ORIGINS", "http://* https://*").split(" ") CSRF_TRUSTED_ORIGINS = os.getenv("CSRF_TRUSTED_ORIGINS", "http://* https://*").split(" ")
REDIS_HOST = os.getenv("REDIS_HOST")
REDIS_PORT = int(os.getenv("REDIS_PORT"))
REDIS_PASSWORD = os.getenv("REDIS_PASSWORD")
BROKER_HOST = os.getenv("BROKER_HOST")
BROKER_PORT = int(os.getenv("BROKER_PORT"))
BROKER_PASSWORD = os.getenv("BROKER_PASSWORD")
BROKER_DB = os.getenv("BROKER_DB")
INSTALLED_APPS = [ INSTALLED_APPS = [
'django.contrib.admin', 'django.contrib.admin',
'django.contrib.auth', 'django.contrib.auth',
@@ -28,8 +19,9 @@ INSTALLED_APPS = [
'django.contrib.sessions', 'django.contrib.sessions',
'django.contrib.messages', 'django.contrib.messages',
'django.contrib.staticfiles', 'django.contrib.staticfiles',
'django_rq',
'application.apps.ApplicationConfig', 'application.apps.ApplicationConfig',
'djangoql',
'django_celery_results'
] ]
MIDDLEWARE = [ MIDDLEWARE = [
@@ -105,11 +97,17 @@ STATIC_ROOT = os.path.join(BASE_DIR, 'static')
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField' DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
os.environ["DJANGO_ALLOW_ASYNC_UNSAFE"] = "true" os.environ["DJANGO_ALLOW_ASYNC_UNSAFE"] = "true"
RQ_QUEUES = { REDIS_HOST = os.getenv("REDIS_HOST")
'default': { REDIS_PORT = int(os.getenv("REDIS_PORT"))
'HOST': BROKER_HOST, REDIS_PASSWORD = os.getenv("REDIS_PASSWORD")
'PORT': BROKER_PORT,
'DB': BROKER_DB, RABBIT_HOST = os.getenv("RABBIT_HOST")
'PASSWORD': BROKER_PASSWORD, RABBIT_PORT = int(os.getenv("RABBIT_PORT"))
}, RABBIT_LOGIN = os.getenv("RABBIT_LOGIN")
} RABBIT_PASSWORD = os.getenv("RABBIT_PASSWORD")
RABBIT_VHOST = os.getenv("RABBIT_VHOST")
CELERY_BROKER_URL = f'amqp://{RABBIT_LOGIN}:{RABBIT_PASSWORD}@{RABBIT_HOST}:{RABBIT_PORT}/{RABBIT_VHOST}'
CELERY_RESULT_BACKEND = 'django-db'
CELERY_CACHE_BACKEND = 'django-cache'
CELERY_RESULT_EXTENDED = True

View File

@@ -14,9 +14,8 @@ Including another URLconf
2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) 2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
""" """
from django.contrib import admin from django.contrib import admin
from django.urls import include, path from django.urls import path
urlpatterns = [ urlpatterns = [
path('django/admin/', admin.site.urls), path('django/admin/', admin.site.urls),
path('django-rq/', include('django_rq.urls'))
] ]

View File

View File

@@ -0,0 +1 @@
# Canonical name used both to register the example Celery task and to
# dispatch it via send_task (see application/tasks and the management command).
EXAMPLE_TASK_NAME = "example"

View File

@@ -1,6 +1,9 @@
uvicorn==0.27.1 uvicorn==0.27.1
fastapi==0.109.0
Django==5.0.2 Django==5.0.2
psycopg2 psycopg2
redis==5.2.1 redis==4.6.0
djangoql==0.18.1 djangoql==0.18.1
django-rq celery==5.4.0
django-celery-beat
django-celery-results==2.5.1

View File

@@ -1,16 +0,0 @@
[supervisord]
nodaemon = true
logfile = /var/log/supervisor/supervisord.log
pidfile = /var/run/supervisord.pid
stdout_logfile = /dev/stdout
stdout_logfile_maxbytes = 0
[program:myworker]
command = python3 /app/manage.py rqworker default --name %(process_num)s --with-scheduler
process_name = %(program_name)s-%(process_num)s
numprocs = 10
directory = /app
stopsignal = TERM
autostart = true
autorestart = true
user = root