Compare commits

...

1 Commits

Author SHA1 Message Date
69f02096da init django-rq 2025-02-11 20:43:52 +03:00
17 changed files with 159 additions and 69 deletions

View File

@ -11,7 +11,7 @@
# Docker
docker-compose.yml
Dockerfile
docker/Dockerfile
.docker
.dockerignore

View File

@ -12,3 +12,11 @@ DB_PORT=5432
REDIS_HOST=redis
REDIS_PORT=6379
REDIS_PASSWORD=redis
BROKER_HOST=redis
BROKER_PORT=6379
BROKER_PASSWORD=redis
BROKER_DB=1
AUTH_HTTP_USER=admin
AUTH_HTTP_PASSWORD=adminpass

View File

@ -1,20 +1,26 @@
# simpliest django(uvicorn)+postgresql+fastapi+redis+nginx docker-compose (ready for production and dev)
# simplest Django and django-rq
## How to use
To run:
`docker-compose up -d`
Site is available on port 8000.
You can make any changes in code, they will appear automatically. If you want to execute something with manage.py use:
```sh
docker-compose exec app python3 manage.py migrate
docker-compose exec app python3 manage.py makemigrations
docker-compose exec app python3 manage.py update_admin admin adminpass # create superuser
```
and so on.
## Install formatting
**Features**
- check for unsolved merge conflicts
- black formatting
- sort imports
@ -22,15 +28,20 @@ and so on.
- flake8 verification
It executes on **every** commit
```sh
pip install pre-commit flake8 black
pre-commit install
```
Apply for all files in current directory:
```sh
pre-commit run --all-files
```
If there are PEP8 errors, the commit will be blocked. To force the commit, use the --no-verify flag:
```sh
git commit --no-verify ...
```

View File

@ -14,7 +14,8 @@ services:
command: nginx -g "daemon off;"
depends_on:
- app
- api
- redis_commander
- rq_dashboard
image: nginx:alpine
restart: on-failure
volumes:
@ -26,7 +27,7 @@ services:
app:
build:
context: .
dockerfile: Dockerfile
dockerfile: docker/Dockerfile
command: bash -c 'while !</dev/tcp/db/5432; do sleep 1; done; python3 manage.py collectstatic --no-input; python3 manage.py migrate; uvicorn core.asgi:application --port 8000 --host 0.0.0.0'
volumes:
- ./src/:/app/
@ -38,20 +39,6 @@ services:
env_file:
- .env
api:
build:
context: .
dockerfile: Dockerfile
command: bash -c 'uvicorn core.asgi:fastapp --port 8000 --host 0.0.0.0'
volumes:
- ./src/:/app/
depends_on:
- db
- redis
restart: on-failure
env_file:
- .env
redis:
image: redis:latest
command: redis-server --requirepass ${REDIS_PASSWORD}
@ -59,6 +46,43 @@ services:
- redis-data:/data
restart: on-failure
redis_commander:
image: rediscommander/redis-commander:latest
restart: always
depends_on:
- redis
environment:
URL_PREFIX: /redis_admin
REDIS_HOST: ${REDIS_HOST}
REDIS_PASSWORD: ${REDIS_PASSWORD}
REDIS_PORT: ${REDIS_PORT}
HTTP_USER: ${AUTH_HTTP_USER}
HTTP_PASSWORD: ${AUTH_HTTP_PASSWORD}
rq_dashboard:
image: kudaw/rq-dashboard
depends_on:
- redis
environment:
RQ_DASHBOARD_REDIS_URL: redis://:${BROKER_PASSWORD}@${BROKER_HOST}:${BROKER_PORT}/${BROKER_DB}
RQ_DASHBOARD_USERNAME: ${AUTH_HTTP_USER}
RQ_DASHBOARD_PASSWORD: ${AUTH_HTTP_PASSWORD}
RQ_DASHBOARD_URL_PREFIX: /dashboard
worker:
build:
context: .
dockerfile: docker/DockerfileWorker
command: bash -c 'supervisord -c /etc/supervisor/conf.d/supervisord.ini'
volumes:
- ./src/:/app/
- ./src/supervisord.ini:/etc/supervisor/conf.d/supervisord.ini
depends_on:
- redis
restart: on-failure
env_file:
- .env
volumes:
postgresql-data:
static:

View File

@ -1,16 +1,16 @@
FROM python:3.11.6-slim
FROM python:3.12.0-slim
WORKDIR /app
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
RUN apt-get update && \
apt-get install -y python3-dev gcc libc-dev libffi-dev && \
apt-get -y install libpq-dev gcc
COPY src/requirements.txt .
COPY ../src/requirements.txt .
RUN pip install --upgrade pip && \
pip install -r requirements.txt
COPY src/. .
COPY ../src .

19
docker/DockerfileWorker Normal file
View File

@ -0,0 +1,19 @@
FROM python:3.12.0-slim
WORKDIR /app
# set env variables
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
RUN apt-get update && \
apt-get install -y python3-dev gcc libc-dev libffi-dev supervisor && \
apt-get -y install libpq-dev gcc
# install dependencies
COPY ../src/requirements.txt .
RUN pip install --upgrade pip && pip install -r requirements.txt
# copy project
COPY ../src .
COPY ../src/supervisord.ini /etc/supervisor/conf.d/supervisord.ini

View File

@ -23,10 +23,13 @@ http {
server app:8000;
}
upstream api {
server api:8000;
upstream redis {
server redis_commander:8081;
}
upstream rq_dashboard {
server rq_dashboard:9181;
}
server {
listen 8000;
@ -39,7 +42,7 @@ http {
alias /var/www/app/static/;
}
location /django/ {
location ~ ^/(django|django-rq)/ {
proxy_redirect off;
proxy_set_header Host app;
proxy_set_header X-Real-IP $remote_addr;
@ -48,13 +51,20 @@ http {
proxy_pass http://app;
}
location / {
proxy_redirect off;
proxy_set_header Host app;
location /dashboard {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Host $server_name;
proxy_pass http://api;
proxy_pass http://rq_dashboard/dashboard;
}
location /redis_admin {
proxy_redirect off;
proxy_set_header Host redis;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Host $server_name;
proxy_pass http://redis/redis_admin;
}
}
}

View File

@ -0,0 +1,13 @@
import time
from django.core.management import BaseCommand
from rq.job import Job
from application.tasks.example import example_task
class Command(BaseCommand):
    """Smoke-test django-rq: enqueue example_task and report its result."""

    help = "Enqueue example_task on the default queue and print the return value."

    def handle(self, *args, **options):
        # .delay() enqueues asynchronously and returns an rq Job handle;
        # the actual work is done by the rqworker processes (see supervisord).
        task: Job = example_task.delay(1, 1)
        # Poll with a deadline instead of a single fixed sleep(1): the worker
        # may take more (or less) than one second to pick the job up, and a
        # fixed sleep silently prints None when it loses that race.
        deadline = time.monotonic() + 10.0
        while time.monotonic() < deadline:
            if task.is_finished or task.is_failed:
                break
            time.sleep(0.1)
        if task.is_failed:
            # exc_info carries the worker-side traceback string.
            self.stderr.write(self.style.ERROR(f"Task failed: {task.exc_info}"))
        else:
            # return_value() is None until the job has finished (or on timeout).
            self.stdout.write(str(task.return_value()))

View File

@ -1,3 +0,0 @@
from fastapi import APIRouter
router = APIRouter()

View File

@ -1,14 +0,0 @@
from starlette import status
from starlette.responses import JSONResponse
from . import router
@router.get("/example")
async def example():
    # Health-check style endpoint (file is being removed in this commit
    # along with the FastAPI integration): always answers 200 with a
    # static JSON body.
    return JSONResponse(
        status_code=status.HTTP_200_OK,
        content={
            "status": "OK"
        }
    )

View File

View File

@ -0,0 +1,6 @@
from django_rq import job
@job("default")
def example_task(a: int, b: int) -> float:
    """Divide *a* by *b*; demo task enqueued on the 'default' RQ queue.

    Call via ``example_task.delay(a, b)`` to run it on a worker.
    Raises ZeroDivisionError when ``b == 0``.
    """
    return a / b

View File

@ -10,22 +10,6 @@ https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
import os
from django.core.asgi import get_asgi_application
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "core.settings")
application = get_asgi_application()
from application.routers.api import router
fastapp = FastAPI()
fastapp.include_router(router)
fastapp.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)

View File

@ -16,6 +16,11 @@ REDIS_HOST = os.getenv("REDIS_HOST")
REDIS_PORT = int(os.getenv("REDIS_PORT"))
REDIS_PASSWORD = os.getenv("REDIS_PASSWORD")
BROKER_HOST = os.getenv("BROKER_HOST")
BROKER_PORT = int(os.getenv("BROKER_PORT"))
BROKER_PASSWORD = os.getenv("BROKER_PASSWORD")
BROKER_DB = os.getenv("BROKER_DB")
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
@ -23,7 +28,8 @@ INSTALLED_APPS = [
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'application.apps.ApplicationConfig'
'django_rq',
'application.apps.ApplicationConfig',
]
MIDDLEWARE = [
@ -98,3 +104,12 @@ STATIC_ROOT = os.path.join(BASE_DIR, 'static')
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
os.environ["DJANGO_ALLOW_ASYNC_UNSAFE"] = "true"
RQ_QUEUES = {
'default': {
'HOST': BROKER_HOST,
'PORT': BROKER_PORT,
'DB': BROKER_DB,
'PASSWORD': BROKER_PASSWORD,
},
}

View File

@ -14,8 +14,9 @@ Including another URLconf
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from django.urls import include, path
# Route table: Django admin under /django/admin/ and the django-rq
# monitoring pages under /django-rq/ (both paths are forwarded to the
# app container by the nginx `location ~ ^/(django|django-rq)/` rule).
urlpatterns = [
    path('django/admin/', admin.site.urls),
    path('django-rq/', include('django_rq.urls'))
]

View File

@ -1,6 +1,6 @@
uvicorn==0.27.1
fastapi==0.109.0
Django==5.0.2
psycopg2
redis==4.6.0
redis==5.2.1
djangoql==0.18.1
django-rq

16
src/supervisord.ini Normal file
View File

@ -0,0 +1,16 @@
[supervisord]
nodaemon = true
logfile = /var/log/supervisor/supervisord.log
pidfile = /var/run/supervisord.pid
stdout_logfile = /dev/stdout
stdout_logfile_maxbytes = 0
[program:myworker]
command = python3 /app/manage.py rqworker default
process_name = %(program_name)s-%(process_num)s
numprocs = 10
directory = /app
stopsignal = TERM
autostart = true
autorestart = true
user = root