init

commit 5f51ac7d22
165  .gitignore  vendored  Normal file
@@ -0,0 +1,165 @@
### Python template

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt
.venv
.idea
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
.pdm.toml
.pdm-python
.pdm-build/

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
48  README.md  Normal file
@@ -0,0 +1,48 @@
# sqlalchemy-template

This guide covers setting up the project locally for development and testing purposes.

## Project Setup

### Virtual Environment

It's recommended to use a Python virtual environment to isolate dependencies:

```
python -m venv venv
. venv/bin/activate
```

### Environment Variables

Copy `src/.env.example` to `src/.env` and update any credentials, settings, etc.

### Install Dependencies

`pip install -r src/requirements.txt`

### Initialize Database

Run these from the `src/` directory (where `alembic.ini` lives).

Create a migration from the updated database schema:

`alembic revision --autogenerate -m "<migration name>"`

Run migrations to set up the database schema:

`alembic upgrade head`

## Start Local Development

### Start database and other services with Docker

`docker-compose -f docker-compose.local.yaml up -d`

### Run app

From the `src/` directory:

`uvicorn apps.main:app --reload`

### Create superuser

From the `src/` directory:

`python apps/users/scripts/create_user.py -un admin -al 3 -pass adminpass`
25  docker-compose.local.yaml  Normal file
@@ -0,0 +1,25 @@
services:
  postgres:
    image: postgres:17.5-alpine
    container_name: postgres
    command:
      - "postgres"
      - "-c"
      - "max_connections=1000"
      - "-c"
      - "statement_timeout=${DB_STATEMENT_TIMEOUT:-300s}"
      - "-c"
      - "idle_in_transaction_session_timeout=${DB_IDLE_IN_TRANSACTION_SESSION_TIMEOUT:-300s}"
    volumes:
      - postgresql-data:/var/lib/postgresql/data
    restart: on-failure
    env_file:
      - src/.env
    ports:
      - "127.0.0.1:5432:5432"


volumes:
  postgresql-data:
46  docker-compose.yaml  Normal file
@@ -0,0 +1,46 @@
services:
  nginx:
    command: nginx -g "daemon off;"
    depends_on:
      - api
    image: nginx:alpine
    restart: on-failure
    volumes:
      - ./nginx/nginx.conf:/etc/nginx/nginx.conf
    ports:
      - "127.0.0.1:8000:8000"

  postgres:
    image: postgres:17.5-alpine
    container_name: postgres
    command:
      - "postgres"
      - "-c"
      - "max_connections=1000"
      - "-c"
      - "statement_timeout=${DB_STATEMENT_TIMEOUT:-300s}"
      - "-c"
      - "idle_in_transaction_session_timeout=${DB_IDLE_IN_TRANSACTION_SESSION_TIMEOUT:-300s}"
    volumes:
      - postgresql-data:/var/lib/postgresql/data
    restart: on-failure
    env_file:
      - src/.env

  api:
    build:
      context: src
      dockerfile: ./Dockerfile
    restart: on-failure
    command: bash -c "alembic upgrade head; uvicorn apps.main:app --host 0.0.0.0 --port 8000 --reload"
    depends_on:
      - postgres
    volumes:
      - ./src/:/app/
    env_file:
      - src/.env

volumes:
  postgresql-data:
58  nginx/nginx.conf  Normal file
@@ -0,0 +1,58 @@
user nginx;
worker_processes 1;

error_log /var/log/nginx/error.log warn;
pid /var/run/nginx.pid;

events {
    worker_connections 1024;
}

http {
    include /etc/nginx/mime.types;
    default_type application/octet-stream;
    sendfile off;
    keepalive_timeout 5s;
    client_max_body_size 200M;

    map $http_upgrade $connection_upgrade {
        default upgrade;
        '' close;
    }

    log_format main '$remote_addr - $remote_user [$time_local] "$request" $status '
                    '$body_bytes_sent "$http_referer" "$http_user_agent" "$http_x_forwarded_for"';
    access_log /var/log/nginx/access.log main;


    upstream api {
        server api:8000;
    }

    server {
        listen 8000;
        charset utf-8;

        server_name _;

        location / {
            proxy_pass http://api;
            proxy_redirect off;
            proxy_set_header Host api;
            proxy_set_header X-Real-IP $remote_addr;
            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
            proxy_set_header X-Forwarded-Proto $scheme;
            proxy_set_header X-Forwarded-Host $server_name;
        }

        location /admin {
            proxy_pass http://api/admin;
            proxy_redirect off;
            proxy_set_header Host $http_host;
            proxy_set_header X-Real-IP $remote_addr;
            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
            proxy_set_header X-Forwarded-Proto $scheme;
            proxy_set_header X-Forwarded-Host $server_name;
        }
    }
}
6  src/.env.example  Normal file
@@ -0,0 +1,6 @@
POSTGRES_NAME=postgres
POSTGRES_USER=postgres
POSTGRES_PASSWORD=postgres
POSTGRES_HOST=postgres
POSTGRES_PORT=5432
ADMIN_SECRET_KEY=somesecret
25  src/Dockerfile  Normal file
@@ -0,0 +1,25 @@
FROM python:3.13-slim

WORKDIR /app

ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
ENV OPENBLAS_NUM_THREADS=1
ENV PYTHONPATH=/app

RUN apt-get update --fix-missing && apt-get install -y --no-install-recommends \
    build-essential \
    gcc \
    libffi-dev \
    libpq-dev \
    curl \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*


COPY requirements.txt .

RUN pip install --upgrade pip \
    && pip install --no-cache-dir -r requirements.txt

# Copy the application source; the build context is src/, so copy "." (".." would escape the context).
COPY . .
147  src/alembic.ini  Normal file
@@ -0,0 +1,147 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts.
# this is typically a path given in POSIX (e.g. forward slashes)
# format, relative to the token %(here)s which refers to the location of this
# ini file
script_location = db/migrations

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory. for multiple paths, the path separator
# is defined by "path_separator" below.
prepend_sys_path = .


# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires Python>=3.9 or the backports.zoneinfo library, plus the tzdata library.
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =

# max length of characters to apply to the "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to <script_location>/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "path_separator"
# below.
# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions

# path_separator; This indicates what character is used to split lists of file
# paths, including version_locations and prepend_sys_path within configparser
# files such as alembic.ini.
# The default rendered in new alembic.ini files is "os", which uses os.pathsep
# to provide os-dependent path splitting.
#
# Note that in order to support legacy alembic.ini files, this default does NOT
# take place if path_separator is not present in alembic.ini. If this
# option is omitted entirely, fallback logic is as follows:
#
# 1. Parsing of the version_locations option falls back to using the legacy
#    "version_path_separator" key, which if absent then falls back to the legacy
#    behavior of splitting on spaces and/or commas.
# 2. Parsing of the prepend_sys_path option falls back to the legacy
#    behavior of splitting on spaces, commas, or colons.
#
# Valid values for path_separator are:
#
# path_separator = :
# path_separator = ;
# path_separator = space
# path_separator = newline
#
# Use os.pathsep. Default configuration used for new projects.
path_separator = os

# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

# database URL. This is consumed by the user-maintained env.py script only.
# other means of configuring database URLs may be customized within the env.py
# file.
sqlalchemy.url = driver://user:pass@localhost/dbname


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module
# hooks = ruff
# ruff.type = module
# ruff.module = ruff
# ruff.options = check --fix REVISION_SCRIPT_FILENAME

# Alternatively, use the exec runner to execute a binary found on your PATH
# hooks = ruff
# ruff.type = exec
# ruff.executable = ruff
# ruff.options = check --fix REVISION_SCRIPT_FILENAME

# Logging configuration. This is also consumed by the user-maintained
# env.py script only.
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARNING
handlers = console
qualname =

[logger_sqlalchemy]
level = WARNING
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
0  src/apps/__init__.py  Normal file
33  src/apps/initializer.py  Normal file
@@ -0,0 +1,33 @@
from typing import Any

from fastapi import FastAPI
from sqladmin.application import Admin

from apps.users.admin import UserAdmin, UserSessionAdmin
from core.env import ADMIN_SECRET_KEY
from helpers.admin.auth import AdminAuth


def init(app: FastAPI, engine: Any):
    """
    Initialize the admin panel and API routers.
    """
    init_admin(app, engine)
    init_routers(app)


def init_routers(app: FastAPI):
    """
    Initialize routers defined in `app.api`.

    :param app: the FastAPI application to register routers on
    """
    pass


def init_admin(app: FastAPI, engine: Any):
    authentication_backend = AdminAuth(secret_key=ADMIN_SECRET_KEY)
    admin = Admin(app=app, engine=engine, authentication_backend=authentication_backend)
    admin.add_view(UserAdmin)
    admin.add_view(UserSessionAdmin)
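`init_routers` is left as a stub. A minimal sketch of what it might register once the project grows an API; the `users_router` and its `/ping` route are hypothetical, nothing like them exists in this commit:

```
# Hypothetical sketch: this router does not exist in the template yet.
from fastapi import APIRouter, FastAPI

users_router = APIRouter(prefix="/users", tags=["users"])


@users_router.get("/ping")
async def ping() -> dict:
    return {"status": "ok"}


def init_routers(app: FastAPI) -> None:
    # Register each feature router on the application.
    app.include_router(users_router)
```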
21  src/apps/main.py  Normal file
@@ -0,0 +1,21 @@
from fastapi import FastAPI
from starlette.middleware.cors import CORSMiddleware

from db import db_conn
from helpers.logging import logger
from .initializer import init


app = FastAPI()

app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["POST"],
    allow_headers=["*"],
)

logger.info("Starting application initialization...")
init(app, db_conn.engine)
logger.info("Initialization complete.")
0  src/apps/users/__init__.py  Normal file
45  src/apps/users/admin.py  Normal file
@@ -0,0 +1,45 @@
from fastapi import Request

from db.models.user import User, UserSession
from helpers.admin.auth import check_accesses_level
from helpers.admin.base.views import CustomModelView
from helpers.admin.enums import AccessLevel


class UserAdmin(CustomModelView, model=User):
    column_exclude_list = ["sessions", "password"]
    column_details_exclude_list = ["sessions"]
    form_excluded_columns = ["sessions"]

    category = "users"

    async def on_model_change(self, data, model, is_created: bool):
        # "<PasswordHash>" is the repr of an unchanged hash echoed back by the
        # edit form; dropping it keeps the stored password untouched.
        password = data.get("password")
        if password == "<PasswordHash>":
            data.pop("password")
        await super().on_model_change(data, model, is_created)

    def is_accessible(self, request: Request) -> bool:
        return check_accesses_level(
            AccessLevel.administrator, request.session["access_level"]
        )

    def is_visible(self, request: Request) -> bool:
        return check_accesses_level(
            AccessLevel.administrator, request.session["access_level"]
        )


class UserSessionAdmin(CustomModelView, model=UserSession):
    column_list = "__all__"
    category = "auth"

    def is_accessible(self, request: Request) -> bool:
        return check_accesses_level(
            AccessLevel.administrator, request.session["access_level"]
        )

    def is_visible(self, request: Request) -> bool:
        return check_accesses_level(
            AccessLevel.administrator, request.session["access_level"]
        )
0  src/apps/users/scripts/__init__.py  Normal file
42  src/apps/users/scripts/create_user.py  Normal file
@@ -0,0 +1,42 @@
import argparse
import asyncio

from db import db_conn
from db.models.user import User
from helpers.logging import logger
from repositories.user_repository import UserRepository


parser = argparse.ArgumentParser()
parser.add_argument("-un", "--username", help="user name", required=True)
parser.add_argument(
    "-al", "--access_level", type=int, help="access level", required=False
)
parser.add_argument("-pass", "--password", type=str, help="password", required=True)

args = parser.parse_args()


async def main():
    username = args.username
    async with db_conn.async_session_manager() as session:
        user_repo = UserRepository(session)
        user = await user_repo.get_by_username(username)
        if user:
            # Pass the raw string: the Password column type hashes it on assignment
            # (the Password converter accepts str, not bytes).
            await user_repo.update(
                user.id, password=args.password, access_level=args.access_level
            )
            logger.info(f"User {username} updated! access_level: {args.access_level}")
        else:
            await user_repo.create(
                User(
                    password=args.password,
                    access_level=args.access_level,
                    username=username,
                )
            )
            logger.info(f"User {username} created! access_level: {args.access_level}")


if __name__ == "__main__":
    asyncio.run(main())
0  src/core/__init__.py  Normal file
34  src/core/db_config.py  Normal file
@@ -0,0 +1,34 @@
from pathlib import Path

from pydantic import Field, computed_field
from pydantic_settings import BaseSettings, SettingsConfigDict


base_path = Path(__file__).resolve().parent.parent


class DataBaseSettings(BaseSettings):
    model_config = SettingsConfigDict(
        env_file=base_path / ".env",
        env_ignore_empty=True,
        extra="ignore",
    )
    db_host: str = Field(alias="POSTGRES_HOST")
    db_port: int = Field(alias="POSTGRES_PORT")
    db_name: str = Field(alias="POSTGRES_NAME")
    db_user: str = Field(alias="POSTGRES_USER")
    db_pass: str = Field(alias="POSTGRES_PASSWORD")

    @computed_field
    @property
    def db_url(self) -> str:
        return f"postgresql+asyncpg://{self.db_user}:{self.db_pass}@{self.db_host}:{self.db_port}/{self.db_name}"

    @computed_field
    @property
    def sync_db_url(self) -> str:
        return f"postgresql://{self.db_user}:{self.db_pass}@{self.db_host}:{self.db_port}/{self.db_name}"


db_settings = DataBaseSettings()
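With the defaults from `src/.env.example`, the computed URLs resolve as sketched below (assuming those values are what pydantic-settings picks up):

```
# Sketch assuming the values from src/.env.example are in the environment.
from core.db_config import db_settings

# Async URL used by the application engine:
#   postgresql+asyncpg://postgres:postgres@postgres:5432/postgres
print(db_settings.db_url)

# Sync URL for tools that cannot use asyncpg:
#   postgresql://postgres:postgres@postgres:5432/postgres
print(db_settings.sync_db_url)
```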
15  src/core/env.py  Normal file
@@ -0,0 +1,15 @@
import os


POSTGRES_HOST = os.environ.get('POSTGRES_HOST', 'localhost')
POSTGRES_PORT = os.environ.get('POSTGRES_PORT', '5432')
POSTGRES_NAME = os.environ.get('POSTGRES_NAME', 'postgres')
POSTGRES_USER = os.environ.get('POSTGRES_USER', 'postgres')
POSTGRES_PASSWORD = os.environ.get('POSTGRES_PASSWORD', 'postgres')

LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO").upper()
LOG_FORMAT = os.getenv(
    "LOG_FORMAT", "[%(levelname)s]: %(message)s | %(pathname)s:%(funcName)s:%(lineno)d"
)

ADMIN_SECRET_KEY = os.environ.get("ADMIN_SECRET_KEY")
0  src/crypto/__init__.py  Normal file
36  src/crypto/password.py  Normal file
@@ -0,0 +1,36 @@
import bcrypt
from sqlalchemy.ext.mutable import Mutable


class PasswordHash(Mutable):
    def __init__(self, hash_: str, rounds: int = 12):
        self.hash = str(hash_)
        self.rounds = rounds

    def __repr__(self):
        """Simple object representation."""
        return "<{}>".format(type(self).__name__)

    @classmethod
    def coerce(cls, key: str, value):
        """Ensure that loaded values are PasswordHashes."""
        if isinstance(value, PasswordHash):
            return value
        return super(PasswordHash, cls).coerce(key, value)

    @classmethod
    def new(cls, password: str, rounds: int | None = None):
        """Returns a new PasswordHash object for the given password and rounds."""
        return cls(cls._new(password, rounds))

    @staticmethod
    def _new(password: str, rounds: int | None = None):
        """Returns a new crypt hash for the given password and rounds."""
        # Fall back to bcrypt's default cost when no rounds are given;
        # gensalt(None) would raise a TypeError.
        salt = bcrypt.gensalt(rounds) if rounds is not None else bcrypt.gensalt()
        hashed = bcrypt.hashpw(password.encode(), salt)
        return hashed.decode()

    def _rehash(self, password: str):
        """Recreates the internal hash and marks the object as changed."""
        self.hash = self._new(password, self.rounds)
        self.changed()
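A quick sketch of the hash/verify round trip this class provides, using only `bcrypt` and the code above:

```
# Minimal sketch of the hash/verify round trip.
import bcrypt

from crypto.password import PasswordHash

ph = PasswordHash.new("s3cret", rounds=12)   # bcrypt-hash the plaintext
print(ph)                                    # <PasswordHash>

# Verification compares plaintext against the stored hash string.
assert bcrypt.checkpw(b"s3cret", ph.hash.encode())
assert not bcrypt.checkpw(b"wrong", ph.hash.encode())
```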
6  src/db/__init__.py  Normal file
@@ -0,0 +1,6 @@
from core.db_config import db_settings
from .models.user import *
from .providers import DataAsyncProvider


db_conn = DataAsyncProvider(db_settings.db_url)
1  src/db/migrations/README  Normal file
@@ -0,0 +1 @@
Generic single-database configuration.
99  src/db/migrations/env.py  Normal file
@@ -0,0 +1,99 @@
import asyncio
import sys
from logging.config import fileConfig
from os.path import abspath, dirname

from alembic import context
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import AsyncEngine

from core.db_config import db_settings
from db.models.base import Base


sys.path.insert(0, dirname(dirname(dirname(abspath(__file__)))))

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

config.set_main_option("sqlalchemy.url", db_settings.db_url)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
target_metadata = Base.metadata


# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        compare_type=True,
    )

    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection: Connection) -> None:
    context.configure(
        connection=connection, target_metadata=target_metadata, compare_type=True
    )

    with context.begin_transaction():
        context.run_migrations()


async def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = AsyncEngine(
        engine_from_config(
            config.get_section(config.config_ini_section),
            prefix="sqlalchemy.",
            poolclass=pool.NullPool,
            future=True,
        )
    )

    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)

    await connectable.dispose()


if context.is_offline_mode():
    run_migrations_offline()
else:
    asyncio.run(run_migrations_online())
28  src/db/migrations/script.py.mako  Normal file
@@ -0,0 +1,28 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    """Upgrade schema."""
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    """Downgrade schema."""
    ${downgrades if downgrades else "pass"}
32  src/db/migrations/versions/01c1151f5b52_init.py  Normal file
@@ -0,0 +1,32 @@
"""init

Revision ID: 01c1151f5b52
Revises:
Create Date: 2025-08-03 19:37:01.619206

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '01c1151f5b52'
down_revision: Union[str, Sequence[str], None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
57  src/db/migrations/versions/be9893939a59_add_users.py  Normal file
@@ -0,0 +1,57 @@
"""add_users

Revision ID: be9893939a59
Revises: 01c1151f5b52
Create Date: 2025-08-03 19:39:48.172257

"""
from typing import Sequence, Union

import sqlalchemy as sa
from alembic import op

from db import Password


# revision identifiers, used by Alembic.
revision: str = 'be9893939a59'
down_revision: Union[str, Sequence[str], None] = '01c1151f5b52'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('users',
        sa.Column('id', sa.BigInteger(), nullable=False),
        sa.Column('username', sa.String(length=50), nullable=False),
        sa.Column('password', Password(length=156), nullable=False),
        sa.Column('access_level',
                  sa.Enum('user', 'support', 'moderator', 'administrator', name='accesslevel'),
                  nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('username')
    )
    op.create_index(op.f('ix_users_id'), 'users', ['id'], unique=True)
    op.create_table('user_sessions',
        sa.Column('token', sa.UUID(), nullable=False),
        sa.Column('user_id', sa.BigInteger(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('token')
    )
    op.create_index(op.f('ix_user_sessions_token'), 'user_sessions', ['token'], unique=False)
    op.create_index(op.f('ix_user_sessions_user_id'), 'user_sessions', ['user_id'], unique=False)
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_user_sessions_user_id'), table_name='user_sessions')
    op.drop_index(op.f('ix_user_sessions_token'), table_name='user_sessions')
    op.drop_table('user_sessions')
    op.drop_index(op.f('ix_users_id'), table_name='users')
    op.drop_table('users')
    # ### end Alembic commands ###
0  src/db/models/__init__.py  Normal file
5  src/db/models/base.py  Normal file
@@ -0,0 +1,5 @@
"""Base."""
from sqlalchemy.orm import declarative_base


Base = declarative_base()
50  src/db/models/user.py  Normal file
@@ -0,0 +1,50 @@
import uuid

import bcrypt
from sqlalchemy import Column, ForeignKey, String, func
from sqlalchemy.orm import relationship, validates
from sqlalchemy.sql.expression import select
from sqlalchemy.sql.sqltypes import UUID, BigInteger, Enum, DateTime

from helpers.admin.enums import AccessLevel
from .base import Base
from ..types.fields import Password


class User(Base):
    __tablename__ = "users"

    id = Column(BigInteger, primary_key=True, index=True, unique=True, nullable=False)
    username = Column(String(50), nullable=False, unique=True)
    password = Column(Password(length=156), nullable=False)
    access_level = Column(Enum(AccessLevel))

    sessions = relationship("UserSession", back_populates="user")

    @validates("password")
    def _validate_password(self, key, password):
        return getattr(type(self), key).type.validator(password)

    def verify_password(self, password):
        return bcrypt.checkpw(password.encode(), self.password.hash.encode())


class UserSession(Base):
    __tablename__ = "user_sessions"

    token = Column(
        UUID(as_uuid=True),
        default=uuid.uuid4,
        nullable=False,
        index=True,
        primary_key=True,
    )
    user_id = Column(BigInteger, ForeignKey("users.id"), index=True, nullable=False)

    user = relationship("User", back_populates="sessions")
    created_at = Column(DateTime, default=func.now(), nullable=False)

    @classmethod
    def _filter_session_by_user_id(cls, user_id: int):
        query = select(cls).where(cls.user_id == user_id)
        return query
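Because of the `@validates` hook, assigning a plain string to `User.password` is coerced through `Password.validator` into a bcrypt `PasswordHash`; a minimal sketch, no database needed:

```
# Minimal sketch: the @validates hook hashes plaintext on assignment.
from db.models.user import User

u = User(username="alice")
u.password = "s3cret"              # coerced to a PasswordHash by Password.validator

print(u.password)                  # <PasswordHash>
assert u.verify_password("s3cret")
assert not u.verify_password("wrong")
```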
34  src/db/providers.py  Normal file
@@ -0,0 +1,34 @@
from collections.abc import AsyncGenerator
from contextlib import asynccontextmanager

from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine, async_sessionmaker
from sqlalchemy.sql.expression import text

from helpers.logging import logger


class DataAsyncProvider:
    def __init__(self, db_url: str):
        self.url = db_url
        self.engine = create_async_engine(self.url, echo=False, future=True)
        self.async_session_factory = async_sessionmaker(
            self.engine, class_=AsyncSession, expire_on_commit=False
        )

    async def get_async_session(self) -> AsyncGenerator[AsyncSession, None]:
        async with self.async_session_factory() as session:
            yield session

    @asynccontextmanager
    async def async_session_manager(self) -> AsyncGenerator[AsyncSession, None]:
        async with self.async_session_factory() as session:
            yield session

    async def is_connected(self) -> bool:
        try:
            async with self.async_session_manager() as session:
                await session.execute(text("SELECT 1"))
            return True
        except Exception as ex:
            logger.exception(ex)
            return False
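`get_async_session` is shaped like a FastAPI dependency. A sketch of wiring it into an endpoint; the `/health` route below is hypothetical, not part of this commit:

```
# Hypothetical endpoint showing get_async_session as a FastAPI dependency.
from fastapi import APIRouter, Depends
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession

from db import db_conn

router = APIRouter()


@router.get("/health")
async def health(session: AsyncSession = Depends(db_conn.get_async_session)) -> dict:
    await session.execute(text("SELECT 1"))
    return {"database": "ok"}
```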
0  src/db/types/__init__.py  Normal file
63  src/db/types/fields.py  Normal file
@@ -0,0 +1,63 @@
import json

from sqlalchemy.types import TypeDecorator, Text, String

from crypto.password import PasswordHash


class JSONEncodedDict(TypeDecorator):

    impl = Text

    def process_bind_param(self, value, dialect):
        if isinstance(value, dict | list | tuple):
            return json.dumps(value, separators=(",", ":"))
        elif isinstance(value, str):
            # Validate that the string is already well-formed JSON before storing it.
            json.loads(value)
            return value

    def process_result_value(self, value, dialect):
        if value is not None:
            value = json.loads(value)
        return value


class Password(TypeDecorator):
    """Allows storing and retrieving password hashes using PasswordHash."""

    impl = String

    def __init__(self, rounds=12, **kwds):
        self.rounds = rounds
        super(Password, self).__init__(**kwds)

    def process_bind_param(self, value, dialect):
        """Ensure the value is a PasswordHash and then return its hash."""
        if value is not None:
            return self._convert(value).hash
        return None

    def process_result_value(self, value, dialect):
        """Convert the hash to a PasswordHash, if it's non-NULL."""
        if value is not None:
            return PasswordHash(value, rounds=self.rounds)
        return None

    def validator(self, password):
        """Provides a validator/converter for @validates usage."""
        return self._convert(password)

    def _convert(self, value):
        """Returns a PasswordHash from the given string.

        PasswordHash instances or None values will return unchanged.
        Strings will be hashed and the resulting PasswordHash returned.
        Any other input will result in a TypeError.
        """
        if isinstance(value, PasswordHash):
            return value
        elif isinstance(value, str):
            return PasswordHash.new(value, self.rounds)
        elif value is not None:
            raise TypeError("Cannot convert {} to a PasswordHash".format(type(value)))
        return None
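A short sketch of the `_convert` coercion rules, exercised without a database:

```
# Sketch of Password._convert coercion, no database involved.
from crypto.password import PasswordHash
from db.types.fields import Password

col_type = Password(length=156)

hashed = col_type.validator("s3cret")        # str -> PasswordHash (bcrypt)
assert isinstance(hashed, PasswordHash)
assert col_type.validator(hashed) is hashed  # PasswordHash passes through unchanged

try:
    col_type.validator(12345)                # anything else raises TypeError
except TypeError as exc:
    print(exc)
```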
0  src/helpers/__init__.py  Normal file
0  src/helpers/admin/__init__.py  Normal file
60  src/helpers/admin/auth.py  Normal file
@@ -0,0 +1,60 @@
from fastapi import Request
from sqladmin.authentication import AuthenticationBackend

from db import db_conn
from db.models.user import UserSession
from repositories.user_repository import UserRepository, UserSessionRepository
from .enums import AccessLevel


class AdminAuth(AuthenticationBackend):

    async def login(self, request: Request) -> bool:
        form = await request.form()
        username, password = form["username"], form["password"]
        async with db_conn.async_session_manager() as session:
            user_rep = UserRepository(session)
            user = await user_rep.get_by_username(username=username)
            if user and user.verify_password(password):
                user_session_rep = UserSessionRepository(session)
                user_session = await user_session_rep.create(
                    UserSession(
                        user_id=user.id,
                    )
                )
                request.session.update(
                    {
                        "user_id": user_session.user_id,
                        "token": user_session.token.hex,
                        "access_level": user.access_level.value,
                    }
                )
                return True
            return False

    async def logout(self, request: Request) -> bool:
        async with db_conn.async_session_manager() as session:
            user_session_rep = UserSessionRepository(session)
            await user_session_rep.delete_by_token(request.session["token"])
        request.session.clear()
        return True

    async def authenticate(self, request: Request) -> bool:
        token = request.session.get("token")
        user_session = None
        if token:
            async with db_conn.async_session_manager() as session:
                user_session_rep = UserSessionRepository(session)
                user_session = await user_session_rep.get_session_by_token(token)
                # Reject tokens that belong to a different user id.
                if user_session and user_session.user_id != request.session["user_id"]:
                    user_session = None
        return bool(user_session)


def check_accesses_level(
    access_level: int | AccessLevel, user_access_level: int | AccessLevel
):
    """Return True if the user's level meets or exceeds the required level."""
    if access_level > user_access_level:
        return False
    return True
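Since `AccessLevel` is an `IntEnum`, `check_accesses_level` compares enum members and the raw ints stored in the session interchangeably; a small sketch:

```
# IntEnum members compare directly with the ints stored in the session.
from helpers.admin.auth import check_accesses_level
from helpers.admin.enums import AccessLevel

assert check_accesses_level(AccessLevel.administrator, 3)       # admin passes
assert not check_accesses_level(AccessLevel.administrator, 1)   # support is rejected
assert check_accesses_level(AccessLevel.support, AccessLevel.moderator)
```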
0  src/helpers/admin/base/__init__.py  Normal file
17  src/helpers/admin/base/views.py  Normal file
@@ -0,0 +1,17 @@
from fastapi import Request
from sqladmin import ModelView

from helpers.admin.auth import check_accesses_level
from ..enums import AccessLevel


class CustomModelView(ModelView):
    def is_accessible(self, request: Request) -> bool:
        return check_accesses_level(
            AccessLevel.support, request.session["access_level"]
        )

    def is_visible(self, request: Request) -> bool:
        return check_accesses_level(
            AccessLevel.support, request.session["access_level"]
        )
8  src/helpers/admin/enums.py  Normal file
@@ -0,0 +1,8 @@
from enum import IntEnum


class AccessLevel(IntEnum):
    user = 0
    support = 1
    moderator = 2
    administrator = 3
34  src/helpers/logging.py  Normal file
@@ -0,0 +1,34 @@
import logging
import sys

from core.env import LOG_LEVEL, LOG_FORMAT


_log_format = LOG_FORMAT


def get_stream_handler(stream=sys.stderr) -> logging.StreamHandler:
    stream_handler = logging.StreamHandler(stream)
    return stream_handler


def get_logger(
    name: str = "",
    level: str = "INFO",
    business_handler: bool = False,
    log_format: str = _log_format,
) -> logging.Logger:
    logger = logging.getLogger(name)
    logger.setLevel(level)
    handler = (
        get_stream_handler()
        if business_handler is False
        else get_stream_handler(sys.stdout)
    )
    handler.setLevel(level)
    handler.setFormatter(logging.Formatter(log_format))
    logger.addHandler(handler)
    return logger


logger = get_logger(level=LOG_LEVEL)
0  src/repositories/__init__.py  Normal file
163  src/repositories/base_repository.py  Normal file
@@ -0,0 +1,163 @@
from typing import Type, TypeVar, Generic, Optional, Any, Sequence

from sqlalchemy import select, func, Row, RowMapping
from sqlalchemy.exc import NoResultFound
from sqlalchemy.ext.asyncio import AsyncSession


T = TypeVar("T")


class BaseRepository(Generic[T]):
    """
    Base repository providing common CRUD operations for async sessions.
    """

    def __init__(self, model: Type[T], session: AsyncSession):
        self.model = model
        self.session = session

    async def get(
        self,
        pk: Optional[int] = None,
        filters: Optional[list] = None,
        joins: Optional[list] = None,
        left_joins: Optional[list[tuple[Any, Any]]] = None,
        order_by=None,
        options: Optional[list] = None,
    ) -> Optional[T]:
        """
        Retrieve a single record by its ID or using optional filters, joins, ordering and loader options.
        If 'pk' is provided, it is added as a filter. The row is selected FOR UPDATE.
        """
        filters = filters or []
        joins = joins or []
        left_joins = left_joins or []
        options = options or []

        if pk is not None:
            filters.append(self.model.id == pk)

        query = select(self.model).where(*filters)

        for join_item in joins:
            query = query.join(join_item)

        for left_join_item in left_joins:
            target, condition = left_join_item
            query = query.outerjoin(target, condition)

        if order_by is not None:
            query = query.order_by(order_by)

        for opt in options:
            query = query.options(opt)

        result = await self.session.execute(query.with_for_update())
        return result.scalar_one_or_none()

    async def get_all(
        self,
        filters: Optional[list] = None,
        joins: Optional[list] = None,
        left_joins: Optional[list[tuple[Any, Any]]] = None,
        limit: int = 10,
        offset: int = 0,
        order_by=None,
        options: Optional[list] = None,
    ) -> Sequence[Row[Any] | RowMapping | Any]:
        """
        Retrieve all records with optional filters, joins, pagination, ordering and loader options.
        """
        filters = filters or []
        joins = joins or []
        left_joins = left_joins or []
        options = options or []

        query = select(self.model).where(*filters)

        for join_item in joins:
            query = query.join(join_item)

        for left_join_item in left_joins:
            target, condition = left_join_item
            query = query.outerjoin(target, condition)

        if order_by is not None:
            if isinstance(order_by, (list, tuple)):
                query = query.order_by(*order_by)
            else:
                query = query.order_by(order_by)

        query = query.limit(limit).offset(offset)

        for opt in options:
            query = query.options(opt)

        result = await self.session.execute(query)
        return result.scalars().all()

    async def create(self, instance: T) -> T:
        """
        Create a new record.
        """
        self.session.add(instance)
        await self.session.commit()
        await self.session.refresh(instance)
        return instance

    async def bulk_create(self, instances: list[T]) -> None:
        self.session.add_all(instances)
        await self.session.commit()

    async def update(self, pk: int, **kwargs) -> T:
        """
        Update a record by ID.
        """
        obj = await self.get(pk)
        if not obj:
            raise NoResultFound(f"{self.model.__name__} with id {pk} not found")

        for key, value in kwargs.items():
            setattr(obj, key, value)

        await self.session.commit()
        await self.session.refresh(obj)
        return obj

    async def delete(self, pk: int) -> Optional[T]:
        """
        Delete a record by ID.
        """
        obj = await self.get(pk)
        if not obj:
            raise NoResultFound(f"{self.model.__name__} with id {pk} not found")

        await self.session.delete(obj)
        await self.session.commit()
        return obj

    async def count(
        self,
        filters: Optional[list] = None,
        joins: Optional[list] = None,
        left_joins: Optional[list[tuple[Any, Any]]] = None,
    ) -> int:
        """
        Count the number of records with optional filters and joins.
        """
        filters = filters or []
        joins = joins or []
        left_joins = left_joins or []

        query = select(func.count()).select_from(self.model).where(*filters)

        for join_item in joins:
            query = query.join(join_item)

        for left_join_item in left_joins:
            target, condition = left_join_item
            query = query.outerjoin(target, condition)

        result = await self.session.execute(query)
        return result.scalar()
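A minimal usage sketch of the repository against the `User` model, assuming the Postgres service from the compose file is reachable:

```
# Usage sketch: count and page through users via the generic repository.
import asyncio

from db import db_conn
from db.models.user import User
from repositories.base_repository import BaseRepository


async def demo() -> None:
    async with db_conn.async_session_manager() as session:
        repo = BaseRepository(User, session)
        total = await repo.count()
        page = await repo.get_all(limit=10, offset=0, order_by=User.id)
        print(total, [u.username for u in page])


asyncio.run(demo())
```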
28  src/repositories/user_repository.py  Normal file
@@ -0,0 +1,28 @@
from pydantic import UUID4
from sqlalchemy import delete
from sqlalchemy.ext.asyncio import AsyncSession

from db import User, UserSession
from repositories.base_repository import BaseRepository


class UserRepository(BaseRepository):
    def __init__(self, session: AsyncSession):
        super().__init__(User, session)

    async def get_by_username(self, username: str):
        return await self.get(filters=[self.model.username == username])


class UserSessionRepository(BaseRepository):
    def __init__(self, session: AsyncSession):
        super().__init__(UserSession, session)

    async def get_session_by_token(self, token: UUID4):
        return await self.get(filters=[self.model.token == token])

    async def delete_by_token(self, token: UUID4):
        query = delete(self.model).where(self.model.token == token)
        await self.session.execute(query)
        await self.session.commit()
10  src/requirements.txt  Normal file
@@ -0,0 +1,10 @@
fastapi
asyncpg
uvicorn[standard]
sqlalchemy
psycopg2-binary
pydantic_settings
alembic
itsdangerous
sqladmin
bcrypt