start project

commit 4b0673c467
Ivanov Matvey, 2025-03-13 17:54:38 +10:00
44 changed files with 1419 additions and 0 deletions

.dockerignore (new file, 31 lines)

@@ -0,0 +1,31 @@
# python generated files
__pycache__/
*.py[oc]
build/
dist/
wheels/
*.egg-info
# venv
.venv
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# ruff
.ruff_cache/
# rye
requirements-dev.lock
# Docker compose
compose.yaml
compose-dev.yaml
# minio and db dump
dumps
# env (pass it through compose)
.env

.env.example (new file, 5 lines)

@@ -0,0 +1,5 @@
POSTGRES_HOST=localhost
POSTGRES_DB1=db1
POSTGRES_DB2=db2
POSTGRES_USER=postgres
POSTGRES_PASSWORD=postgres

.gitignore (new file, vendored, 28 lines)

@@ -0,0 +1,28 @@
# python generated files
__pycache__/
*.py[oc]
build/
dist/
wheels/
*.egg-info
.idea
# venv
.venv
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# ruff
.ruff_cache/
# rye
requirements-dev.lock
# environment
*.env
db.yaml
# object storage
dumps/minio/*

.gitlab-ci.yml (new file, 100 lines)

@@ -0,0 +1,100 @@
stages:
- lint
- build
- backup
- deploy
.configure_ssh:
before_script:
# Run ssh-agent for keys management
- 'command -v ssh-agent >/dev/null || ( apt-get update -y && apt-get install openssh-client -y )'
- eval $(ssh-agent -s)
# Add place for ssh related files
- mkdir -p ~/.ssh
- chmod 700 ~/.ssh
# Initialize token
- chmod 400 "$SSH_PRIVATE_KEY"
- ssh-add "$SSH_PRIVATE_KEY"
# Add server fingerprint to known hosts
- ssh-keyscan "$SSH_HOST" >> ~/.ssh/known_hosts
- chmod 644 ~/.ssh/known_hosts
.on_merge_request:
rules:
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
- when: never
lint-ruff:
stage: lint
image: registry.gitlab.com/pipeline-components/ruff:latest
rules:
- !reference [.on_merge_request, rules]
script:
- echo "☕ Linting with ruff"
- ruff check --output-format=gitlab src/
- echo "✅ Passed"
lint-mypy:
stage: lint
image: python:3.12
rules:
- !reference [.on_merge_request, rules]
before_script:
- pip install mypy
- apt-get update && apt-get install -y make
- make deps
script:
- echo "🐍 Typechecking with mypy"
- mypy src
- echo "✅ Passed"
build:
stage: build
image: docker:latest
services:
- docker:dind
before_script:
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
rules:
- if: '$CI_COMMIT_BRANCH == "main"'
script:
- docker pull $CI_REGISTRY_IMAGE:latest || true
- docker build --target prod --build-arg BUILDKIT_INLINE_CACHE=1 --cache-from $CI_REGISTRY_IMAGE:latest --tag $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA --tag $CI_REGISTRY_IMAGE:latest .
- docker push $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA
- docker push $CI_REGISTRY_IMAGE:latest
database-backup:
stage: backup
image: ubuntu:latest
rules:
- if: '$CI_COMMIT_BRANCH == "main"'
before_script:
- !reference [.configure_ssh, before_script]
script:
- echo "💾 backuping database"
- ssh $SSH_USER@$SSH_HOST "docker exec database pg_dump --column-inserts udom >> pre_deploy.sql"
- echo "✅ Passed"
deploy-dev:
stage: deploy
image: ubuntu:latest
rules:
- if: '$CI_COMMIT_BRANCH == "dev"'
before_script:
- !reference [.configure_ssh, before_script]
script:
- echo "🚀🧨 Deploing dev changes"
- ssh $SSH_USER@$SSH_HOST "cd /root/udom_dev/ && git pull && docker compose -f compose-dev.yaml up -d --build --remove-orphans"
- echo "✅ Passed"
deploy-main:
stage: deploy
image: ubuntu:latest
rules:
- if: '$CI_COMMIT_BRANCH == "main"'
before_script:
- !reference [.configure_ssh, before_script]
script:
- echo "🚀 Deploing changes"
- ssh $SSH_USER@$SSH_HOST "cd /root/udom/ && git pull && echo $SERVER_TOKEN | docker login registry.gitlab.com -u 'Server' --password-stdin && docker compose pull && docker compose up -d --build --remove-orphans"
- echo "✅ Passed"

Dockerfile (new file, 17 lines)

@@ -0,0 +1,17 @@
FROM python:3.12-alpine AS base
WORKDIR /backend
# README.md is needed for the hatchling build
COPY pyproject.toml requirements.lock README.md ./
RUN pip install uv --no-cache
RUN uv pip install --no-cache --system -r requirements.lock
COPY . .
FROM base AS dev
CMD ["granian", "--interface", "asgi", "src/app.py", "--reload", "--host", "0.0.0.0"]
FROM base AS prod
CMD ["granian", "--interface", "asgi", "src/app.py", "--host", "0.0.0.0"]

Makefile (new file, 25 lines)

@@ -0,0 +1,25 @@
deps:
pip install uv
uv pip install --no-cache --system -r requirements.lock
deps-dev:
rye sync
lint:
rye run ruff check src/
rye run mypy src/
migrate:
rye run alembic revision --autogenerate
rye run alembic upgrade head
format:
rye run isort src/
rye run ruff format src/
rye run black src/
run:
fastapi run src/app.py --host 0.0.0.0 --reload
run-dev:
rye run fastapi dev src/app.py --reload

README.md (new file, 15 lines)

@@ -0,0 +1,15 @@
# python_dev
# launch
```docker compose up -d```
# migrations
```cd alembic/db1 && alembic revision --autogenerate && alembic upgrade head```
```cd alembic/db2 && alembic revision --autogenerate && alembic upgrade head```
# deviations from the spec
- I didn't like that space_type and event_type were modeled as separate lookup tables.
- The backend is what works with the log table, and to create a record an API handler would have to query the child table, find the needed type (e.g. event_type = login), take its id, and only then insert the record with that id. That is also fragile: if someone deletes a type or renames it, everything falls over. And why a separate table at all? These types get full CRUD, but without matching backend code changes that is either useless or dangerous.
- I replaced them with more convenient Enums, as sketched below.
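
For illustration, a minimal sketch of the Enum approach from a handler's point of view, using the `SpaceType`/`EventType` Enums and the `Log` model added in this commit (the `log_login` helper and the session argument are hypothetical):

```python
# Hypothetical handler-side usage: no round trip to a lookup table,
# the Enum member itself is the value stored in the column.
from sqlalchemy.ext.asyncio import AsyncSession

from src.adapters.database.models.common import EventType, SpaceType
from src.adapters.database.models.db2.log import Log


async def log_login(session: AsyncSession, user_id: int) -> None:
    session.add(Log(
        user_id=user_id,
        space_type=SpaceType.GLOBAL,
        event_type=EventType.LOGIN,
    ))
    await session.commit()
```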

alembic/db1/alembic.ini (new file, 115 lines)

@@ -0,0 +1,115 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts.
# Use forward slashes (/) also on windows to provide an os agnostic path
script_location = .
; script_location = alembic/db1
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library.
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =
# max length of characters to apply to the "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = driver://user:pass@localhost/dbname
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

alembic/db1/env.py (new file, 90 lines)

@@ -0,0 +1,90 @@
import asyncio
from logging.config import fileConfig
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
from alembic import context
from src.settings import settings
from src.adapters.database.models import BaseDB1
from src.adapters.database.models.db1.post import * # noqa: F401
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
config.set_main_option("sqlalchemy.url", settings.postgres_url_db1)
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
fileConfig(config.config_file_name)
target_metadata = BaseDB1.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
)
with context.begin_transaction():
context.run_migrations()
def do_run_migrations(connection: Connection) -> None:
context.configure(connection=connection, target_metadata=target_metadata)
with context.begin_transaction():
context.run_migrations()
async def run_async_migrations() -> None:
"""In this scenario we need to create an Engine
and associate a connection with the context.
"""
connectable = async_engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
async with connectable.connect() as connection:
await connection.run_sync(do_run_migrations)
await connectable.dispose()
def run_migrations_online() -> None:
"""Run migrations in 'online' mode."""
asyncio.run(run_async_migrations())
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()

alembic/db1/script.py.mako (new file, 26 lines)

@@ -0,0 +1,26 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

alembic/db2/alembic.ini (new file, 115 lines)

@@ -0,0 +1,115 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts.
# Use forward slashes (/) also on windows to provide an os agnostic path
script_location = .
; script_location = alembic/db2
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library.
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =
# max length of characters to apply to the "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = driver://user:pass@localhost/dbname
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

alembic/db2/env.py (new file, 92 lines)

@@ -0,0 +1,92 @@
import asyncio
from logging.config import fileConfig
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
from alembic import context
from src.settings import settings
from src.adapters.database.models import BaseDB2
from src.adapters.database.models.db2.log import Log # noqa: F401
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
config.set_main_option("sqlalchemy.url", settings.postgres_url_db2)
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
fileConfig(config.config_file_name)
target_metadata = BaseDB2.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
)
with context.begin_transaction():
context.run_migrations()
def do_run_migrations(connection: Connection) -> None:
context.configure(connection=connection, target_metadata=target_metadata)
with context.begin_transaction():
context.run_migrations()
async def run_async_migrations() -> None:
"""In this scenario we need to create an Engine
and associate a connection with the context.
"""
connectable = async_engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
async with connectable.connect() as connection:
await connection.run_sync(do_run_migrations)
await connectable.dispose()
def run_migrations_online() -> None:
"""Run migrations in 'online' mode."""
asyncio.run(run_async_migrations())
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()

alembic/db2/script.py.mako (new file, 26 lines)

@@ -0,0 +1,26 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

alembic/db2/versions/… (revision 581af68b8c68, new file, 37 lines)

@@ -0,0 +1,37 @@
"""empty message
Revision ID: 581af68b8c68
Revises:
Create Date: 2025-03-13 17:43:53.879984
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '581af68b8c68'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('log',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('datetime', sa.TIMESTAMP(timezone=True), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('space_type', sa.Enum('GLOBAL', 'BLOG', 'POST', name='spacetype'), nullable=False),
sa.Column('event_type', sa.Enum('LOGIN', 'COMMENT', 'CREATE_POST', 'DELETE_POST', 'LOGOUT', name='eventtype'), nullable=False),
sa.PrimaryKeyConstraint('id')
)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('log')
# ### end Alembic commands ###

compose.yaml (new file, 52 lines)

@@ -0,0 +1,52 @@
name: python_dev-farpost
services:
backend:
container_name: farpost-backend
restart: always
build: .
env_file: .env
healthcheck:
test: wget -q --spider http://localhost:8000/healthcheck  # curl is not present in the alpine base image
interval: 60s
start_period: 1s
timeout: 600s
depends_on:
database:
condition: service_healthy
restart: true
ports:
- 8000:8000
environment:
POSTGRES_HOST: ${POSTGRES_HOST}
POSTGRES_DB1: ${POSTGRES_DB1:-db1}
POSTGRES_DB2: ${POSTGRES_DB2:-db2}
POSTGRES_USER: ${POSTGRES_USER}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
database:
container_name: farpost-database
restart: always
image: postgres:latest
env_file: .env
volumes:
- ./database/create-multiple-postgresql-databases.sh:/docker-entrypoint-initdb.d/launch.sh:ro
- ./database/dumps:/docker-entrypoint-initdb.d/dumps:ro
- postgres-data:/var/lib/postgresql/data
healthcheck:
test: pg_isready -d ${POSTGRES_DB1:-db1}
interval: 60s
start_period: 1s
timeout: 600s
ports:
- 5432:5432
environment:
POSTGRES_MULTIPLE_DATABASES: ${POSTGRES_DB1:-db1}, ${POSTGRES_DB2:-db2}
POSTGRES_USER: ${POSTGRES_USER}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
volumes:
postgres-data:
name: farpost-postgres-data

database/create-multiple-postgresql-databases.sh (new file, 27 lines)

@@ -0,0 +1,27 @@
#!/bin/bash
set -e
set -u
function create_user_and_database() {
local database=$1
echo " Creating user '$POSTGRES_USER' and database '$database'"
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" <<-EOSQL
CREATE DATABASE $database;
GRANT ALL PRIVILEGES ON DATABASE $database TO $POSTGRES_USER;
EOSQL
}
function import_dump() {
local database=$1
psql -U "$POSTGRES_USER" -d "$database" -a -f "docker-entrypoint-initdb.d/$database.sql"
}
if [ -n "$POSTGRES_MULTIPLE_DATABASES" ]; then
echo "Multiple database creation requested: $POSTGRES_MULTIPLE_DATABASES"
for db in $(echo $POSTGRES_MULTIPLE_DATABASES | tr ',' ' '); do
create_user_and_database $db
# import_dump $db
done
echo "Multiple databases created"
fi

database/dumps/db1.sql (new file, empty)

database/dumps/db2.sql (new file, empty)

pyproject.toml (new file, 43 lines)

@@ -0,0 +1,43 @@
[project]
name = "python_dev"
version = "0.1.0"
description = "Add your description here"
authors = [
{ name = "matv864", email = "matv864@gmail.com" }
]
dependencies = [
"fastapi>=0.114.0",
"sqlalchemy>=2.0.34",
"pydantic-settings>=2.4.0",
"alembic>=1.13.2",
"psycopg2-binary>=2.9.9",
"aioboto3>=13.1.1",
"asyncpg>=0.29.0",
"python-dotenv>=1.0.1",
"fastadmin>=0.2.13",
"bcrypt>=4.2.0",
"passlib>=1.7.4",
"pydantic[email]>=2.9.2",
"beautifulsoup4>=4.12.3",
"granian[reload]>=1.7.6"
]
readme = "README.md"
requires-python = ">= 3.12"
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
[tool.rye]
managed = true
dev-dependencies = [
"ruff>=0.6.4",
"mypy>=1.11.2"
]
[tool.hatch.metadata]
allow-direct-references = true
[tool.hatch.build.targets.wheel]
packages = ["src/"]

requirements.lock (new file, 128 lines)

@@ -0,0 +1,128 @@
# generated by rye
# use `rye lock` or `rye sync` to update this lockfile
#
# last locked with the following flags:
# pre: false
# features: []
# all-features: false
# with-sources: false
# generate-hashes: false
# universal: false
-e file:.
aioboto3==13.1.1
# via python-dev
aiobotocore==2.13.1
# via aioboto3
aiofiles==24.1.0
# via aioboto3
aiohappyeyeballs==2.4.0
# via aiohttp
aiohttp==3.10.5
# via aiobotocore
aioitertools==0.12.0
# via aiobotocore
aiosignal==1.3.1
# via aiohttp
alembic==1.13.2
# via python-dev
annotated-types==0.7.0
# via pydantic
anyio==4.4.0
# via starlette
# via watchfiles
asgiref==3.8.1
# via fastadmin
asyncpg==0.29.0
# via python-dev
attrs==24.2.0
# via aiohttp
bcrypt==4.2.0
# via python-dev
beautifulsoup4==4.12.3
# via python-dev
boto3==1.34.131
# via aiobotocore
botocore==1.34.131
# via aiobotocore
# via boto3
# via s3transfer
click==8.1.7
# via granian
colorama==0.4.6
# via click
dnspython==2.7.0
# via email-validator
email-validator==2.2.0
# via pydantic
fastadmin==0.2.13
# via python-dev
fastapi==0.114.1
# via python-dev
frozenlist==1.4.1
# via aiohttp
# via aiosignal
granian==1.7.6
# via python-dev
greenlet==3.1.0
# via sqlalchemy
idna==3.8
# via anyio
# via email-validator
# via yarl
jmespath==1.0.1
# via boto3
# via botocore
mako==1.3.5
# via alembic
markupsafe==2.1.5
# via mako
multidict==6.1.0
# via aiohttp
# via yarl
passlib==1.7.4
# via python-dev
psycopg2-binary==2.9.9
# via python-dev
pydantic==2.9.2
# via fastapi
# via pydantic-settings
# via python-dev
pydantic-core==2.23.4
# via pydantic
pydantic-settings==2.5.2
# via python-dev
pyjwt==2.9.0
# via fastadmin
python-dateutil==2.9.0.post0
# via botocore
python-dotenv==1.0.1
# via pydantic-settings
# via python-dev
s3transfer==0.10.2
# via boto3
six==1.16.0
# via python-dateutil
sniffio==1.3.1
# via anyio
soupsieve==2.6
# via beautifulsoup4
sqlalchemy==2.0.34
# via alembic
# via python-dev
starlette==0.38.5
# via fastapi
typing-extensions==4.12.2
# via alembic
# via fastapi
# via pydantic
# via pydantic-core
# via sqlalchemy
urllib3==2.2.3
# via botocore
watchfiles==1.0.4
# via granian
wrapt==1.16.0
# via aiobotocore
yarl==1.11.1
# via aiohttp

src/__init__.py (new file, empty)

src/adapters/database/models/__init__.py (new file, 2 lines)

@@ -0,0 +1,2 @@
from .db1.base import Base as BaseDB1
from .db2.base import Base as BaseDB2

src/adapters/database/models/common.py (new file, 17 lines)

@@ -0,0 +1,17 @@
from enum import Enum
from datetime import datetime, timezone
def utc_signed_now():
return datetime.now(timezone.utc)
class SpaceType(Enum):
GLOBAL=0
BLOG=1
POST=2
class EventType(Enum):
LOGIN=0
COMMENT=1
CREATE_POST=2
DELETE_POST=3
LOGOUT=4

src/adapters/database/models/db1/base.py (new file, 10 lines)

@@ -0,0 +1,10 @@
from sqlalchemy.ext.asyncio import AsyncAttrs
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
class Base(AsyncAttrs, DeclarativeBase):
"""
Base class that provides metadata and an autoincrementing int4 ``id`` primary key
"""
id: Mapped[int] = mapped_column(autoincrement=True, primary_key=True)

src/adapters/database/models/db2/base.py (new file, 11 lines)

@@ -0,0 +1,11 @@
from sqlalchemy.ext.asyncio import AsyncAttrs
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
class Base(AsyncAttrs, DeclarativeBase):
"""
Base class that provides metadata and an autoincrementing int4 ``id`` primary key
"""
id: Mapped[int] = mapped_column(autoincrement=True, primary_key=True)

src/adapters/database/models/db2/log.py (new file, 16 lines)

@@ -0,0 +1,16 @@
from datetime import datetime
from sqlalchemy import INTEGER, TIMESTAMP
from sqlalchemy.orm import Mapped, mapped_column
from .base import Base
from ..common import utc_signed_now, SpaceType, EventType
class Log(Base):
__tablename__ = "log"
id: Mapped[int] = mapped_column(INTEGER, primary_key=True, autoincrement=True)
date_time: Mapped[datetime] = mapped_column("datetime", TIMESTAMP(timezone=True), default=utc_signed_now)
user_id: Mapped[int]
space_type: Mapped[SpaceType]
event_type: Mapped[EventType]

src/adapters/database/repositories.py (new file, 23 lines)

@@ -0,0 +1,23 @@
from datetime import datetime, timedelta, timezone
from typing import Optional
from uuid import UUID
from sqlalchemy import asc, desc, func, select
from sqlalchemy.sql.expression import nulls_last
from sqlalchemy.sql.selectable import Select
from src.settings import settings
from src.utils.exceptions import (
RefreshClientInfoIncorrect,
RefreshException,
RefreshExpired,
)
from src.utils.repository import (
SQLAlchemyRepository,
_sentinel,
)
# from .models import (
# )

src/adapters/database/repository_gateway.py (new file, 10 lines)

@@ -0,0 +1,10 @@
from sqlalchemy.ext.asyncio import AsyncSession
# from src.adapters.database.repositories import (
# )
class RepositoriesGateway:
def __init__(self, session: AsyncSession):
pass

src/adapters/database/session.py (new file, 8 lines)

@@ -0,0 +1,8 @@
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
from src.settings import settings
engine_db1 = create_async_engine(settings.postgres_url_db1)
engine_db2 = create_async_engine(settings.postgres_url_db2)
async_session_maker_db1 = async_sessionmaker(engine_db1, expire_on_commit=False)
async_session_maker_db2 = async_sessionmaker(engine_db2, expire_on_commit=False)

src/api/__init__.py (new file, 1 line)

@@ -0,0 +1 @@
from .healthcheck import healthcheck_router as healthcheck_router

src/api/healthcheck.py (new file, 8 lines)

@@ -0,0 +1,8 @@
from fastapi import APIRouter, status
healthcheck_router = APIRouter()
@healthcheck_router.get("/healthcheck", status_code=status.HTTP_200_OK)
async def healthcheck():
return None

src/app.py (new file, 23 lines)

@@ -0,0 +1,23 @@
from fastapi import APIRouter, FastAPI
from fastapi.middleware.cors import CORSMiddleware
from src.api import healthcheck_router
app = FastAPI(
title="python_dev-backend",
)
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_methods=["*"],
allow_headers=["*"],
)
app.include_router(healthcheck_router)
main_app_router = APIRouter(prefix="/api")
app.include_router(main_app_router)

src/settings.py (new file, 32 lines)

@@ -0,0 +1,32 @@
from functools import cached_property
from pydantic_settings import BaseSettings, SettingsConfigDict
class Settings(BaseSettings):
model_config = SettingsConfigDict(env_file=".env", extra="ignore")
POSTGRES_HOST: str
POSTGRES_DB1: str = "db1"
POSTGRES_DB2: str = "db2"
POSTGRES_USER: str
POSTGRES_PASSWORD: str
@cached_property
def postgres_url_db1(self):
return (
"postgresql+asyncpg://"
+ f"{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}@"
+ f"{self.POSTGRES_HOST}/{self.POSTGRES_DB1}"
)
@cached_property
def postgres_url_db2(self):
return (
"postgresql+asyncpg://"
+ f"{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}@"
+ f"{self.POSTGRES_HOST}/{self.POSTGRES_DB2}"
)
settings = Settings() # type: ignore
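
For reference, with the values from `.env.example` and the defaults above, the computed URLs evaluate to (illustrative):

```python
# Assuming POSTGRES_USER=postgres, POSTGRES_PASSWORD=postgres, POSTGRES_HOST=localhost:
settings.postgres_url_db1  # "postgresql+asyncpg://postgres:postgres@localhost/db1"
settings.postgres_url_db2  # "postgresql+asyncpg://postgres:postgres@localhost/db2"
```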

src/unit_of_work.py (new file, 44 lines)

@@ -0,0 +1,44 @@
from asyncio import shield
from src.adapters.database.repository_gateway import (
RepositoriesGateway,
RepositoriesGatewayProtocol,
)
from src.adapters.database.session import async_session_maker_db1 as async_session_maker  # session.py only defines per-database makers; assuming db1 is the primary one
from src.adapters.filestorage.repository import (
FileStorageProtocol,
FileStorageRepository,
)
from src.adapters.filestorage.session import s3_session_factory
from src.adapters.verification import VerificationGateway, VerificationProtocol
from src.utils.unit_of_work import UnitOfWorkProtocol
class UnitOfWork(UnitOfWorkProtocol):
file_storage: FileStorageProtocol
repositories: RepositoriesGatewayProtocol
verifications: VerificationProtocol
def __init__(self):
self.db_session_factory = async_session_maker
self.s3_session_factory = s3_session_factory
async def __aenter__(self):
self.db_session = self.db_session_factory()
self.s3_session = self.s3_session_factory()
self.file_storage = FileStorageRepository(self.s3_session)
self.repositories = RepositoriesGateway(self.db_session)
self.verifications = VerificationGateway()
return self
async def __aexit__(self, *args):
await self.rollback()
await shield(self.db_session.close())
async def commit(self):
await self.db_session.commit()
async def rollback(self):
await self.db_session.rollback()
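
A minimal usage sketch for this class (hypothetical service code; assumes the gateway imports above resolve at runtime):

```python
# Hypothetical consumer of UnitOfWork: everything inside the block shares one
# session; __aexit__ rolls back anything uncommitted and closes the session.
from src.unit_of_work import UnitOfWork


async def do_work() -> None:
    async with UnitOfWork() as uow:
        # ... use uow.repositories / uow.file_storage / uow.verifications here ...
        await uow.commit()
```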

src/utils/__init__.py (new file, empty)

src/utils/dependencies.py (new file, 20 lines)

@@ -0,0 +1,20 @@
from datetime import datetime, timezone
from typing import Annotated
from fastapi import Depends
from fastapi.security import OAuth2PasswordBearer
from src.adapters.database.models.clients import ClientType
from src.adapters.jwt_token import JwtToken
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="authentication", auto_error=False)
async def provide_jwt_token(
encoded_token: Annotated[str | None, Depends(oauth2_scheme)] = None,
) -> JwtToken:
if encoded_token is None:
return JwtToken(
exp=datetime.now(timezone.utc), client_id=0, client_type=ClientType.individ
)
return JwtToken.decode(encoded_token)

src/utils/exceptions.py (new file, 52 lines)

@@ -0,0 +1,52 @@
class RepositoryException(Exception): ...
class AccessDenied(Exception): ...
class ResultNotFound(RepositoryException): ...
class AuthorizationException(Exception): ...
class WrongCredentials(RepositoryException): ...
class ForeignKeyError(RepositoryException): ...
class JwtException(AuthorizationException): ...
class JwtExpired(JwtException): ...
class JwtInvalid(JwtException): ...
class RefreshException(AuthorizationException): ...
class RefreshExpired(RefreshException): ...
class RefreshInvalid(RefreshException): ...
class VerificationException(Exception): ...
class RefreshClientInfoIncorrect(RefreshException): ...
class FileNotFound(Exception): ...
class UserNotRegistered(Exception): ...
class PlaceOrderForeignKeyError(Exception): ...
class UserAlreadyExist(Exception): ...

src/utils/repository.py (new file, 143 lines)

@@ -0,0 +1,143 @@
from abc import abstractmethod
from typing import Any, Optional, Protocol
from sqlalchemy import func, insert, select, update
from sqlalchemy.exc import IntegrityError, NoResultFound
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.sql.base import ExecutableOption
from src.utils.exceptions import ForeignKeyError, ResultNotFound
_sentinel: Any = object()
class AbstractRepository(Protocol):
@abstractmethod
async def add_one(self, **data):
raise NotImplementedError
class SQLAlchemyRepository(AbstractRepository):
model = _sentinel
def __init__(self, session: AsyncSession):
self.session = session
async def add_one(self, **data):
stmt = insert(self.model).values(**data).returning(self.model)
try:
res = await self.session.execute(stmt)
return res.scalar_one()
except IntegrityError:
raise ForeignKeyError
async def edit_one(self, id: int, **data):
stmt = update(self.model).values(**data).filter_by(id=id).returning(self.model)
try:
res = await self.session.execute(stmt)
return res.unique().scalar_one()
except NoResultFound:
raise ResultNotFound
except IntegrityError:
raise ForeignKeyError
async def find_all(self):
stmt = select(self.model).options(*self.get_select_options())
res = await self.session.execute(stmt)
return res.unique().scalars().fetchall()
async def find_filtered(self, sort_by: str = "", **filter_by):
stmt = (
select(self.model)
.options(*self.get_select_options())
.filter_by(**filter_by)
.order_by(getattr(self.model, sort_by, None))
)
res = await self.session.execute(stmt)
return res.unique().scalars().fetchall()
async def find_filtered_and_paginated(self, page: int, limit: int, **filter_by):
stmt = (
select(self.model)
.options(*self.get_select_options())
.filter_by(**filter_by)
.offset((page - 1) * limit)
.limit(limit)
)
res = await self.session.execute(stmt)
return res.unique().scalars().fetchall()
async def find_one(self, **filter_by):
stmt = (
select(self.model)
.options(*self.get_select_options())
.filter_by(**filter_by)
)
res = await self.session.execute(stmt)
try:
return res.scalar_one()
except NoResultFound:
raise ResultNotFound
async def count_filtered(self, **filter_by):
stmt = (
select(func.count())
.select_from(self.model)
.options(*self.get_select_options())
.filter_by(**filter_by)
)
res = await self.session.execute(stmt)
return res.unique().scalar_one()
async def find_filtered_in(self, column_name: str, values: list):
stmt = (
select(self.model)
.options(*self.get_select_options())
.filter(getattr(self.model, column_name).in_(values))
)
res = await self.session.execute(stmt)
return res.unique().scalars().fetchall()
# the find call in the delete methods lets us raise a 404 instead of a 50x when the row is missing
async def delete_one(self, id: int) -> None:
await self.session.delete((await self.find_one(id=id)))
async def delete_filtered(self, **filter_by) -> None:
for cart_item in await self.find_filtered(**filter_by):
await self.session.delete(cart_item)
def get_select_options(self) -> list[ExecutableOption]:
return []
async def count_filtered_by_fastadmin(
self,
joins: list[Any],
filters: list[Any],
):
stmt = select(func.count()).select_from(self.model).filter(*filters)
for join in joins:
stmt = stmt.join(join)
res = await self.session.execute(stmt)
return res.unique().scalar_one()
async def find_filtered_by_fastadmin(
self,
options: list[Any],
joins: list[Any],
filters: list[Any],
sort_by: Optional[Any],
offset: int,
limit: int,
):
stmt = select(self.model).filter(*filters).offset(offset).limit(limit)
if sort_by is not None:
stmt = stmt.order_by(sort_by)
for join in joins:
stmt = stmt.join(join)
stmt = stmt.options(*options)
res = await self.session.execute(stmt)
return res.scalars()
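
A sketch of how a concrete repository is meant to bind a model (the `LogRepository` subclass and `record_login` helper are hypothetical; `SQLAlchemyRepository` and `Log` are from this commit):

```python
# Hypothetical concrete repository: set `model` and inherit the generic CRUD helpers.
from sqlalchemy.ext.asyncio import AsyncSession

from src.adapters.database.models.common import EventType, SpaceType
from src.adapters.database.models.db2.log import Log
from src.utils.repository import SQLAlchemyRepository


class LogRepository(SQLAlchemyRepository):
    model = Log


async def record_login(session: AsyncSession, user_id: int) -> Log:
    # add_one issues INSERT ... RETURNING and hands back the new row.
    return await LogRepository(session).add_one(
        user_id=user_id,
        space_type=SpaceType.GLOBAL,
        event_type=EventType.LOGIN,
    )
```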

src/utils/time.py (new file, 5 lines)

@@ -0,0 +1,5 @@
from datetime import datetime, timezone
def utc_signed_now():
return datetime.now(timezone.utc)

src/utils/… (new file, 13 lines)

@@ -0,0 +1,13 @@
import base64
import binascii
def is_valid_base64(value: str) -> bool:
try:
value = value.split(";base64,")[1]
base64.decodebytes(value.encode("ascii"))
return True
except binascii.Error:
return False
except IndexError:
return False

src/utils/… (new file, 9 lines)

@@ -0,0 +1,9 @@
from abc import abstractmethod
from typing import Protocol
class VerificationProtocol(Protocol):
@abstractmethod
async def send_verification_code(self, phone_number: str) -> int:
"""Sends verification code to user and returns sended code"""
raise NotImplementedError