go to automatic migrations
parent feda68d5f8
commit 3a055456e6

.env.example (new file)
@@ -0,0 +1,4 @@
POSTGRES_HOST=localhost
POSTGRES_DB=log
POSTGRES_USER=postgres
POSTGRES_PASSWORD=postgres

.gitignore (new file, vendored)
@@ -0,0 +1,11 @@
# python generated files
__pycache__/
*.py[oc]
build/
dist/
wheels/
*.egg-info

# venv
.venv
.env

alembic.ini (new file)
@@ -0,0 +1,119 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
# Use forward slashes (/) also on windows to provide an os agnostic path
script_location = .\alembic\

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library.
# Any required deps can installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =

# max length of characters to apply to the "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to .\alembic\/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:.\alembic\/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
# version_path_separator = newline
#
# Use os.pathsep. Default configuration used for new projects.
version_path_separator = os

# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url = driver://user:pass@localhost/dbname


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = check --fix REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARNING
handlers = console
qualname =

[logger_sqlalchemy]
level = WARNING
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
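
Note that alembic.ini above keeps the generated placeholder sqlalchemy.url = driver://user:pass@localhost/dbname; the real URL is injected at runtime by alembic/env.py (next file) from src.settings. As a rough sketch of how this configuration is typically driven, here is the equivalent of the usual CLI calls through Alembic's command API; the message text is made up and nothing below is part of the commit:

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")  # picks up script_location and the logging sections above

# autogenerate a revision by diffing Base.metadata against the live database
command.revision(cfg, message="add log table", autogenerate=True)

# apply all pending revisions
command.upgrade(cfg, "head")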

alembic/README (new file)
@@ -0,0 +1 @@
Generic single-database configuration.

alembic/env.py (new file)
@@ -0,0 +1,93 @@
import asyncio
from logging.config import fileConfig

from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config

from alembic import context

from src.database_adapter.model import Base
from src.settings import settings

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
config.set_main_option("sqlalchemy.url", settings.postgres_url)

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection: Connection) -> None:
    context.configure(connection=connection, target_metadata=target_metadata)

    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations() -> None:
    """In this scenario we need to create an Engine
    and associate a connection with the context.

    """

    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)

    await connectable.dispose()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""

    asyncio.run(run_async_migrations())


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
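
do_run_migrations() is synchronous on purpose: Alembic's migration runner is sync, so env.py hands it to AsyncConnection.run_sync(), which drives it over the asyncpg engine. A minimal standalone sketch of that same bridge, assuming the connection URL from .env.example (illustration only, not part of the commit):

import asyncio

from sqlalchemy import text
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import create_async_engine


def sync_part(conn: Connection) -> None:
    # receives a plain synchronous Connection backed by the async engine
    conn.execute(text("SELECT 1"))


async def main() -> None:
    # URL assumed from .env.example defaults
    engine = create_async_engine("postgresql+asyncpg://postgres:postgres@localhost/log")
    async with engine.connect() as conn:
        await conn.run_sync(sync_part)
    await engine.dispose()


asyncio.run(main())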

alembic/script.py.mako (new file)
@@ -0,0 +1,28 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    """Upgrade schema."""
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    """Downgrade schema."""
    ${downgrades if downgrades else "pass"}

alembic/versions/cdf401a636ea_.py (new file)
@@ -0,0 +1,40 @@
"""empty message

Revision ID: cdf401a636ea
Revises:
Create Date: 2025-03-14 04:16:49.143794

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = 'cdf401a636ea'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('log',
    sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
    sa.Column('user_id', sa.Integer(), nullable=False),
    sa.Column('datetime', sa.TIMESTAMP(timezone=True), nullable=False),
    sa.Column('action', sa.Enum('FIRST_ENTRY', 'REGISTRATION', 'LOGIN', 'LOGOUT', 'CREATE_THEME', 'ENTRY_THEME', 'DELETE_THEME', 'WRITE_MESSAGE', name='useractions'), nullable=False),
    sa.Column('object_id', sa.Integer(), nullable=True),
    sa.Column('response', sa.SMALLINT(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('log')
    # ### end Alembic commands ###
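
A side note on the enum labels: SQLAlchemy persists a Python enum.Enum by member name, not by value, which is why this autogenerated revision (and dumps/dump.sql below) stores 'LOGIN' and friends in upper case while the old hand-written dump.sql used lower-case strings. A tiny sketch, not part of the commit:

import enum

import sqlalchemy as sa


class UserActions(enum.Enum):
    LOGIN = 2


# the database enum type holds the member names, not the integer values
print(list(sa.Enum(UserActions, name="useractions").enums))  # ['LOGIN']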

compose.yaml
@@ -5,20 +5,21 @@ services:
     container_name: farpost-database
     restart: always
     image: postgres:latest
+    env_file: .env
     volumes:
-      - ./dump.sql:/docker-entrypoint-initdb.d/dump.sql:ro
+      - ./dumps/dump.sql:/docker-entrypoint-initdb.d/dump.sql:ro
       - postgres-data:/var/lib/postgresql/data
     healthcheck:
-      test: pg_isready -d log
+      test: pg_isready -d ${POSTGRES_DB:-log}
       interval: 60s
       start_period: 1s
       timeout: 600s
     ports:
       - 5432:5432
     environment:
-      POSTGRES_DB: log
-      POSTGRES_USER: postgres
-      POSTGRES_PASSWORD: postgres
+      POSTGRES_DB: ${POSTGRES_DB:-log}
+      POSTGRES_USER: ${POSTGRES_USER:-postgres}
+      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-postgres}


 volumes:

dump.sql (deleted)
@@ -1,75 +0,0 @@
--
-- PostgreSQL database dump
--

-- Dumped from database version 17.3 (Debian 17.3-3.pgdg120+1)
-- Dumped by pg_dump version 17.3 (Debian 17.3-3.pgdg120+1)

SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET transaction_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;

--
-- Name: user_actions; Type: TYPE; Schema: public; Owner: postgres
--

CREATE TYPE public.user_actions AS ENUM (
    'first_entry',
    'registration',
    'login',
    'logout',
    'create_theme',
    'entry_theme',
    'delete_theme',
    'write_message'
);


ALTER TYPE public.user_actions OWNER TO postgres;

SET default_tablespace = '';

SET default_table_access_method = heap;

--
-- Name: log; Type: TABLE; Schema: public; Owner: postgres
--

CREATE TABLE public.log (
    id integer NOT NULL,
    user_id integer NOT NULL,
    action public.user_actions NOT NULL,
    datetime timestamp with time zone DEFAULT (now() AT TIME ZONE 'EAST'::text) NOT NULL,
    object_id integer,
    response smallint NOT NULL
);


ALTER TABLE public.log OWNER TO postgres;

--
-- Data for Name: log; Type: TABLE DATA; Schema: public; Owner: postgres
--



--
-- Name: log log_pk; Type: CONSTRAINT; Schema: public; Owner: postgres
--

ALTER TABLE ONLY public.log
    ADD CONSTRAINT log_pk PRIMARY KEY (id);


--
-- PostgreSQL database dump complete
--

dumps/dump.sql (new file)
@@ -0,0 +1,137 @@
--
-- PostgreSQL database dump
--

-- Dumped from database version 17.3 (Debian 17.3-3.pgdg120+1)
-- Dumped by pg_dump version 17.3 (Debian 17.3-3.pgdg120+1)

SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET transaction_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;

--
-- Name: useractions; Type: TYPE; Schema: public; Owner: postgres
--

CREATE TYPE public.useractions AS ENUM (
    'FIRST_ENTRY',
    'REGISTRATION',
    'LOGIN',
    'LOGOUT',
    'CREATE_THEME',
    'ENTRY_THEME',
    'DELETE_THEME',
    'WRITE_MESSAGE'
);


ALTER TYPE public.useractions OWNER TO postgres;

SET default_tablespace = '';

SET default_table_access_method = heap;

--
-- Name: alembic_version; Type: TABLE; Schema: public; Owner: postgres
--

CREATE TABLE public.alembic_version (
    version_num character varying(32) NOT NULL
);


ALTER TABLE public.alembic_version OWNER TO postgres;

--
-- Name: log; Type: TABLE; Schema: public; Owner: postgres
--

CREATE TABLE public.log (
    id integer NOT NULL,
    user_id integer NOT NULL,
    datetime timestamp with time zone NOT NULL,
    action public.useractions NOT NULL,
    object_id integer,
    response smallint NOT NULL
);


ALTER TABLE public.log OWNER TO postgres;

--
-- Name: log_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres
--

CREATE SEQUENCE public.log_id_seq
    AS integer
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;


ALTER SEQUENCE public.log_id_seq OWNER TO postgres;

--
-- Name: log_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres
--

ALTER SEQUENCE public.log_id_seq OWNED BY public.log.id;


--
-- Name: log id; Type: DEFAULT; Schema: public; Owner: postgres
--

ALTER TABLE ONLY public.log ALTER COLUMN id SET DEFAULT nextval('public.log_id_seq'::regclass);


--
-- Data for Name: alembic_version; Type: TABLE DATA; Schema: public; Owner: postgres
--

INSERT INTO public.alembic_version (version_num) VALUES ('cdf401a636ea');


--
-- Data for Name: log; Type: TABLE DATA; Schema: public; Owner: postgres
--



--
-- Name: log_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres
--

SELECT pg_catalog.setval('public.log_id_seq', 1, false);


--
-- Name: alembic_version alembic_version_pkc; Type: CONSTRAINT; Schema: public; Owner: postgres
--

ALTER TABLE ONLY public.alembic_version
    ADD CONSTRAINT alembic_version_pkc PRIMARY KEY (version_num);


--
-- Name: log log_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres
--

ALTER TABLE ONLY public.log
    ADD CONSTRAINT log_pkey PRIMARY KEY (id);


--
-- PostgreSQL database dump complete
--
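
Because the dump pre-creates alembic_version and stamps it with 'cdf401a636ea', a database initialized from dumps/dump.sql already sits at the head revision, so a later upgrade should find nothing to do. A hedged sketch of how that could be checked with the engine from this commit (illustration only, not part of the commit):

import asyncio

from sqlalchemy import text

from src.database_adapter.engine import engine


async def check_stamp() -> None:
    async with engine.connect() as conn:
        result = await conn.execute(text("SELECT version_num FROM alembic_version"))
        # expected to print 'cdf401a636ea' once the dump has been loaded
        print(result.scalar_one())


asyncio.run(check_stamp())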

pyproject.toml (new file)
@@ -0,0 +1,29 @@
[project]
name = "data-engineer-farpost"
version = "0.1.0"
description = "Add your description here"
authors = [
    { name = "matv864", email = "matv864@gmail.com" }
]
dependencies = [
    "sqlalchemy>=2.0.39",
    "asyncpg>=0.30.0",
    "alembic>=1.15.1",
    "pydantic-settings>=2.8.1",
]
readme = "README.md"
requires-python = ">= 3.12"

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[tool.rye]
managed = true
dev-dependencies = []

[tool.hatch.metadata]
allow-direct-references = true

[tool.hatch.build.targets.wheel]
packages = ["src/data_engineer_farpost"]

requirements-dev.lock (new file)
@@ -0,0 +1,40 @@
# generated by rye
# use `rye lock` or `rye sync` to update this lockfile
#
# last locked with the following flags:
#   pre: false
#   features: []
#   all-features: false
#   with-sources: false
#   generate-hashes: false
#   universal: false

-e file:.
alembic==1.15.1
    # via data-engineer-farpost
annotated-types==0.7.0
    # via pydantic
asyncpg==0.30.0
    # via data-engineer-farpost
greenlet==3.1.1
    # via sqlalchemy
mako==1.3.9
    # via alembic
markupsafe==3.0.2
    # via mako
pydantic==2.10.6
    # via pydantic-settings
pydantic-core==2.27.2
    # via pydantic
pydantic-settings==2.8.1
    # via data-engineer-farpost
python-dotenv==1.0.1
    # via pydantic-settings
sqlalchemy==2.0.39
    # via alembic
    # via data-engineer-farpost
typing-extensions==4.12.2
    # via alembic
    # via pydantic
    # via pydantic-core
    # via sqlalchemy

requirements.lock (new file)
@@ -0,0 +1,40 @@
# generated by rye
# use `rye lock` or `rye sync` to update this lockfile
#
# last locked with the following flags:
#   pre: false
#   features: []
#   all-features: false
#   with-sources: false
#   generate-hashes: false
#   universal: false

-e file:.
alembic==1.15.1
    # via data-engineer-farpost
annotated-types==0.7.0
    # via pydantic
asyncpg==0.30.0
    # via data-engineer-farpost
greenlet==3.1.1
    # via sqlalchemy
mako==1.3.9
    # via alembic
markupsafe==3.0.2
    # via mako
pydantic==2.10.6
    # via pydantic-settings
pydantic-core==2.27.2
    # via pydantic
pydantic-settings==2.8.1
    # via data-engineer-farpost
python-dotenv==1.0.1
    # via pydantic-settings
sqlalchemy==2.0.39
    # via alembic
    # via data-engineer-farpost
typing-extensions==4.12.2
    # via alembic
    # via pydantic
    # via pydantic-core
    # via sqlalchemy

src/database_adapter/engine.py (new file)
@@ -0,0 +1,6 @@
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine

from src.settings import settings

engine = create_async_engine(settings.postgres_url)
async_session_maker_db1 = async_sessionmaker(engine, expire_on_commit=False)

src/database_adapter/model.py (new file)
@@ -0,0 +1,34 @@
from enum import Enum

from datetime import datetime, timezone

from sqlalchemy import SMALLINT, INTEGER, TIMESTAMP
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

def utc_signed_now():
    return datetime.now(timezone.utc)

class UserActions(Enum):
    FIRST_ENTRY=0
    REGISTRATION=1
    LOGIN=2
    LOGOUT=3
    CREATE_THEME=4
    ENTRY_THEME=5
    DELETE_THEME=6
    WRITE_MESSAGE=7


class Base(DeclarativeBase):
    pass

class Log(Base):
    __tablename__ = "log"
    id: Mapped[int] = mapped_column(INTEGER, primary_key=True, autoincrement=True)
    user_id: Mapped[int]
    date_time: Mapped[datetime] = mapped_column(
        "datetime", TIMESTAMP(timezone=True), default=utc_signed_now
    )
    action: Mapped[UserActions]
    object_id: Mapped[int] = mapped_column(nullable=True)
    response: Mapped[int] = mapped_column(SMALLINT)
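
For context, a short usage sketch tying model.py to engine.py: how a Log row might be written through async_session_maker_db1. The values are made up; this snippet is not part of the commit:

import asyncio

from src.database_adapter.engine import async_session_maker_db1
from src.database_adapter.model import Log, UserActions


async def write_sample() -> None:
    async with async_session_maker_db1() as session:
        # date_time defaults to utc_signed_now(); object_id is nullable
        session.add(Log(user_id=1, action=UserActions.LOGIN, response=200))
        await session.commit()


asyncio.run(write_sample())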

src/script.py (new file, empty)

src/settings.py (new file)
@@ -0,0 +1,24 @@
from functools import cached_property

from pydantic_settings import BaseSettings, SettingsConfigDict


class Settings(BaseSettings):
    model_config = SettingsConfigDict(env_file=".env", extra="ignore")

    POSTGRES_HOST: str
    POSTGRES_DB: str
    POSTGRES_USER: str
    POSTGRES_PASSWORD: str

    @cached_property
    def postgres_url(self):
        return (
            "postgresql+asyncpg://"
            + f"{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}@"
            + f"{self.POSTGRES_HOST}/{self.POSTGRES_DB}"
        )



settings = Settings() # type: ignore
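
Settings reads the POSTGRES_* variables from the same .env file that compose.yaml consumes; with the defaults from .env.example, postgres_url resolves to the asyncpg DSN shown below (illustration only, not part of the commit):

from src.settings import settings

# with .env copied from .env.example:
print(settings.postgres_url)
# postgresql+asyncpg://postgres:postgres@localhost/log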

@@ -24,8 +24,8 @@
 CREATE TABLE public.log (
     id integer NOT NULL,
     user_id integer NOT NULL,
-    action public.user_actions NOT NULL,
-    datetime timestamp with time zone DEFAULT (now() AT TIME ZONE 'EAST'::text) NOT NULL,
+    datetime timestamp with time zone NOT NULL,
+    action public.useractions NOT NULL,
     object_id integer,
     response smallint NOT NULL
 );