Init
This commit is contained in:
+125
@@ -0,0 +1,125 @@
|
||||
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.idea/
|
||||
.Python
|
||||
env/
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
.hypothesis/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# pyenv
|
||||
.python-version
|
||||
|
||||
# celery beat schedule file
|
||||
celerybeat-schedule
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# dotenv
|
||||
.env
|
||||
.env.development
|
||||
.env.production
|
||||
.env.local
|
||||
.env.staging
|
||||
|
||||
# virtualenv
|
||||
.venv
|
||||
venv/
|
||||
ENV/
|
||||
.vscode
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
|
||||
.DS_Store
|
||||
*.sqlite3
|
||||
media/
|
||||
*.pyc
|
||||
*.db
|
||||
*.pid
|
||||
|
||||
# Ignore Django Migrations in Development if you are working on team
|
||||
|
||||
# Only for Development only
|
||||
# **/migrations/**
|
||||
# !**/migrations
|
||||
# !**/migrations/__init__.py
|
||||
# Ignore Vim swap files
|
||||
*.sw[op]
|
||||
*~
|
||||
static/
|
||||
@@ -0,0 +1,17 @@
|
||||
# Template for fastapi backend for aayutech projects
|
||||
|
||||
## Libraries used
|
||||
1. Fastapi
|
||||
2. Alembic
|
||||
3. Postgresql with sqlalchemy
|
||||
4. Pydantic
|
||||
|
||||
## Features
|
||||
1. Base user and /login using Oauth2
|
||||
2. Api logging in audit table table
|
||||
3. Model base class including soft delete, created_at, updated_at
|
||||
4. Model auditing by default
|
||||
5. Static files routing
|
||||
|
||||
## TODO:
|
||||
5. Optional multitenant
|
||||
+149
@@ -0,0 +1,149 @@
|
||||
# A generic, single database configuration.
|
||||
|
||||
[alembic]
|
||||
# path to migration scripts.
|
||||
# this is typically a path given in POSIX (e.g. forward slashes)
|
||||
# format, relative to the token %(here)s which refers to the location of this
|
||||
# ini file
|
||||
script_location = %(here)s/alembic
|
||||
|
||||
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
|
||||
# Uncomment the line below if you want the files to be prepended with date and time
|
||||
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
|
||||
# for all available tokens
|
||||
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
|
||||
# Or organize into date-based subdirectories (requires recursive_version_locations = true)
|
||||
# file_template = %%(year)d/%%(month).2d/%%(day).2d_%%(hour).2d%%(minute).2d_%%(second).2d_%%(rev)s_%%(slug)s
|
||||
|
||||
# sys.path path, will be prepended to sys.path if present.
|
||||
# defaults to the current working directory. for multiple paths, the path separator
|
||||
# is defined by "path_separator" below.
|
||||
prepend_sys_path = .
|
||||
|
||||
|
||||
# timezone to use when rendering the date within the migration file
|
||||
# as well as the filename.
|
||||
# If specified, requires the tzdata library which can be installed by adding
|
||||
# `alembic[tz]` to the pip requirements.
|
||||
# string value is passed to ZoneInfo()
|
||||
# leave blank for localtime
|
||||
# timezone =
|
||||
|
||||
# max length of characters to apply to the "slug" field
|
||||
# truncate_slug_length = 40
|
||||
|
||||
# set to 'true' to run the environment during
|
||||
# the 'revision' command, regardless of autogenerate
|
||||
# revision_environment = false
|
||||
|
||||
# set to 'true' to allow .pyc and .pyo files without
|
||||
# a source .py file to be detected as revisions in the
|
||||
# versions/ directory
|
||||
# sourceless = false
|
||||
|
||||
# version location specification; This defaults
|
||||
# to <script_location>/versions. When using multiple version
|
||||
# directories, initial revisions must be specified with --version-path.
|
||||
# The path separator used here should be the separator specified by "path_separator"
|
||||
# below.
|
||||
# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions
|
||||
|
||||
# path_separator; This indicates what character is used to split lists of file
|
||||
# paths, including version_locations and prepend_sys_path within configparser
|
||||
# files such as alembic.ini.
|
||||
# The default rendered in new alembic.ini files is "os", which uses os.pathsep
|
||||
# to provide os-dependent path splitting.
|
||||
#
|
||||
# Note that in order to support legacy alembic.ini files, this default does NOT
|
||||
# take place if path_separator is not present in alembic.ini. If this
|
||||
# option is omitted entirely, fallback logic is as follows:
|
||||
#
|
||||
# 1. Parsing of the version_locations option falls back to using the legacy
|
||||
# "version_path_separator" key, which if absent then falls back to the legacy
|
||||
# behavior of splitting on spaces and/or commas.
|
||||
# 2. Parsing of the prepend_sys_path option falls back to the legacy
|
||||
# behavior of splitting on spaces, commas, or colons.
|
||||
#
|
||||
# Valid values for path_separator are:
|
||||
#
|
||||
# path_separator = :
|
||||
# path_separator = ;
|
||||
# path_separator = space
|
||||
# path_separator = newline
|
||||
#
|
||||
# Use os.pathsep. Default configuration used for new projects.
|
||||
path_separator = os
|
||||
|
||||
# set to 'true' to search source files recursively
|
||||
# in each "version_locations" directory
|
||||
# new in Alembic version 1.10
|
||||
# recursive_version_locations = false
|
||||
|
||||
# the output encoding used when revision files
|
||||
# are written from script.py.mako
|
||||
# output_encoding = utf-8
|
||||
|
||||
# database URL. This is consumed by the user-maintained env.py script only.
|
||||
# other means of configuring database URLs may be customized within the env.py
|
||||
# file.
|
||||
sqlalchemy.url = driver://user:pass@localhost/dbname
|
||||
|
||||
|
||||
[post_write_hooks]
|
||||
# post_write_hooks defines scripts or Python functions that are run
|
||||
# on newly generated revision scripts. See the documentation for further
|
||||
# detail and examples
|
||||
|
||||
# format using "black" - use the console_scripts runner, against the "black" entrypoint
|
||||
# hooks = black
|
||||
# black.type = console_scripts
|
||||
# black.entrypoint = black
|
||||
# black.options = -l 79 REVISION_SCRIPT_FILENAME
|
||||
|
||||
# lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module
|
||||
# hooks = ruff
|
||||
# ruff.type = module
|
||||
# ruff.module = ruff
|
||||
# ruff.options = check --fix REVISION_SCRIPT_FILENAME
|
||||
|
||||
# Alternatively, use the exec runner to execute a binary found on your PATH
|
||||
# hooks = ruff
|
||||
# ruff.type = exec
|
||||
# ruff.executable = ruff
|
||||
# ruff.options = check --fix REVISION_SCRIPT_FILENAME
|
||||
|
||||
# Logging configuration. This is also consumed by the user-maintained
|
||||
# env.py script only.
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARNING
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARNING
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
||||
@@ -0,0 +1 @@
|
||||
Generic single-database configuration.
|
||||
@@ -0,0 +1,91 @@
|
||||
from logging.config import fileConfig
import os

from dotenv import load_dotenv
from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context
from sqlalchemy_declarative_extensions import register_alembic_events

# Emit schema/role/grant/row DDL alongside regular autogenerated migrations.
register_alembic_events(schemas = True, roles = True, grants = True, rows = True)

# Load environment variables from a local .env file before reading them.
# (A second, duplicate `import os` was removed from this prelude.)
load_dotenv()

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

DATABASE_URL = os.getenv('DATABASE_URL', '')

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Override the placeholder URL in alembic.ini with the environment's value.
config.set_main_option('sqlalchemy.url', DATABASE_URL)

# add your model's MetaData object here
# for 'autogenerate' support
from app.models import Base
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
|
||||
|
||||
|
||||
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Configures the context with just a URL (no Engine), so no DBAPI is
    required; context.execute() calls emit SQL to the script output.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()
|
||||
|
||||
|
||||
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Creates an Engine from the [alembic] config section and runs the
    migrations over a live connection.
    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()
|
||||
|
||||
|
||||
# Entry point: Alembic tells us whether to emit SQL (offline) or run it (online).
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
|
||||
@@ -0,0 +1,28 @@
|
||||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
${imports if imports else ""}
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = ${repr(up_revision)}
|
||||
down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
|
||||
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
|
||||
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
"""Upgrade schema."""
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
"""Downgrade schema."""
|
||||
${downgrades if downgrades else "pass"}
|
||||
@@ -0,0 +1,106 @@
|
||||
"""Init
|
||||
|
||||
Revision ID: 34ec41e7908b
|
||||
Revises:
|
||||
Create Date: 2026-04-21 18:48:24.829721
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '34ec41e7908b'
|
||||
down_revision: Union[str, Sequence[str], None] = None
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Upgrade schema.

    Creates the users, api_log and users_audit tables plus the plpgsql
    functions/triggers that copy every users INSERT/UPDATE/DELETE row
    image into users_audit.
    """
    op.create_table('users',
        sa.Column('id', sa.Uuid(), nullable=False),
        sa.Column('username', sa.String(), nullable=False),
        sa.Column('full_name', sa.String(), nullable=False),
        sa.Column('email', sa.String(), nullable=False),
        sa.Column('type', sa.String(), nullable=False),
        sa.Column('is_archived', sa.Boolean(), nullable=False),
        sa.Column('hashed_password', sa.String(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_users'))
    )
    # ### commands auto generated by Alembic - please adjust! ###
    # Audit trigger functions: one per DML operation ('I'/'U'/'D').
    op.execute("""CREATE FUNCTION "public_users_audit_insert"() RETURNS trigger LANGUAGE plpgsql AS $$
    BEGIN
    INSERT INTO "users_audit" ("audit_operation", "audit_timestamp", "audit_current_user", "id", "username", "full_name", "email", "type", "is_archived", "hashed_password")
    SELECT 'I', now(), current_user, NEW."id", NEW."username", NEW."full_name", NEW."email", NEW."type", NEW."is_archived", NEW."hashed_password";
    RETURN NULL;
    END
    $$;""")
    op.execute("""CREATE FUNCTION "public_users_audit_update"() RETURNS trigger LANGUAGE plpgsql AS $$
    BEGIN
    INSERT INTO "users_audit" ("audit_operation", "audit_timestamp", "audit_current_user", "id", "username", "full_name", "email", "type", "is_archived", "hashed_password")
    SELECT 'U', now(), current_user, NEW."id", NEW."username", NEW."full_name", NEW."email", NEW."type", NEW."is_archived", NEW."hashed_password";
    RETURN NULL;
    END
    $$;""")
    op.execute("""CREATE FUNCTION "public_users_audit_delete"() RETURNS trigger LANGUAGE plpgsql AS $$
    BEGIN
    INSERT INTO "users_audit" ("audit_operation", "audit_timestamp", "audit_current_user", "id", "username", "full_name", "email", "type", "is_archived", "hashed_password")
    SELECT 'D', now(), current_user, OLD."id", OLD."username", OLD."full_name", OLD."email", OLD."type", OLD."is_archived", OLD."hashed_password";
    RETURN NULL;
    END
    $$;""")
    op.execute("""CREATE TRIGGER "public_users_audit_insert" AFTER INSERT ON "users" FOR EACH ROW EXECUTE PROCEDURE "public_users_audit_insert"();""")
    op.execute("""CREATE TRIGGER "public_users_audit_update" AFTER UPDATE ON "users" FOR EACH ROW EXECUTE PROCEDURE "public_users_audit_update"();""")
    op.execute("""CREATE TRIGGER "public_users_audit_delete" AFTER DELETE ON "users" FOR EACH ROW EXECUTE PROCEDURE "public_users_audit_delete"();""")
    op.create_table('api_log',
        sa.Column('id', sa.Uuid(), nullable=False),
        sa.Column('api_key', sa.Uuid(), nullable=True),
        sa.Column('ip_address', postgresql.INET(), nullable=False),
        sa.Column('path', sa.String(), nullable=False),
        sa.Column('method', sa.String(), nullable=False),
        sa.Column('status_code', sa.Integer(), nullable=False),
        sa.Column('request_body', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('response_body', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('query_params', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('path_params', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('process_time', sa.Float(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_api_log'))
    )
    # Partial unique indexes: uniqueness is only enforced for live (non-archived) rows.
    op.create_index('ix_unique_active_email', 'users', ['email'], unique=True, postgresql_where='is_archived = false')
    op.create_index('ix_unique_active_username', 'users', ['username'], unique=True, postgresql_where='is_archived = false')
    op.create_table('users_audit',
        sa.Column('audit_pk', sa.Integer(), nullable=False),
        sa.Column('audit_operation', sa.Unicode(length=1), nullable=False),
        sa.Column('audit_timestamp', sa.DateTime(timezone=True), nullable=False),
        sa.Column('audit_current_user', sa.Unicode(length=64), nullable=False),
        sa.Column('id', sa.Uuid(), nullable=True),
        sa.Column('username', sa.String(), nullable=True),
        sa.Column('full_name', sa.String(), nullable=True),
        sa.Column('email', sa.String(), nullable=True),
        sa.Column('type', sa.String(), nullable=True),
        sa.Column('is_archived', sa.Boolean(), nullable=True),
        sa.Column('hashed_password', sa.String(), nullable=True),
        sa.PrimaryKeyConstraint('audit_pk', name=op.f('pk_users_audit'))
    )
    # ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Downgrade schema.

    Fix: triggers (then their functions) are dropped BEFORE the "users"
    table. The previous order dropped "users" first, so the following
    DROP TRIGGER ... ON "users" statements failed because the relation
    no longer existed.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.execute("""DROP TRIGGER "public_users_audit_delete" ON "users";""")
    op.execute("""DROP TRIGGER "public_users_audit_update" ON "users";""")
    op.execute("""DROP TRIGGER "public_users_audit_insert" ON "users";""")
    op.execute("""DROP FUNCTION public_users_audit_delete();""")
    op.execute("""DROP FUNCTION public_users_audit_update();""")
    op.execute("""DROP FUNCTION public_users_audit_insert();""")
    op.drop_table('users_audit')
    op.drop_index('ix_unique_active_username', table_name='users', postgresql_where='is_archived = false')
    op.drop_index('ix_unique_active_email', table_name='users', postgresql_where='is_archived = false')
    op.drop_table('users')
    op.drop_table('api_log')
    # ### end Alembic commands ###
|
||||
@@ -0,0 +1,26 @@
|
||||
from pydantic import BaseModel, ConfigDict, ValidationError, EmailStr
|
||||
import uuid
|
||||
|
||||
class UserResponse(BaseModel):
    """Public representation of a user (never exposes the password hash)."""

    username: str
    id: uuid.UUID
    email: EmailStr
    type: str

    model_config = ConfigDict(from_attributes=True)
|
||||
|
||||
class CreateUserRequest(BaseModel):
    """Payload for creating a user; the plaintext password is hashed before storage."""

    username: str
    email: EmailStr
    type: str
    password: str

    model_config = ConfigDict(from_attributes=True)
|
||||
|
||||
class Token(BaseModel):
    """OAuth2 token pair returned by the login/refresh endpoints."""

    access_token: str
    refresh_token: str
    token_type: str
|
||||
|
||||
|
||||
class TokenData(BaseModel):
    """Validated JWT payload of interest: subject plus granted scopes."""

    username: str
    scopes: list[str] = []
|
||||
@@ -0,0 +1,185 @@
|
||||
"""
|
||||
Authentication according to the OAuth2 specification.
|
||||
|
||||
When user is created it's password is stored in form of hashed_password returned from function hash_password
|
||||
|
||||
Authentication flow
|
||||
|
||||
Login using username and password, -> returns jwt token if successful
|
||||
|
||||
When using other apis that needs authentication, get_current_user is called to get to validate and get the logged in user data
|
||||
get_current_user -> takes in bearer token and retrives user using oauth2_scheme
|
||||
|
||||
"""
|
||||
import jwt
|
||||
from datetime import timedelta, datetime
|
||||
from typing import Annotated
|
||||
from fastapi import Depends, FastAPI, APIRouter, HTTPException, status
|
||||
from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm, SecurityScopes
|
||||
from pydantic import BaseModel, ConfigDict, ValidationError
|
||||
from sqlalchemy import select
|
||||
from pwdlib import PasswordHash
|
||||
|
||||
from ..models import Users
|
||||
from .schemas import *
|
||||
from ..db.db import db_dependency
|
||||
from ..exceptions import ExceptionHandlerRoute
|
||||
|
||||
# TODO: move the signing key to the environment/.env file — a committed
# secret key must be rotated and kept out of version control.
SECRET_KEY = "17267606688d2ef63beeb906b8dab8448cb8b1e3d147678ed6d69eb9eb147b26"
ALGORITHM = "HS256"
ACCESS_TOKEN_EXPIRE_MINUTES = 120

router = APIRouter(prefix = '/auth', tags = ['Auth'], route_class = ExceptionHandlerRoute)

# OAuth2 password flow; clients obtain tokens from /auth/login.
oauth2_scheme = OAuth2PasswordBearer(
    tokenUrl='/auth/login',
    scopes= {
        "me": "Read information about the current user",
        "Items": "Read items"
    }
)
# NOTE(review): name looks like a typo for "password_hash"; kept as-is
# because the helper functions below reference it.
password_has = PasswordHash.recommended()
|
||||
|
||||
|
||||
"""
|
||||
Helper functions to encode and decode and retriver user
|
||||
"""
|
||||
def verify_password(plain_password, hashed_password):
    """Return True when *plain_password* matches *hashed_password*."""
    matches = password_has.verify(plain_password, hashed_password)
    return matches
|
||||
|
||||
def get_password_hash(password):
    """Hash *password* with the library's recommended algorithm."""
    digest = password_has.hash(password)
    return digest
|
||||
|
||||
def get_user(db, username: str):
    """Look up a user by username OR email; return None when absent."""
    criteria = (Users.username == username) | (Users.email == username)
    row = db.execute(select(Users).where(criteria)).first()
    return row[0] if row else None
|
||||
|
||||
|
||||
"""
|
||||
Takes in the jwt token and returns the user value from database
|
||||
Throws exceptions if username doesn't exist or password is incorrect
|
||||
"""
|
||||
def get_current_user(db: db_dependency, token : Annotated[str, Depends(oauth2_scheme)]) -> Users:
    """Resolve the bearer JWT *token* to its Users row.

    Raises HTTP 401 when the token is invalid/expired, carries no "sub"
    claim, fails payload validation, or names an unknown user.
    (Fixes the misspelled "credientals" error message and catches
    pydantic ValidationError from TokenData, which previously escaped.)
    """
    credentials_exception = HTTPException(
        status_code = status.HTTP_401_UNAUTHORIZED,
        detail = "Could not validate credentials",
        headers = {"WWW-Authenticate": "Bearer"}
    )

    try:
        # Decode and validate the signed payload.
        payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
        username = payload.get("sub")
        if username is None:
            raise credentials_exception
        token_data = TokenData(username = username)
    except (jwt.InvalidTokenError, ValidationError):
        raise credentials_exception

    # Fetch the user named by the token's subject.
    user = get_user(db, username = token_data.username)
    if user is None:
        raise credentials_exception
    return user
|
||||
|
||||
|
||||
@router.get('/users/me')
def get_curr_user(current_user : Annotated[Users, Depends(get_current_user)]):
    """Return the authenticated caller's own user record."""
    return current_user
|
||||
|
||||
|
||||
def create_access_token(data: dict, expires_delta: timedelta | None = None):
    """Return a signed JWT carrying *data* plus an "exp" claim.

    Fix: uses timezone-aware UTC for the expiry. PyJWT serializes a
    datetime's UTC time into "exp", so a naive local `datetime.now()`
    produced a wrong expiry on any non-UTC host.
    """
    from datetime import timezone  # stdlib; local import keeps module imports untouched

    to_encode = data.copy()
    lifetime = expires_delta if expires_delta else timedelta(minutes = 15)
    to_encode.update({"exp": datetime.now(timezone.utc) + lifetime})

    encoded_jwt = jwt.encode(payload = to_encode, key = SECRET_KEY, algorithm=ALGORITHM)
    return encoded_jwt
|
||||
|
||||
def create_refresh_token(data: dict):
    """Return a 7-day refresh JWT for *data* without mutating the caller's dict.

    Fixes two bugs: the original updated and encoded the caller's *data*
    (a visible side effect) while discarding its own `to_encode` copy,
    and used a naive local datetime for "exp" (PyJWT treats it as UTC).
    """
    from datetime import timezone  # stdlib; local import keeps module imports untouched

    to_encode = data.copy()
    to_encode.update({"exp": datetime.now(timezone.utc) + timedelta(days = 7)})
    return jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM)
|
||||
|
||||
@router.post("/refresh")
|
||||
def refresh_token(refresh_token : str):
|
||||
try:
|
||||
payload = jwt.decode(refresh_token, SECRET_KEY, algorithms=[ALGORITHM])
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code = status.HTTP_401_UNAUTHORIZED,
|
||||
detail = "Invlid or expired refresh token"
|
||||
)
|
||||
|
||||
if not payload:
|
||||
raise HTTPException(
|
||||
status_code = status.HTTP_401_UNAUTHORIZED,
|
||||
detail = "Invlid or expired refresh token"
|
||||
)
|
||||
|
||||
username = payload.get("sub")
|
||||
|
||||
new_access_token = create_access_token({"sub": username})
|
||||
new_refresh_token = create_refresh_token({"sub": username})
|
||||
|
||||
return {
|
||||
"access_token" : new_access_token,
|
||||
"refresh_token": new_refresh_token,
|
||||
}
|
||||
|
||||
|
||||
def authenticate_user(db, username: str, password: str):
    """Return the Users row on success; False for unknown user or bad password."""
    user = get_user(db, username)
    if user and verify_password(password, user.hashed_password):
        return user
    return False
|
||||
|
||||
|
||||
|
||||
#authenticate user using autneticate_user and return token created from create_access_token
|
||||
@router.post("/login")
|
||||
def login(form_data: Annotated[OAuth2PasswordRequestForm, Depends()], db: db_dependency):
|
||||
|
||||
#get user from database
|
||||
user = authenticate_user(db, form_data.username, form_data.password)
|
||||
|
||||
if not user:
|
||||
raise HTTPException(
|
||||
status_code = status.HTTP_401_UNAUTHORIZED,
|
||||
detail = "Username or password is incorrect",
|
||||
headers = {"WWW-Authenticate": "Bearer"}
|
||||
)
|
||||
|
||||
#create access tokens
|
||||
access_token_expires = timedelta(minutes = ACCESS_TOKEN_EXPIRE_MINUTES)
|
||||
access_token = create_access_token(data = { "sub": user.username }, expires_delta= access_token_expires)
|
||||
refresh_token = create_access_token(data = { "sub": user.username })
|
||||
return Token(
|
||||
access_token = access_token,
|
||||
refresh_token = refresh_token,
|
||||
token_type = "bearer"
|
||||
)
|
||||
|
||||
user_dependency = Annotated[Users, Depends(get_current_user)]
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -0,0 +1,39 @@
|
||||
import json
|
||||
import uuid
|
||||
|
||||
from datetime import datetime, UTC
|
||||
from starlette.responses import StreamingResponse
|
||||
|
||||
from .db.db import get_db
|
||||
from .models import ApiLog
|
||||
|
||||
|
||||
def write_log(req: "Request", res: StreamingResponse, req_body : dict, res_body: str, process_time : float):
    """Persist one ApiLog row describing a handled request/response pair.

    Fixes:
    - "Request" is now a string annotation: fastapi/starlette Request is
      not imported in this module, so the bare name raised NameError at
      import time.
    - The session is closed in a finally block, so a failing add/commit
      no longer leaks the connection.
    """
    db = next(get_db())
    try:
        # Response bodies that are not JSON are stored as NULL.
        try:
            parsed_response = json.loads(res_body)
        except Exception:
            parsed_response = None

        client_ip = req.client.host
        if client_ip == 'testclient':
            # Starlette's TestClient reports a pseudo-host that the INET
            # column would reject; normalize it to loopback.
            client_ip = '127.0.0.1'

        raw_api_key = req.headers.get("x-api-key")
        log = ApiLog(
            api_key = uuid.UUID(raw_api_key) if raw_api_key else None,
            ip_address = client_ip,
            path = req.url.path,
            method = req.method,
            status_code = res.status_code,
            request_body = req_body,
            response_body = parsed_response,
            query_params = dict(req.query_params),
            path_params = req.path_params,
            process_time = process_time,
            created_at = datetime.now(UTC)
        )

        db.add(log)
        db.commit()
    finally:
        db.close()
|
||||
@@ -0,0 +1,27 @@
|
||||
from pydantic_settings import BaseSettings, SettingsConfigDict
|
||||
from pydantic import BaseModel, Field, ConfigDict
|
||||
|
||||
|
||||
class FilterParams(BaseModel):
    """Common list-endpoint query parameters: paging, ordering, search, tags."""

    limit: int = Field(100, gt=0, le=100)
    offset: int = Field(0, ge=0)
    order_by: str = "created_at"
    search: str | None = None
    tags: list[str] = []
|
||||
|
||||
class ListResponseBase(BaseModel):
    """Base shape for paginated list responses (count plus the paging echo)."""

    total: int
    offset: int
    limit: int

    model_config = ConfigDict(from_attributes=True)
|
||||
|
||||
class Settings(BaseSettings):
    """Application settings, populated from the environment / .env file."""

    model_config = SettingsConfigDict(env_file=".env")

    DATABASE_URL: str
    TEST_DATABASE_URL: str
    FIRST_SUPERUSER: str
    FIRST_SUPERUSER_PASSWORD: str
    FIRST_SUPERUSER_EMAIL: str
|
||||
|
||||
|
||||
settings = Settings()
|
||||
@@ -0,0 +1,49 @@
|
||||
from sqlalchemy import create_engine, event
|
||||
from sqlalchemy.orm import sessionmaker, with_loader_criteria
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
from typing import Annotated
|
||||
from fastapi import Depends
|
||||
from sqlalchemy.orm import Session
|
||||
from dotenv import load_dotenv
|
||||
import os
|
||||
|
||||
load_dotenv()
|
||||
|
||||
engine = create_engine(os.getenv("DATABASE_URL", ""))
|
||||
SessionLocal = sessionmaker(autoflush=False, bind=engine)
|
||||
|
||||
def safe_commit(db : Session):
    """Commit *db*; on any SQLAlchemy failure roll back and re-raise."""
    try:
        db.commit()
    except SQLAlchemyError:
        db.rollback()
        raise
|
||||
|
||||
def get_db():
    """Yield a database session; roll back on error and always close it."""
    session = SessionLocal()
    try:
        yield session
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()
|
||||
|
||||
@event.listens_for(SessionLocal, "do_orm_execute")
|
||||
def _add_filtering_criterial(execute_state):
|
||||
skip_filter = execute_state.execution_options.get("skip_filter", False)
|
||||
|
||||
if execute_state.is_select and not skip_filter:
|
||||
|
||||
from ..models.models import AuditMixin
|
||||
|
||||
execute_state.statement = execute_state.statement.options(
|
||||
with_loader_criteria(
|
||||
AuditMixin,
|
||||
lambda cls: cls.is_archived.is_(False),
|
||||
include_aliases = True,
|
||||
)
|
||||
)
|
||||
|
||||
db_dependency = Annotated[Session, Depends(get_db)]
|
||||
@@ -0,0 +1,122 @@
|
||||
"""
|
||||
This file defines and handles all the errors
|
||||
The ExceptionHandlerRoute class is used by all the routes
|
||||
"""
|
||||
|
||||
from enum import Enum
|
||||
from typing import Any, Coroutine, Type
|
||||
from fastapi import Request, Response
|
||||
from fastapi.routing import APIRoute
|
||||
from pydantic import BaseModel, ConfigDict
|
||||
from collections.abc import Callable
|
||||
from sqlalchemy.exc import NoResultFound, SQLAlchemyError
|
||||
|
||||
|
||||
class ProblemType(str, Enum):
    """Machine-readable problem categories for RFC 7807 responses."""

    ITEM_NOT_FOUND = 'ITEM_NOT_FOUND'
    INTERNAL_SERVER_ERROR = 'INTERNAL_SERVER_ERROR'
    INVALID_CREDENTIALS = 'INVALID_CREDENTIALS'
    UNIQUE_CONSTRAINT = 'UNIQUE_CONSTRAINT'
    # These two are referenced by ExceptionHandlerRoute but were missing,
    # so hitting those branches raised AttributeError instead of
    # producing a problem response.
    UNIQUE_VALIDATION = 'UNIQUE_VALIDATION'
    FOREIGN_KEY_VIOLATION = 'FOREIGN_KEY_VIOLATION'
    INVALID_REQUEST = 'INVALID_REQUEST'
    BLANK = 'BLANK'
|
||||
|
||||
class ProblemDetail(BaseModel):
    """RFC 7807 Problem Details for HTTP APIs."""

    type: ProblemType = ProblemType.BLANK
    status: int | None = None
    title: str | None = None
    # Handlers assign plain strings, dicts ({'key': ...}) and
    # validation-error lists here, so the field accepts any
    # JSON-serializable value rather than only str.
    detail: Any | None = None

    model_config = ConfigDict(from_attributes=True)
|
||||
|
||||
class ItemNotFoundException(Exception):
    """Raised when a lookup for *model* identified by *key* finds nothing."""

    def __init__(self, model: Type, key: Any = ""):
        # Store display-friendly values for the problem-detail response.
        self.model = model.__name__
        self.key = str(key)
|
||||
|
||||
|
||||
def _pg_error_detail(orig) -> str:
    """Extract the human-readable DETAIL line from a psycopg error message.

    psycopg formats messages as ``<summary>\\nDETAIL:  <text>``; fall back to
    the whole message if that shape does not hold.
    """
    try:
        return str(orig).split('\n')[1].split(': ')[1]
    except IndexError:
        return str(orig)


class ExceptionHandlerRoute(APIRoute):
    """APIRoute subclass used by every router to translate exceptions into
    RFC 7807 ``application/problem+json`` responses.

    It wraps the normal route handler: on success the original response is
    returned untouched; on failure a ProblemDetail is built from the concrete
    exception type and serialized as JSON.
    """

    def get_route_handler(self) -> Callable:
        original_route_handler = super().get_route_handler()

        async def custom_route_handler(request: Request) -> Response:
            try:
                return await original_route_handler(request)

            except SQLAlchemyError as exc:
                problem = ProblemDetail(status = 422)
                # BUG FIX: only DBAPIError subclasses carry `.orig`; the
                # original accessed exc.orig unconditionally, raising
                # AttributeError for e.g. NoResultFound.
                orig = getattr(exc, 'orig', None)

                if isinstance(exc, NoResultFound):
                    # BUG FIX: NoResultFound has no `.model`/`.key` attributes
                    # (those belong to ItemNotFoundException); the original
                    # branch crashed with AttributeError.
                    problem.type = ProblemType.ITEM_NOT_FOUND
                    problem.title = 'Item Not Found'
                    problem.detail = str(exc)
                    problem.status = 404

                elif isinstance(orig, UniqueViolation):
                    # BUG FIX: ProblemType.UNIQUE_VALIDATION never existed;
                    # the declared member is UNIQUE_CONSTRAINT.
                    problem.type = ProblemType.UNIQUE_CONSTRAINT
                    problem.title = 'Unique violation'
                    problem.detail = _pg_error_detail(orig)

                elif isinstance(orig, ForeignKeyViolation):
                    problem.type = ProblemType.FOREIGN_KEY_VIOLATION
                    problem.title = 'Foreign Key Violation'
                    problem.detail = _pg_error_detail(orig)

                # NOTE: the original ended this branch with an unreachable
                # `raise HTTPException(..., detail=detail)` referencing an
                # undefined name; it has been removed.
                return JSONResponse(
                    status_code = problem.status,
                    content = problem.model_dump(exclude_none = True),
                    headers = {'Content-Type': 'application/problem+json'},
                )

            except Exception as exc:
                # Default: unexpected failure -> 500 with the raw message.
                problem = ProblemDetail(status = 500)
                problem.title = 'Internal Server Error please report the bug'
                problem.type = ProblemType.INTERNAL_SERVER_ERROR
                problem.detail = str(exc)

                if isinstance(exc, ItemNotFoundException):
                    problem.type = ProblemType.ITEM_NOT_FOUND
                    problem.title = f'{exc.model} Not Found'
                    problem.detail = { 'key' : exc.key}
                    problem.status = 404

                elif isinstance(exc, RequestValidationError):
                    # Raw pydantic error list is exposed as-is for now.
                    problem.type = ProblemType.INVALID_REQUEST
                    problem.title = "Input validation error"
                    problem.detail = exc.errors()
                    problem.status = 422

                elif isinstance(exc, HTTPException):
                    # Mostly raised by the auth dependencies; typo in the
                    # original title ("Credientials") fixed, debug print removed.
                    problem.title = "Credentials error"
                    problem.detail = exc.detail
                    problem.status = exc.status_code

                if problem.status == 401:
                    problem.type = ProblemType.INVALID_CREDENTIALS

                return JSONResponse(
                    status_code = problem.status,
                    content = problem.model_dump(exclude_none = True),
                    headers = {'Content-Type': 'application/problem+json'},
                )

        return custom_route_handler
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
+174
@@ -0,0 +1,174 @@
|
||||
from contextlib import asynccontextmanager
|
||||
from fastapi import FastAPI
|
||||
from .exceptions import ExceptionHandlerRoute
|
||||
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from sqlalchemy.orm import Session
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
|
||||
from .models import Users
|
||||
from .db.db import engine
|
||||
from .db.config import settings
|
||||
from .middlewares import app_middleware
|
||||
from .auth.views import get_password_hash
|
||||
from .routes import routers, docs
|
||||
|
||||
|
||||
@asynccontextmanager
async def init_db(app: FastAPI):
    """FastAPI lifespan hook: seed the first superuser before serving requests.

    Idempotent — the insert only happens when no row with
    ``settings.FIRST_SUPERUSER`` as username exists yet.
    """
    with Session(engine) as session:
        # Imported here to keep module import order untangled.
        from sqlalchemy import select, insert

        existing = session.execute(
            select(Users).where(Users.username == settings.FIRST_SUPERUSER)
        ).first()

        if not existing:
            seed = insert(Users).values(
                username = settings.FIRST_SUPERUSER,
                hashed_password = get_password_hash(settings.FIRST_SUPERUSER_PASSWORD),
                full_name = "admin",
                type = 'ADMIN',
                email = settings.FIRST_SUPERUSER_EMAIL,
            )
            session.execute(seed)
            session.commit()
    yield
|
||||
description = """
|
||||
# Description
|
||||
This part of the documentation includes overview of the backend, backend internals and common attributes shared by all the apis.
|
||||
|
||||
The backend is written using [FastAPI](https://fastapi.tiangolo.com/), [SQLAlchemy](https://www.sqlalchemy.org/) and [Postgresql](https://www.postgresql.org/).
|
||||
|
||||
## Documentation
|
||||
Redoc with openapi will serve as the primary documentation. It will be available publicly in the development and staging environments, but not in the production environment for security reasons.
|
||||
|
||||
## Authorization
|
||||
Authorization is implemented using OAuth2 with username and password; logging in with external providers like Google, GitHub etc. is not supported.
|
||||
|
||||
Each type of user, i.e. doctor, patient, staff, is required to create their own username and password while registering, so that they will be able to log in to the system.
|
||||
|
||||
The auth token expiry duration is 120 minutes currently
|
||||
|
||||
|
||||
## Common columns
|
||||
All tables in the backend shares some common columns, these columns will be described here and not in the respective API as it has the same meaning everywhere.
|
||||
|
||||
|
||||
1. __created_at, updated_at__ : represents the datetime when the row was added, updated respectively
|
||||
|
||||
2. __created_by_id, updated_by_id__ : foreign keys to a user's id, extracted from the logged-in user who created or updated the row.
|
||||
|
||||
3. __is_archived__ : It represents if the row is deleted, allows us to implement soft delete.
|
||||
|
||||
|
||||
## Audit
|
||||
|
||||
Each table on the backend has its respective audit table with the suffix ___audit__, i.e. the __appointment__ table has __appointment_audit__.
|
||||
It stores all the changes made to each row, including which operation it was, which columns were changed, and what their old and new values were.
|
||||
|
||||
APIs will be created to access the audit for each table.
|
||||
|
||||
It's implemented using the sqlalchemy_declarative_extensions library
|
||||
|
||||
## Internals
|
||||
|
||||
### Primary, Foreign Key
|
||||
|
||||
All primary keys are [UUIDs](https://en.wikipedia.org/wiki/Universally_unique_identifier), which makes it possible to avoid a lot of security risks and implement dynamic features.
|
||||
Due to this all the foreign keys are also UUIDs
|
||||
UUIDs can be generated in postgres or in python easy and fast
|
||||
|
||||
## Errors
|
||||
|
||||
Errors are implemented using [RFC9457](https://www.rfc-editor.org/rfc/rfc9457.html) with some slight modification. Here is a sample error response.
|
||||
|
||||
```json
|
||||
{
|
||||
    "type": "UNIQUE_CONSTRAINT",
|
||||
"status": 422,
|
||||
"detail": "Key (username)=(banana) already exists.",
|
||||
"title": "Unique violation"
|
||||
}
|
||||
```
|
||||
|
||||
The type will always be a standard enum value. the status will always be the http status code.
|
||||
The type, status, title and detail will __always__ exist,
|
||||
|
||||
For now in case of pydantic validation error, the raw pydantic message will be set in details
|
||||
|
||||
In case of internal server error the type will be 'INTERNAL_SERVER_ERROR' and it's detail will contain the error message.
|
||||
|
||||
In this way all the errors will be well documented and handled in a central module, It will also make writing automated tests easier.
|
||||
|
||||
|
||||
# Testing
|
||||
This section covers the steps for testing and guidelines for reporting and verifying bugs for backend api.
|
||||
In the future the manual for QA for frontend will also be included here.
|
||||
|
||||
|
||||
## Automated testing
|
||||
Automated testing is done using fastapi's testclient, pytest and faker
|
||||
This test creates a new database on startup, runs the tests and deletes everything, so the errors can be reproducible and deterministic.
|
||||
The test interacts to the backend only thru API
|
||||
|
||||
### Prerequisites
|
||||
1. Python
|
||||
2. Postgresql server with a clean database (You can avoid it by using test database on the server)
|
||||
|
||||
### Steps
|
||||
1. Clone the [repository](https://git.aayutech.dev/fourleaf/backend)
|
||||
2. Create .env file using .env.example as reference
|
||||
3. Create virtual environment and install packages from requirements.txt
|
||||
4. Run tests using pytest (lookup pytest's documentation for more information)
|
||||
5. Create a test inside the tests folder
|
||||
|
||||
|
||||
## Manual testing
|
||||
Manual testing is done using postman or similar api client.
|
||||
Url for testing in dev environment is `https://api-dev.aayutech.dev`
|
||||
|
||||
|
||||
### Steps
|
||||
1. Download the postman collection from the git server (optional)
|
||||
2. Create and run requests from postman
|
||||
|
||||
## Reporting bug
|
||||
All the bug reporting will be done using kanban `https://project.aayutech.dev`
|
||||
|
||||
### Bug criteria
|
||||
1. All __INTERNAL_SERVER_ERROR__
|
||||
2. All about:none errors
|
||||
3. All errors that consists of raw sql messages
|
||||
4. Errors that don't match the description of the api
|
||||
5. Bug should be able to be reproduced
|
||||
|
||||
### Bug reporting steps
|
||||
1. Create new task in the `Bug Report` column in kanboard
|
||||
2. Write short description about the bug and how it can be reproduced
|
||||
3. Optionally take screenshot of postman or terminal where the bug can be seen
|
||||
4. Add the related user as assignee
|
||||
|
||||
### Bug resolve steps
|
||||
1. After a bug is reported a QA or Developer will approve the bug and put it in the `BUG` column.
|
||||
2. After the bug is verified, the respective developer will fix the bug and put it in the `Ready for QA` column.
|
||||
3. A QA will reverify the bug and put it in the `DONE` column.
|
||||
|
||||
"""
|
||||
|
||||
# Application object: routers, docs and the superuser-seeding lifespan hook
# are all wired here.
app = FastAPI(
    title = "Fastapi template",
    version = "0.0.1",
    description = description,
    openapi_tags = docs,
    lifespan = init_db,
)

# Register every feature router discovered in app.routes.
for router in routers:
    app.include_router(router)

# Request/response logging middleware (see app.middlewares).
app.middleware('http')(app_middleware)

# Fully permissive CORS — every origin, method and header is allowed.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,   # needed for cookies / Authorization headers
    allow_methods=["*"],
    allow_headers=["*"],
)
|
||||
|
||||
@@ -0,0 +1,36 @@
|
||||
from datetime import datetime
|
||||
import time
|
||||
from collections.abc import Callable
|
||||
from typing import Annotated
|
||||
from fastapi import Body, FastAPI, HTTPException, Response, Request
|
||||
from fastapi.exceptions import RequestValidationError as exc
|
||||
from fastapi.routing import APIRoute
|
||||
from sqlalchemy.exc import SQLAlchemyError, IntegrityError
|
||||
from psycopg.errors import UniqueViolation
|
||||
from starlette.concurrency import iterate_in_threadpool
|
||||
from starlette.background import BackgroundTask
|
||||
from .background import write_log
|
||||
|
||||
async def app_middleware(request: Request, call_next):
    """Log every non-static request/response pair via a background task.

    Captures the request JSON body (if any), times the handler, drains and
    restores the response body iterator, and queues ``write_log`` so logging
    happens after the response is sent.
    """
    try:
        req_body = await request.json()
    except Exception:
        # Body absent or not JSON (e.g. form/multipart) — log nothing for it.
        req_body = None

    start_time = time.perf_counter()
    response = await call_next(request)
    process_time = time.perf_counter() - start_time

    # NOTE(review): assumes request.url.path always starts with '/' (Starlette
    # guarantees this), so index [1] is safe.
    if request.url.path.split('/')[1] != 'static':
        # Drain the streaming body, then rebuild the iterator so the client
        # still receives it.
        chunks = [section async for section in response.body_iterator]
        response.body_iterator = iterate_in_threadpool(iter(chunks))

        if chunks:
            # BUG FIX: join *all* chunks before decoding; the original logged
            # only chunks[0], silently truncating multi-chunk/streamed bodies.
            res_body = b"".join(chunks).decode()
        else:
            res_body = True

        # Attach the background task so the log is written after the response
        # has been delivered.
        response.background = BackgroundTask(write_log, request, response, req_body, res_body, process_time)
    return response
||||
@@ -0,0 +1,93 @@
|
||||
from datetime import datetime
|
||||
from typing import NewType
|
||||
from sqlalchemy import MetaData, Uuid, Index
|
||||
from sqlalchemy_declarative_extensions import declarative_database
|
||||
from sqlalchemy_declarative_extensions.audit import audit
|
||||
from sqlalchemy.orm import declarative_base, declared_attr, Mapped, mapped_column
|
||||
from sqlalchemy.dialects.postgresql import INET, JSONB, ARRAY
|
||||
import uuid
|
||||
|
||||
# Deterministic constraint-naming conventions so generated DDL / migrations
# always produce the same index, constraint and key names.
# NOTE: the public name `nameing_metadata` (sic) is kept — other modules may
# import it.
_NAMING_CONVENTION = {
    "ix": "ix_%(column_0_label)s",
    "uq": "uq_%(table_name)s_%(column_0_name)s",
    "ck": "ck_%(table_name)s_%(constraint_name)s",
    "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
    "pk": "pk_%(table_name)s",
}

nameing_metadata = MetaData(naming_convention=_NAMING_CONVENTION)

# Length-tagged string aliases; presumably mapped to VARCHAR(n) via the
# declarative type map.
nstr30 = NewType("nstr30", str)
nstr50 = NewType("nstr50", str)
nstr100 = NewType("nstr100", str)
nstr255 = NewType("nstr255", str)
# Timezone-aware datetime alias.
tz_date = NewType("tz_date", datetime)

# Declarative base wrapped by sqlalchemy_declarative_extensions so @audit()
# and friends are available on models.
Base = declarative_database(declarative_base(metadata=nameing_metadata))
||||
|
||||
|
||||
|
||||
class ApiLog(Base):
    """One row per logged API request, written by the logging middleware's
    ``write_log`` background task."""
    __tablename__ = 'api_log'

    # BUG FIX: uuid.uuid7 only exists on Python >= 3.14; on older interpreters
    # the bare attribute access crashed at import time.  Fall back to uuid4.
    id : Mapped[uuid.UUID] = mapped_column(primary_key=True, default = getattr(uuid, "uuid7", uuid.uuid4))
    api_key : Mapped[uuid.UUID | None]
    ip_address : Mapped[str] = mapped_column(INET)      # client address
    path : Mapped[str]                                  # request path
    method : Mapped[str]                                # HTTP verb
    status_code : Mapped[int]
    request_body : Mapped[dict | None] = mapped_column(JSONB)
    response_body : Mapped[dict | None] = mapped_column(JSONB)
    query_params : Mapped[dict | None] = mapped_column(JSONB)
    path_params : Mapped[dict | None] = mapped_column(JSONB)
    process_time : Mapped[float]                        # handler wall time, seconds
    created_at : Mapped[datetime]
||||
class AuditMixin:
    """Mixin adding created/updated audit columns and relationships.

    NOTE(review): the soft-delete hook filters AuditMixin subclasses on
    ``is_archived``, but the mixin does not declare it — confirm every
    subclass defines that column.
    """

    @declared_attr
    def created_at(cls) -> Mapped[tz_date]:
        # Type resolved via the declarative type map for tz_date.
        return mapped_column()

    @declared_attr
    def updated_at(cls) -> Mapped[tz_date]:
        return mapped_column()

    @declared_attr
    def created_by_id(cls) -> Mapped[uuid.UUID | None]:
        # BUG FIX: annotated with the Python type (uuid.UUID), not the
        # SQLAlchemy `Uuid` type object; nullable because system-seeded rows
        # have no author.  FK targets users.id (Users.__tablename__).
        from sqlalchemy import ForeignKey
        return mapped_column(ForeignKey("users.id"))

    @declared_attr
    def updated_by_id(cls) -> Mapped[uuid.UUID | None]:
        from sqlalchemy import ForeignKey
        return mapped_column(ForeignKey("users.id"))

    @declared_attr
    def created_by(cls) -> "Mapped[Users | None]":
        # BUG FIX: the original used mapped_column(foreign_key=..., remote_side=...),
        # but those are relationship() arguments — mapped_column() raises
        # TypeError on them.  The return annotation is a string because
        # `Users` is defined later in this module, so a live annotation
        # raised NameError at import time.  foreign_keys is given as a string
        # so it is resolved lazily per concrete subclass.
        from sqlalchemy.orm import relationship
        return relationship("Users", foreign_keys=f"{cls.__name__}.created_by_id")

    @declared_attr
    def updated_by(cls) -> "Mapped[Users | None]":
        from sqlalchemy.orm import relationship
        return relationship("Users", foreign_keys=f"{cls.__name__}.updated_by_id")
|
||||
|
||||
@audit()
class Users(Base):
    """Application user (doctor / patient / staff / admin).

    @audit() generates a users_audit table recording every change to rows here.
    """
    __tablename__ = "users"

    # BUG FIX: annotate with the Python uuid.UUID type; `Uuid` is the
    # SQLAlchemy column type and belongs in mapped_column(), not Mapped[].
    id : Mapped[uuid.UUID] = mapped_column(Uuid, primary_key=True, default=uuid.uuid4)
    username : Mapped[nstr50] = mapped_column()
    full_name : Mapped[nstr100]
    email : Mapped[nstr255] = mapped_column()
    type : Mapped[nstr50] = mapped_column(default="staff")   # role discriminator
    # Soft-delete flag; the do_orm_execute hook filters on it.
    is_archived : Mapped[bool] = mapped_column(default=False)
    hashed_password: Mapped[nstr255]

    # Partial unique indexes: usernames/emails must be unique among *active*
    # rows only, so archiving a user frees up their identifiers.
    # NOTE(review): postgresql_where is a raw SQL string — confirm this
    # SQLAlchemy version coerces it; otherwise wrap in text().
    __table_args__ = (
        Index("ix_unique_active_email", email, unique=True, postgresql_where="is_archived = false"),
        Index("ix_unique_active_username", username, unique=True, postgresql_where="is_archived = false"),
    )
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -0,0 +1,21 @@
|
||||
import importlib
|
||||
|
||||
from .exceptions import ExceptionHandlerRoute
|
||||
|
||||
# Dotted paths (relative to the 'app' package) of modules exposing a `router`.
module_path = [
    '.auth.views'
]

routers = []
docs = []

for path in module_path:
    module = importlib.import_module(path, package = 'app')

    if hasattr(module, 'router'):
        # BUG FIX: the original was `assert(type(x == y))` — the type of a
        # bool, which is always truthy, so the check never fired.
        assert module.router.route_class is ExceptionHandlerRoute, \
            f"{path}: router must use ExceptionHandlerRoute"
        routers.append(module.router)

        # BUG FIX: guarded under the router check — a module with `docs` but
        # no `router` would have crashed on `module.router.tags`.
        if hasattr(module, 'docs'):
            tag = module.router.tags[0]
            docs.append({"name" : tag, "description" : module.docs})
||||
@@ -0,0 +1,102 @@
|
||||
annotated-doc==0.0.4
|
||||
annotated-types==0.7.0
|
||||
anyio==4.13.0
|
||||
certifi==2026.2.25
|
||||
click==8.3.2
|
||||
dnspython==2.8.0
|
||||
email-validator==2.3.0
|
||||
fastapi==0.136.0
|
||||
fastapi-cli==0.0.24
|
||||
fastapi-cloud-cli==0.17.0
|
||||
fastar==0.11.0
|
||||
greenlet==3.4.0
|
||||
h11==0.16.0
|
||||
httpcore==1.0.9
|
||||
httptools==0.7.1
|
||||
httpx==0.28.1
|
||||
idna==3.11
|
||||
Jinja2==3.1.6
|
||||
markdown-it-py==4.0.0
|
||||
MarkupSafe==3.0.3
|
||||
mdurl==0.1.2
|
||||
psycopg2==2.9.12
|
||||
pydantic==2.13.2
|
||||
pydantic-extra-types==2.11.1
|
||||
pydantic-settings==2.13.1
|
||||
pydantic_core==2.46.2
|
||||
Pygments==2.20.0
|
||||
python-dotenv==1.2.2
|
||||
python-multipart==0.0.26
|
||||
PyYAML==6.0.3
|
||||
rich==15.0.0
|
||||
rich-toolkit==0.19.7
|
||||
rignore==0.7.6
|
||||
sentry-sdk==2.58.0
|
||||
shellingham==1.5.4
|
||||
SQLAlchemy==2.0.49
|
||||
sqlalchemy-declarative-extensions==0.16.8
|
||||
starlette==1.0.0
|
||||
typer==0.24.1
|
||||
typing-inspection==0.4.2
|
||||
typing_extensions==4.15.0
|
||||
urllib3==2.6.3
|
||||
uvicorn==0.44.0
|
||||
uvloop==0.22.1
|
||||
watchfiles==1.1.1
|
||||
websockets==16.0
|
||||
alembic==1.18.4
|
||||
annotated-doc==0.0.4
|
||||
annotated-types==0.7.0
|
||||
anyio==4.13.0
|
||||
argon2-cffi==25.1.0
|
||||
argon2-cffi-bindings==25.1.0
|
||||
certifi==2026.2.25
|
||||
cffi==2.0.0
|
||||
click==8.3.2
|
||||
cryptography==46.0.7
|
||||
dnspython==2.8.0
|
||||
email-validator==2.3.0
|
||||
fastapi==0.136.0
|
||||
fastapi-cli==0.0.24
|
||||
fastapi-cloud-cli==0.17.0
|
||||
fastar==0.11.0
|
||||
greenlet==3.4.0
|
||||
h11==0.16.0
|
||||
httpcore==1.0.9
|
||||
httptools==0.7.1
|
||||
httpx==0.28.1
|
||||
idna==3.11
|
||||
Jinja2==3.1.6
|
||||
jwt==1.4.0
|
||||
Mako==1.3.11
|
||||
markdown-it-py==4.0.0
|
||||
MarkupSafe==3.0.3
|
||||
mdurl==0.1.2
|
||||
psycopg==3.3.3
|
||||
psycopg2==2.9.12
|
||||
pwdlib==0.3.0
|
||||
pycparser==3.0
|
||||
pydantic==2.13.2
|
||||
pydantic-extra-types==2.11.1
|
||||
pydantic-settings==2.13.1
|
||||
pydantic_core==2.46.2
|
||||
Pygments==2.20.0
|
||||
python-dotenv==1.2.2
|
||||
python-multipart==0.0.26
|
||||
PyYAML==6.0.3
|
||||
rich==15.0.0
|
||||
rich-toolkit==0.19.7
|
||||
rignore==0.7.6
|
||||
sentry-sdk==2.58.0
|
||||
shellingham==1.5.4
|
||||
SQLAlchemy==2.0.49
|
||||
sqlalchemy-declarative-extensions==0.16.8
|
||||
starlette==1.0.0
|
||||
typer==0.24.1
|
||||
typing-inspection==0.4.2
|
||||
typing_extensions==4.15.0
|
||||
urllib3==2.6.3
|
||||
uvicorn==0.44.0
|
||||
uvloop==0.22.1
|
||||
watchfiles==1.1.1
|
||||
websockets==16.0
|
||||
Reference in New Issue
Block a user