Compare commits

...

2 Commits

22 changed files with 461 additions and 73 deletions

View File

@ -2,9 +2,17 @@
DOCKER_COMPOSE = docker-compose
DOCKER_RUN = $(DOCKER_COMPOSE) run --rm backend
# General
# -------
# Default target
.PHONY: all
all: docker-up
.PHONY: up
up: docker-up
# Shortcut for first start (initializing database, etc.)
.PHONY: first-start
first-start: docker-build db-upgrade docker-up
# Container management
@ -16,7 +24,7 @@ docker-up:
.PHONY: docker-down
docker-down:
$(DOCKER_COMPOSE) down
$(DOCKER_COMPOSE) down --remove-orphans
.PHONY: docker-build
docker-build:
@ -28,7 +36,7 @@ docker-rebuild:
.PHONY: docker-purge
docker-purge:
$(DOCKER_COMPOSE) down --volumes
$(DOCKER_COMPOSE) down --remove-orphans --volumes
.PHONY: docker-restart
docker-restart:
@ -45,3 +53,23 @@ docker-run:
.PHONY: docker-shell
docker-shell:
$(DOCKER_RUN) bash
# Database management
# -------------------
# These targets run Alembic inside the backend container via $(DOCKER_RUN).
# Run migrations to upgrade the database to the head revision (set REVISION parameter to override)
.PHONY: db-upgrade
db-upgrade:
	$(DOCKER_RUN) alembic upgrade $(or $(REVISION),head)
# Run migrations to downgrade the database to the previous revision (set REVISION parameter to override)
.PHONY: db-downgrade
db-downgrade:
	$(DOCKER_RUN) alembic downgrade $(or $(REVISION),-1)
# Autogenerate a revision for database migration (requires MESSAGE parameter to set the revision message)
.PHONY: db-generate-migration
db-generate-migration:
	@test -n "$(MESSAGE)" || (echo "Please set the revision message: make db-generate-migration MESSAGE=\"...\""; exit 1)
	$(DOCKER_RUN) alembic revision --autogenerate -m "$(MESSAGE)"

View File

@ -10,6 +10,8 @@ flask = "~=2.0"
pyyaml = "*"
sqlalchemy = "~=1.4"
pymysql = "*"
alembic = "~=1.7"
python-dateutil = "*"
[dev-packages]

34
Pipfile.lock generated
View File

@ -1,7 +1,7 @@
{
"_meta": {
"hash": {
"sha256": "b0c979ea7ddef64da26c3f7537dbadaa9de352c7a8f6cde551f7ce85519de503"
"sha256": "f63a254c353bc5c8e6cbe2a920f8402ba0da5bec775bfff91f64fa485e9bf95c"
},
"pipfile-spec": 6,
"requires": {
@ -16,6 +16,14 @@
]
},
"default": {
"alembic": {
"hashes": [
"sha256:29be0856ec7591c39f4e1cb10f198045d890e6e2274cf8da80cb5e721a09642b",
"sha256:4961248173ead7ce8a21efb3de378f13b8398e6630fab0eb258dc74a8af24c58"
],
"index": "pypi",
"version": "==1.7.7"
},
"click": {
"hashes": [
"sha256:24e1a4a9ec5bf6299411369b208c1df2188d9eb8d916302fe6bf03faed227f1e",
@ -117,6 +125,14 @@
"markers": "python_version >= '3.7'",
"version": "==3.1.1"
},
"mako": {
"hashes": [
"sha256:23aab11fdbbb0f1051b93793a58323ff937e98e34aece1c4219675122e57e4ba",
"sha256:9a7c7e922b87db3686210cf49d5d767033a41d4010b284e747682c92bddd8b39"
],
"markers": "python_version >= '3.7'",
"version": "==1.2.0"
},
"markupsafe": {
"hashes": [
"sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003",
@ -171,6 +187,14 @@
"index": "pypi",
"version": "==1.0.2"
},
"python-dateutil": {
"hashes": [
"sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86",
"sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"
],
"index": "pypi",
"version": "==2.8.2"
},
"pyyaml": {
"hashes": [
"sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293",
@ -218,6 +242,14 @@
"markers": "python_version >= '3.7'",
"version": "==62.1.0"
},
"six": {
"hashes": [
"sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926",
"sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.16.0"
},
"sqlalchemy": {
"hashes": [
"sha256:093b3109c2747d5dc0fa4314b1caf4c7ca336d5c8c831e3cfbec06a7e861e1e6",

50
alembic.ini Normal file
View File

@ -0,0 +1,50 @@
[alembic]
# Path to migration scripts
script_location = migrations
# Add project root directory to the import path
prepend_sys_path = .
# String template for migration filenames
file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# Timezone for the date within the migration files and filenames
timezone = Europe/Berlin
# Max length for the "slug" field in filenames
truncate_slug_length = 24
# Logging configuration
# (standard fileConfig() format; loaded by migrations/env.py at startup)
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
# Root logger: warnings and above go to the console
[logger_root]
level = WARN
handlers = console
qualname =
# SQLAlchemy engine logger (raise to INFO to log each SQL statement)
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
# Alembic's own progress output
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

View File

@ -34,7 +34,7 @@ services:
timeout: 1s
retries: 20
adminer:
phpmyadmin:
image: phpmyadmin
ports:
- '8099:80'

68
migrations/env.py Normal file
View File

@ -0,0 +1,68 @@
from logging.config import fileConfig
from alembic import context
from tofu_api.app import create_app
from tofu_api.models import BaseModel
# This is the Alembic Config object, which provides access to the values within the .ini file in use.
alembic_config = context.config
# Interpret the config file for Python logging. This line sets up loggers basically.
if alembic_config.config_file_name is not None:
fileConfig(alembic_config.config_file_name)
# Create Flask app, which loads the app config and initializes the database engine.
app = create_app()
db = app.dependencies.get_sqlalchemy()
def process_revision_directives(_context, _revision, directives):
    """
    Callback used to prevent generating empty migrations with autogenerate.

    Source: https://alembic.sqlalchemy.org/en/latest/cookbook.html#don-t-generate-empty-migrations-with-autogenerate

    :param _context: Migration context (unused).
    :param _revision: Current revision identifier(s) (unused).
    :param directives: Single-element list of the MigrationScript directive;
        cleared in place to suppress writing a revision file.
    """
    # cmd_opts is only set when Alembic runs via its CLI; it is None when the
    # config is used programmatically, so guard the attribute access instead
    # of assuming the argparse namespace exists.
    if getattr(alembic_config.cmd_opts, 'autogenerate', False) and directives[0].upgrade_ops.is_empty():
        directives[:] = []
context_parameters = {
'target_metadata': BaseModel.metadata,
'process_revision_directives': process_revision_directives,
'user_module_prefix': 'tofu_types.',
}
def run_migrations_offline():
    """
    Run migrations in 'offline' mode, which does not require an actual database engine and can be used to generate SQL scripts.
    """
    # Configure the context with just the database URL (no Engine/connection);
    # literal_binds renders parameters inline so the output is a plain SQL script.
    context.configure(
        url=app.config.sqlalchemy_database_uri,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        **context_parameters,
    )
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online():
    """
    Run migrations in 'online' mode, which requires a database engine.
    """
    # Reuse the engine initialized by the Flask app so Alembic connects with
    # the exact same configuration as the application itself.
    with db.engine.connect() as connection:
        context.configure(
            connection=connection,
            **context_parameters,
        )
        with context.begin_transaction():
            context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()

27
migrations/script.py.mako Normal file
View File

@ -0,0 +1,27 @@
"""
${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
import tofu_api.common.database.types as tofu_types
${imports if imports else ""}
# Revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}

View File

@ -0,0 +1,38 @@
"""
Initial revision
Revision ID: 044f3afd19b0
Revises:
Create Date: 2022-04-15 21:41:39.962542+02:00
"""
from alembic import op
import sqlalchemy as sa
import tofu_api.common.database.types as tofu_types
# Revision identifiers, used by Alembic.
revision = '044f3afd19b0'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
'task',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', tofu_types.TzDateTime(), server_default=sa.text('now()'), nullable=False),
sa.Column('modified_at', tofu_types.TzDateTime(), server_default=sa.text('now()'), nullable=False),
sa.Column('title', sa.String(length=255), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_task'))
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('task')
# ### end Alembic commands ###

View File

@ -6,6 +6,7 @@ from flask import Flask
from tofu_api.api import TofuApiBlueprint
from tofu_api.common.config import Config
from tofu_api.common.json import JSONEncoder
from tofu_api.dependencies import Dependencies
# Enable deprecation warnings in dev environment
@ -19,6 +20,7 @@ class App(Flask):
"""
# Override Flask classes
config_class = Config
json_encoder = JSONEncoder
# Set type hint for config
config: Config
@ -57,15 +59,11 @@ class App(Flask):
db = self.dependencies.get_sqlalchemy()
db.init_database(self)
# Import models to fill the metadata object
# Import models to populate the database metadata
import tofu_api.models # noqa (unused import)
# Create all tables
# TODO: Use migrations instead
db.create_all_tables()
def create_app() -> Flask:
def create_app() -> App:
"""
App factory, returns a Flask app object.
"""

View File

@ -1,3 +1,3 @@
from .metadata import metadata_obj
from .model import Model
from .metadata import MetaData
from .sqlalchemy import SQLAlchemy
from .typing import Col, Rel

View File

@ -1,17 +1,25 @@
from sqlalchemy import MetaData
from sqlalchemy import MetaData as _MetaData
__all__ = [
'metadata_obj',
'MetaData',
]
# Define naming convention for constraints
_naming_convention = {
"ix": 'ix_%(column_0_label)s',
"uq": "uq_%(table_name)s_%(column_0_name)s",
"ck": "ck_%(table_name)s_%(constraint_name)s",
"fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
"pk": "pk_%(table_name)s"
}
# Create global metadata object for database schemas
metadata_obj = MetaData(naming_convention=_naming_convention)
class MetaData(_MetaData):
    """
    App specific subclass of the SQLAlchemy MetaData class.

    Applies a project-wide naming convention for constraints and indexes by
    default, so generated constraint names are deterministic across models.
    """

    # Define naming convention for constraints
    _naming_convention = {
        "ix": 'ix_%(column_0_label)s',
        "uq": "uq_%(table_name)s_%(column_0_name)s",
        "ck": "ck_%(table_name)s_%(constraint_name)s",
        "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
        "pk": "pk_%(table_name)s"
    }

    def __init__(self, *args, naming_convention=None, **kwargs):
        # Fall back to the class-level convention unless the caller supplies one.
        if not naming_convention:
            naming_convention = self._naming_convention
        super().__init__(*args, naming_convention=naming_convention, **kwargs)

View File

@ -0,0 +1,23 @@
from datetime import datetime
from sqlalchemy import Column, func
from sqlalchemy.orm import declarative_mixin
from tofu_api.common.database import Col
from tofu_api.common.database.types import TzDateTime
__all__ = [
'TimestampMixin'
]
@declarative_mixin
class TimestampMixin:
    """
    Mixin for database models that provides the "created_at" and "modified_at" columns.

    Both columns use the timezone-aware TzDateTime type and are populated by
    the database via server_default=NOW().
    """

    # Created timestamp (automatically set to NOW() once on object creation)
    created_at: Col[datetime] = Column(TzDateTime, nullable=False, server_default=func.now())

    # Modified timestamp (automatically set to NOW() on each update)
    modified_at: Col[datetime] = Column(TzDateTime, nullable=False, server_default=func.now(), onupdate=func.now())

View File

@ -1,10 +0,0 @@
from sqlalchemy.orm import declarative_base
from .metadata import metadata_obj
__all__ = [
'Model',
]
# Generate declarative base class for database models
Model = declarative_base(name='Model', metadata=metadata_obj)

View File

@ -1,12 +1,11 @@
from typing import Optional, cast
from flask import Flask
from sqlalchemy import MetaData, create_engine
from sqlalchemy import create_engine
from sqlalchemy.engine import Engine
from sqlalchemy.orm import Session, scoped_session, sessionmaker
from tofu_api.common.config import Config
from .metadata import metadata_obj
__all__ = [
'SQLAlchemy',
@ -70,22 +69,3 @@ class SQLAlchemy:
# For all further purposes, the scoped session should be treated like a regular Session object.
# Use cast() so we can use Session as the type annotation.
return cast(Session, self._scoped_session)
@property
def metadata(self) -> MetaData:
"""
Database metadata object.
"""
return metadata_obj
def create_all_tables(self) -> None:
"""
Create tables in the database for all models defined in the metadata.
"""
self.metadata.create_all(self.engine)
def drop_all_tables(self) -> None:
"""
Delete tables in the database for all models defined in the metadata.
"""
self.metadata.drop_all(self.engine)

View File

@ -0,0 +1 @@
from .tz_date_time import TzDateTime

View File

@ -0,0 +1,39 @@
from datetime import datetime, timezone
from sqlalchemy import DateTime, TypeDecorator
__all__ = [
'TzDateTime',
]
class TzDateTime(TypeDecorator):
    """
    Custom SQLAlchemy data type for timezone aware datetimes.

    Timezone-aware values are normalized to UTC and stored as naive datetimes;
    values read back from the database are re-labelled as UTC-aware.
    """

    impl = DateTime
    cache_ok = True  # type has no per-instance state, safe for statement caching

    @property
    def python_type(self):
        # The Python type handled by this decorator.
        return datetime

    def process_bind_param(self, value: datetime, dialect):
        """
        Convert a datetime object that is bound to a query parameter.

        Aware values are converted to UTC and stripped of tzinfo; naive values
        and None pass through unchanged.
        """
        if value is not None and value.tzinfo:
            value = value.astimezone(timezone.utc).replace(tzinfo=None)
        return value

    def process_result_value(self, value: datetime, dialect):
        """
        Convert a datetime object from a query result.

        The stored naive value is tagged as UTC; None passes through.
        """
        return value.replace(tzinfo=timezone.utc) if value is not None else None

    def process_literal_param(self, value: datetime, dialect):
        """
        Convert a literal parameter value to be rendered inline within a statement.
        """
        # Same UTC normalization as for bound parameters.
        return self.process_bind_param(value, dialect)

View File

@ -0,0 +1,14 @@
from typing import TypeVar, Union
from sqlalchemy import Column
from sqlalchemy.orm import RelationshipProperty
__all__ = [
    'Col',
    'Rel',
]

# Define type aliases for SQLAlchemy columns and relationships in declarative models.
# Col[T] annotates a Column attribute whose instance values have type T;
# Rel[T] does the same for relationship attributes.
_T = TypeVar('_T')
Col = Union[Column, _T]
Rel = Union[RelationshipProperty, _T]

View File

@ -0,0 +1 @@
from .json_encoder import JSONEncoder

View File

@ -0,0 +1,29 @@
from datetime import datetime
from typing import Any
from flask.json import JSONEncoder as _FlaskJSONEncoder
__all__ = [
'JSONEncoder',
]
class JSONEncoder(_FlaskJSONEncoder):
    """
    Custom JSON encoder built on top of the Flask JSONEncoder class.

    Adds support for datetime objects and for any object exposing a
    to_dict() method, before falling back to Flask's defaults.
    """

    def default(self, obj: Any) -> Any:
        """
        Convert any object to a JSON serializable type.

        :param obj: The object that could not be serialized natively.
        :return: A JSON-serializable representation of the object.
        """
        # Convert datetimes to ISO format without microseconds (e.g. '2022-01-02T10:20:30+00:00')
        if isinstance(obj, datetime):
            return obj.isoformat(timespec='seconds')

        # Use to_dict() method on objects that have it
        if hasattr(obj, 'to_dict'):
            return obj.to_dict()

        # Fallback to the Flask JSONEncoder
        return super().default(obj)

View File

@ -1 +1,5 @@
# Base model first
from .base import BaseModel
# Data models
from .task import Task

62
tofu_api/models/base.py Normal file
View File

@ -0,0 +1,62 @@
from typing import Any, Iterable, Optional
from sqlalchemy import Column, Integer, inspect
from sqlalchemy.orm import InstanceState, as_declarative
from tofu_api.common.database import Col, MetaData
__all__ = [
'BaseModel',
]
@as_declarative(name='BaseModel', metadata=MetaData())
class BaseModel:
    """
    Declarative base class for database models.

    Provides a default integer primary key, a debugging-friendly __repr__
    and a generic to_dict() serialization helper.
    """

    # Default primary key
    id: Col[int] = Column(Integer, nullable=False, primary_key=True)

    def __repr__(self) -> str:
        """
        Return a string representation of this object.
        """
        # Include the primary key when the attribute is present on the instance.
        return self._repr(id=self.id) if hasattr(self, 'id') else self._repr()

    def _repr(self, **fields) -> str:
        """
        Helper method for implementing __repr__.

        :param fields: Key/value pairs to render inside the parentheses;
            falls back to the instance's ORM identity when omitted.
        """
        # Annotate the ORM lifecycle state so the repr is useful while debugging sessions.
        state: InstanceState = inspect(self)
        state_str = f' [transient {id(self)}]' if state.transient \
            else f' [pending {id(self)}]' if state.pending \
            else ' [deleted]' if state.deleted \
            else ' [detached]' if state.detached else ''
        param_str = ', '.join([f'{key}={value!r}' for key, value in fields.items()] if fields else state.identity or [])
        return f'<{type(self).__name__}({param_str}){state_str}>'

    def to_dict(
        self,
        *,
        fields: Optional[Iterable[str]] = None,
        exclude: Optional[Iterable[str]] = None,
    ) -> dict[str, Any]:
        """
        Return the object's data as a dictionary.

        By default, the dictionary will contain all table columns (with their column name as key) defined in the model. This can be
        overridden by setting the `fields` and/or `exclude` parameters, in which case only fields that are listed in `fields` will be
        included in the dictionary, except for fields listed in `exclude`.
        """
        # Determine fields to include in dictionary (starting with all table columns)
        included_fields = set(column.name for column in self.__table__.columns)
        if fields is not None:
            included_fields.intersection_update(fields)
        if exclude is not None:
            included_fields.difference_update(exclude)
        return {
            field: getattr(self, field) for field in included_fields
        }

View File

@ -1,25 +1,19 @@
from sqlalchemy import Column, Integer, String, Text
from sqlalchemy import Column, String, Text
from tofu_api.common.database import Model
from tofu_api.common.database import Col
from tofu_api.common.database.mixins import TimestampMixin
from .base import BaseModel
class Task(Model):
class Task(TimestampMixin, BaseModel):
"""
Database model for tasks.
"""
__tablename__ = 'tasks'
__tablename__ = 'task'
id: int = Column(Integer, nullable=False, primary_key=True)
# TODO: created_at, modified_at
title: Col[str] = Column(String(255), nullable=False)
description: Col[str] = Column(Text, nullable=False, default='')
title: str = Column(String(255), nullable=False)
description: str = Column(Text, nullable=False, default='')
def to_dict(self) -> dict:
# TODO: Implement a generic to_dict() in the base model
return {
'id': self.id,
'title': self.title,
'description': self.description,
}
def __repr__(self):
return self._repr(id=self.id, title=self.title)