Add Docker, migrations, and initial database setup

Added a Dockerfile and docker-compose.yaml for containerization. Introduced Alembic migrations with the initial schema and a follow-up migration that changes two column types (food_item.barcode to String, food_log.amount to Float). Added requirements.txt to pin dependencies and removed the one-off temp.py cleanup script. Updated .gitignore so the migrations/ directory is now tracked.
2025-08-11 00:49:46 +02:00
parent b8bd8d802b
commit 66204e7e4a
11 changed files with 349 additions and 17 deletions

2
.gitignore

@@ -1,5 +1,3 @@
migrations/
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]

11
Dockerfile Normal file

@@ -0,0 +1,11 @@
FROM python:3.12-slim
# Do all work inside /app rather than the image's root filesystem
WORKDIR /app
# Copy requirements first so the dependency layer stays cached when only source changes
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
COPY . .
CMD ["python", "app.py"]
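
The CMD runs app.py directly, so the Flask development server has to listen on 0.0.0.0 for the published port in docker-compose.yaml below to reach it. The real app.py is not part of this commit; the following is only a minimal sketch of such an entry point, assuming the app object lives in the application package that the removed temp.py (further down in this diff) imports.

# app.py - hypothetical entry point matching the CMD above
from application import app  # assumed package name, taken from the removed temp.py

if __name__ == "__main__":
    # Bind to all interfaces so the mapped port 5000 is reachable from the host
    app.run(host="0.0.0.0", port=5000)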

9
docker-compose.yaml Normal file

@@ -0,0 +1,9 @@
version: "3.9"
services:
app:
build: .
container_name: my_python_sqlite_app
ports:
- "5000:5000" # change if needed
volumes:
- ./data:/app/data # maps host ./data to container /app/data
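
For that bind mount to persist anything, the SQLite file has to live under /app/data inside the container. The configuration below is a hedged sketch only; the database filename app.db and where this code sits in the project are assumptions, not shown in this commit.

import os
from flask import Flask
from flask_sqlalchemy import SQLAlchemy

# Keep the SQLite file on the mounted volume so it survives container rebuilds.
DATA_DIR = "/app/data"
os.makedirs(DATA_DIR, exist_ok=True)

app = Flask(__name__)
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///" + os.path.join(DATA_DIR, "app.db")
db = SQLAlchemy(app)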

1
migrations/README Normal file

@@ -0,0 +1 @@
Single-database configuration for Flask.

50
migrations/alembic.ini Normal file

@@ -0,0 +1,50 @@
# A generic, single database configuration.
[alembic]
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic,flask_migrate
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[logger_flask_migrate]
level = INFO
handlers =
qualname = flask_migrate
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

113
migrations/env.py Normal file

@@ -0,0 +1,113 @@
import logging
from logging.config import fileConfig
from flask import current_app
from alembic import context
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')
def get_engine():
try:
# this works with Flask-SQLAlchemy<3 and Alchemical
return current_app.extensions['migrate'].db.get_engine()
except (TypeError, AttributeError):
# this works with Flask-SQLAlchemy>=3
return current_app.extensions['migrate'].db.engine
def get_engine_url():
try:
return get_engine().url.render_as_string(hide_password=False).replace(
'%', '%%')
except AttributeError:
return str(get_engine().url).replace('%', '%%')
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
config.set_main_option('sqlalchemy.url', get_engine_url())
target_db = current_app.extensions['migrate'].db
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def get_metadata():
if hasattr(target_db, 'metadatas'):
return target_db.metadatas[None]
return target_db.metadata
def run_migrations_offline():
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url, target_metadata=get_metadata(), literal_binds=True
)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
# this callback is used to prevent an auto-migration from being generated
# when there are no changes to the schema
# reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
def process_revision_directives(context, revision, directives):
if getattr(config.cmd_opts, 'autogenerate', False):
script = directives[0]
if script.upgrade_ops.is_empty():
directives[:] = []
logger.info('No changes in schema detected.')
conf_args = current_app.extensions['migrate'].configure_args
if conf_args.get("process_revision_directives") is None:
conf_args["process_revision_directives"] = process_revision_directives
connectable = get_engine()
with connectable.connect() as connection:
context.configure(
connection=connection,
target_metadata=get_metadata(),
**conf_args
)
with context.begin_transaction():
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
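
This env.py runs whenever flask db upgrade (or its programmatic equivalent) is invoked; the Dockerfile's CMD only starts app.py, so the migrations are not applied automatically. One hedged way to apply them at start-up, assuming the application package imported by the removed temp.py below and a registered Migrate extension:

# Hypothetical start-up hook: programmatic equivalent of `flask db upgrade`
from flask_migrate import upgrade
from application import app  # assumed package, as imported by the removed temp.py

with app.app_context():
    upgrade()  # drives run_migrations_online() above against the bound engine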

24
migrations/script.py.mako Normal file

@@ -0,0 +1,24 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}

73
migrations/versions/46cd3e1a3b67_.py Normal file

@@ -0,0 +1,73 @@
"""empty message
Revision ID: 46cd3e1a3b67
Revises:
Create Date: 2025-07-07 14:31:50.181885
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '46cd3e1a3b67'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('unit',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('symbol', sa.String(length=10), nullable=False),
sa.Column('name', sa.String(length=50), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name'),
sa.UniqueConstraint('symbol')
)
op.create_table('user',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('username', sa.String(length=150), nullable=False),
sa.Column('password', sa.String(), nullable=False),
sa.Column('is_admin', sa.Boolean(), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('username')
)
op.create_table('food_item',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('barcode', sa.Integer(), nullable=True),
sa.Column('owner_id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=150), nullable=False),
sa.Column('energy_100', sa.Integer(), nullable=False),
sa.Column('protein_100', sa.Float(), nullable=False),
sa.Column('carbs_100', sa.Float(), nullable=False),
sa.Column('sugar_100', sa.Float(), nullable=True),
sa.Column('fat_100', sa.Float(), nullable=False),
sa.Column('saturated_fat_100', sa.Float(), nullable=True),
sa.ForeignKeyConstraint(['owner_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('barcode', 'owner_id', name='barcode_owner_key')
)
op.create_table('food_log',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('datetime_created', sa.DateTime(), nullable=False),
sa.Column('date_', sa.Date(), nullable=False),
sa.Column('food_item_id', sa.Integer(), nullable=False),
sa.Column('part_of_day', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('amount', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['food_item_id'], ['food_item.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('food_log')
op.drop_table('food_item')
op.drop_table('user')
op.drop_table('unit')
# ### end Alembic commands ###
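
The models themselves are not part of this commit, so the class below is only a hedged reconstruction of the Flask-SQLAlchemy model that would autogenerate the food_item table above, including the composite barcode_owner_key constraint; column names and types mirror the migration exactly, everything else is assumed.

from flask_sqlalchemy import SQLAlchemy

db = SQLAlchemy()  # stand-in for the db object defined in the application package

class FoodItem(db.Model):
    __tablename__ = "food_item"
    __table_args__ = (
        db.UniqueConstraint("barcode", "owner_id", name="barcode_owner_key"),
    )

    id = db.Column(db.Integer, primary_key=True)
    barcode = db.Column(db.Integer)  # widened to String in revision bb1d9bebf8f6 below
    owner_id = db.Column(db.Integer, db.ForeignKey("user.id"), nullable=False)
    name = db.Column(db.String(150), nullable=False)
    energy_100 = db.Column(db.Integer, nullable=False)
    protein_100 = db.Column(db.Float, nullable=False)
    carbs_100 = db.Column(db.Float, nullable=False)
    sugar_100 = db.Column(db.Float)
    fat_100 = db.Column(db.Float, nullable=False)
    saturated_fat_100 = db.Column(db.Float)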

50
migrations/versions/bb1d9bebf8f6_.py Normal file

@@ -0,0 +1,50 @@
"""empty message
Revision ID: bb1d9bebf8f6
Revises: 46cd3e1a3b67
Create Date: 2025-08-07 16:49:19.511091
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'bb1d9bebf8f6'
down_revision = '46cd3e1a3b67'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('food_item', schema=None) as batch_op:
batch_op.alter_column('barcode',
existing_type=sa.INTEGER(),
type_=sa.String(),
existing_nullable=True)
with op.batch_alter_table('food_log', schema=None) as batch_op:
batch_op.alter_column('amount',
existing_type=sa.INTEGER(),
type_=sa.Float(),
existing_nullable=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('food_log', schema=None) as batch_op:
batch_op.alter_column('amount',
existing_type=sa.Float(),
type_=sa.INTEGER(),
existing_nullable=False)
with op.batch_alter_table('food_item', schema=None) as batch_op:
batch_op.alter_column('barcode',
existing_type=sa.String(),
type_=sa.INTEGER(),
existing_nullable=True)
# ### end Alembic commands ###
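
Both alterations go through op.batch_alter_table() because SQLite cannot change a column's type in place: batch mode copies the table, applies the change, and swaps the copy in. With Flask-Migrate this rendering is typically enabled once at extension setup via render_as_batch=True; whether this project does so is not visible in the commit, so the snippet below is only a sketch with placeholder app and database objects.

from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate

app = Flask(__name__)
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///example.db"  # placeholder URI
db = SQLAlchemy(app)

# render_as_batch=True makes autogenerated revisions wrap column changes in
# op.batch_alter_table(), which SQLite needs for ALTER-type operations.
migrate = Migrate(app, db, render_as_batch=True)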

18
requirements.txt Normal file

@@ -0,0 +1,18 @@
alembic==1.16.1
blinker==1.9.0
click==8.2.1
colorama==0.4.6
Flask==3.1.1
Flask-Login==0.6.3
Flask-Migrate==4.1.0
Flask-SQLAlchemy==3.1.1
Flask-WTF==1.2.2
greenlet==3.2.2
itsdangerous==2.2.0
Jinja2==3.1.6
Mako==1.3.10
MarkupSafe==3.0.2
SQLAlchemy==2.0.41
typing_extensions==4.13.2
Werkzeug==3.1.3
WTForms==3.2.1

15
temp.py

@@ -1,15 +0,0 @@
from application import db, app
from sqlalchemy import MetaData, Table
with app.app_context():
table_name = "_alembic_tmp_food_item"
engine = db.engine
metadata = MetaData()
metadata.reflect(bind=engine)
if table_name in metadata.tables:
tmp_table = Table(table_name, metadata, autoload_with=engine)
tmp_table.drop(engine)
print(f"Table '{table_name}' dropped.")
else:
print(f"No table named '{table_name}' found.")
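
For context, _alembic_tmp_food_item is the scratch table Alembic's batch mode leaves behind if a table-copy migration fails partway, which is why this one-off script existed. Should the situation recur, a hedged equivalent in SQLAlchemy 2.x style, reusing the same imports the removed script used:

from sqlalchemy import text
from application import db, app  # same imports as the removed script

with app.app_context():
    with db.engine.begin() as conn:
        # DROP ... IF EXISTS makes the cleanup safe to re-run
        conn.execute(text('DROP TABLE IF EXISTS "_alembic_tmp_food_item"'))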