MAJOR REFACTOR P3

This commit is contained in:
2025-09-05 20:44:41 +02:00
parent 12af4e7bbe
commit a2b13f3d32
13 changed files with 99 additions and 3 deletions

View File

@@ -9,7 +9,8 @@ __all__ = ["app"]
db = SQLAlchemy(app=app)
# Set up migration
migration = Migrate(app=app, db=db)
migrations_dir = Path(__file__).parent / "migrations"
migration = Migrate(app=app, db=db, directory=str(migrations_dir))
# Init and upgrade
with app.app_context():

View File

@@ -1,5 +1,5 @@
from sqlalchemy.orm import sessionmaker
from app.config import timeout as timeout_
from config import timeout as timeout_
import aiohttp
import asyncio
import time

View File

@@ -1 +0,0 @@
timeout: int = 4000

View File

@@ -1,7 +1,7 @@
from flask import Blueprint, render_template, abort, jsonify, send_file, json
from typing import cast, Optional, Any
from datetime import datetime, timedelta, timezone
from app.config import timeout
from config import timeout
from ..models import service, log
from app import app, db

1
app/migrations/README Normal file
View File

@@ -0,0 +1 @@
Single-database configuration for Flask.

View File

@@ -0,0 +1,50 @@
# A generic, single database configuration.
[alembic]
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic,flask_migrate
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[logger_flask_migrate]
level = INFO
handlers =
qualname = flask_migrate
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

113
app/migrations/env.py Normal file
View File

@@ -0,0 +1,113 @@
import logging
from logging.config import fileConfig
from flask import current_app
from alembic import context
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file (alembic.ini) for Python logging;
# this wires up the [loggers]/[handlers]/[formatters] sections.
fileConfig(config.config_file_name)

# Logger for messages emitted by this env script.
logger = logging.getLogger('alembic.env')
def get_engine():
    """Return the SQLAlchemy engine registered with Flask-Migrate.

    Supports both generations of the Flask-SQLAlchemy API: older
    releases (and Alchemical) expose ``db.get_engine()``, while
    Flask-SQLAlchemy >= 3 exposes the engine as ``db.engine``.
    """
    migrate_db = current_app.extensions['migrate'].db
    try:
        # Flask-SQLAlchemy < 3 / Alchemical API
        return migrate_db.get_engine()
    except (TypeError, AttributeError):
        # Flask-SQLAlchemy >= 3 API
        return migrate_db.engine
def get_engine_url():
    """Return the database URL as a ConfigParser-safe string.

    ``%`` characters are doubled so the value survives ConfigParser
    interpolation when stored in the Alembic config. Falls back to
    ``str(url)`` on SQLAlchemy versions whose URL object lacks
    ``render_as_string``.
    """
    url = get_engine().url
    try:
        rendered = url.render_as_string(hide_password=False)
    except AttributeError:
        # older SQLAlchemy: URL has no render_as_string()
        rendered = str(url)
    return rendered.replace('%', '%%')
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata

# Point Alembic at the same database URL the Flask app uses.
config.set_main_option('sqlalchemy.url', get_engine_url())

# The Flask-SQLAlchemy db object registered with Flask-Migrate.
target_db = current_app.extensions['migrate'].db

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def get_metadata():
    """Return the MetaData for the default bind.

    Flask-SQLAlchemy 3.x keeps one MetaData per bind key in
    ``db.metadatas`` (``None`` = default bind); earlier versions expose
    a single ``db.metadata``.
    """
    per_bind = getattr(target_db, 'metadatas', None)
    if per_bind is not None:
        return per_bind[None]
    return target_db.metadata
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    Configures the context with just a database URL rather than an
    Engine, so no DBAPI needs to be available. Calls to
    ``context.execute()`` emit the generated SQL to the script output
    instead of a live connection.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=get_metadata(),
        literal_binds=True,
    )
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online():
    """Run migrations in 'online' mode.

    Creates an Engine and applies the migrations through a real
    database connection.
    """

    def skip_empty_revision(context, revision, directives):
        # Prevent `revision --autogenerate` from writing an empty
        # migration file when no schema changes were detected.
        # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
        if getattr(config.cmd_opts, 'autogenerate', False):
            script = directives[0]
            if script.upgrade_ops.is_empty():
                directives[:] = []
                logger.info('No changes in schema detected.')

    conf_args = current_app.extensions['migrate'].configure_args
    # Only install our callback when the app did not configure its own.
    if conf_args.get("process_revision_directives") is None:
        conf_args["process_revision_directives"] = skip_empty_revision

    engine = get_engine()
    with engine.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=get_metadata(),
            **conf_args
        )
        with context.begin_transaction():
            context.run_migrations()
# Entry point: Alembic executes env.py directly; the mode (offline SQL
# generation vs. live connection) is chosen from the CLI invocation.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

View File

@@ -0,0 +1,24 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}

View File

@@ -0,0 +1,37 @@
"""empty message
Revision ID: 3c05315d5b9b
Revises: f87909a4293b
Create Date: 2025-09-05 09:48:08.561045
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "3c05315d5b9b"
down_revision = "f87909a4293b"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table("log", schema=None) as batch_op:
batch_op.add_column(
sa.Column(
"timeout", sa.Boolean(), nullable=False, server_default="false"
)
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table("log", schema=None) as batch_op:
batch_op.drop_column("timeout")
# ### end Alembic commands ###

View File

@@ -0,0 +1,32 @@
"""empty message
Revision ID: d7d380435347
Revises:
Create Date: 2025-09-02 08:43:16.682424
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'd7d380435347'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('log',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('dateCreated', sa.DateTime(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('log')
# ### end Alembic commands ###

View File

@@ -0,0 +1,35 @@
"""empty message
Revision ID: f04407e8e466
Revises: d7d380435347
Create Date: 2025-09-03 15:40:30.413166
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'f04407e8e466'
down_revision = 'd7d380435347'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('service',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('url', sa.String(), nullable=False),
sa.Column('label', sa.String(length=15), nullable=False),
sa.Column('public_access', sa.Boolean(), nullable=False),
sa.Column('ping_method', sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('service')
# ### end Alembic commands ###

View File

@@ -0,0 +1,50 @@
"""empty message
Revision ID: f87909a4293b
Revises: f04407e8e466
Create Date: 2025-09-03 16:36:14.608372
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "f87909a4293b"
down_revision = "f04407e8e466"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.execute("DROP TABLE IF EXISTS _alembic_tmp_log")
op.execute("DELETE FROM log")
with op.batch_alter_table("log", schema=None) as batch_op:
batch_op.add_column(
sa.Column(
"service_id", sa.Integer(), nullable=False, server_default="0"
)
)
batch_op.add_column(sa.Column("ping", sa.Integer(), nullable=True))
batch_op.create_index(
batch_op.f("ix_log_dateCreated"), ["dateCreated"], unique=False
)
batch_op.create_foreign_key(
"fk_log2service", "service", ["service_id"], ["id"]
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table("log", schema=None) as batch_op:
batch_op.drop_constraint("fk_log2service", type_="foreignkey")
batch_op.drop_index(batch_op.f("ix_log_dateCreated"))
batch_op.drop_column("ping")
batch_op.drop_column("service_id")
# ### end Alembic commands ###

95
app/routes.py Normal file
View File

@@ -0,0 +1,95 @@
from flask import Blueprint, render_template, abort, jsonify, send_file, json
from typing import cast, Optional, Any
from datetime import datetime, timedelta, timezone
from config import timeout
from ..models import service, log
from app import app, db
# API blueprint. Fix: the module's ``__name__`` (not the string
# literal '"__name__"') must be passed as Flask's import_name so the
# blueprint can resolve its static folder and templates relative to
# this module.
bp = Blueprint(
    "api",
    __name__,
    url_prefix="/api",
    static_folder="static",
)
def prepare_chart_data(
    logs: list[log],
) -> tuple[list[str], list[Optional[int]]]:
    """Convert log rows into (timestamps, pings) series for chart.js.

    When two consecutive logs are separated by more than 1.5x the
    expected sampling interval (``timeout`` + 1000, in milliseconds —
    TODO confirm units against config), a ``None`` value is inserted so
    chart.js renders a gap instead of connecting the points.
    """
    if not logs:
        return ([], [])
    gap_threshold = timedelta(milliseconds=1.5 * (timeout + 1000))
    first = logs[0]
    labels = [first.dateCreatedUTC().isoformat()]
    values: list[Optional[int]] = [first.ping]
    for prev, curr in zip(logs, logs[1:]):
        # Break the line when the gap between samples is too large.
        if abs(curr.dateCreatedUTC() - prev.dateCreatedUTC()) > gap_threshold:
            labels.append(prev.dateCreatedUTC().isoformat())
            values.append(None)
        labels.append(curr.dateCreatedUTC().isoformat())
        values.append(curr.ping)
    return (labels, values)
@bp.route("/")
def homepage():
return render_template("home.html")
@bp.route("/chart/<int:id>")
def chart(id: int):
with app.app_context():
logs = []
s = db.session.query(service).filter_by(id=id).first()
if s:
logs = cast(
list[log],
s.logs.order_by(log.dateCreated.desc()) # type: ignore
.limit(300)
.all(),
)
else:
return abort(code=403)
x, y = prepare_chart_data(logs=logs)
now = datetime.now(timezone.utc)
max_ = now
min_ = now - timedelta(hours=1)
return render_template(
"chart.html",
dates=x,
values=json.dumps(y),
min=min_.isoformat(),
max=max_.isoformat(),
)
@bp.route("/status")
def status():
results: list[dict[str, Any]] = []
with app.app_context():
a = db.session.query(service).all()
for s in a:
b = cast(
Optional[log],
s.logs.order_by(
log.dateCreated.desc() # type: ignore
).first(),
)
if b:
results.append(s.to_dict() | b.to_dict())
return jsonify(results)
@bp.route("/favicon.svg")
def favicon():
return send_file("/static/favicon.svg")