Compare commits

...

9 Commits

SHA1 Message Date
55e8356b1b Added TODO notes 2025-09-05 21:17:47 +02:00
8023978307 Changed timeout to 1s which will make it ping every 2s. This is to test 2025-09-05 21:12:31 +02:00
5617440013 Did some chart stuff (ugly) 2025-09-05 21:09:31 +02:00
7c5e15af99 gets all logs from past hour rather than past 300 logs 2025-09-05 21:00:05 +02:00
89f5c4ef0b MAJOR REFACTOR P4 FINAL 2025-09-05 20:47:11 +02:00
e7cf9edde9 Update routes.py 2025-09-05 20:45:00 +02:00
a2b13f3d32 MAJOR REFACTOR P3 2025-09-05 20:44:41 +02:00
12af4e7bbe MAJOR REFACTOR P2 2025-09-05 20:41:37 +02:00
4cfd5b2dbe MAJOR REFACTOR P1 2025-09-05 20:39:42 +02:00
37 changed files with 412 additions and 310 deletions

137  app.py

@@ -1,127 +1,20 @@
# import requests as r
from flask import jsonify, render_template, send_file, abort
from poll_services import start_async_loop
from mem import services, app, db
from app.flask_app import start_flask, stop_event as flask_stop
from app.aio_client import start_worker, stop_event as aio_stop
import threading
from flask_migrate import upgrade, stamp
from pathlib import Path
from models import service, log
from typing import Any, Optional, cast
import json
from datetime import datetime, timezone, timedelta
from config import timeout
# Prepares log data for chart.js chart
def prepare_chart_data(
logs: list[log],
) -> tuple[list[str], list[Optional[int]]]:
if len(logs) <= 0: # Return empty if there are no logs
return ([], [])
x = [logs[0].dateCreatedUTC().isoformat()]
y = [logs[0].ping]
for i in range(1, len(logs)):
log1 = logs[i]
log2 = logs[i - 1]
# Check if the gap in points exceeds a threshold
if (abs(log1.dateCreatedUTC() - log2.dateCreatedUTC())) > timedelta(
milliseconds=1.5 * (timeout + 1000)
):
x.append(log2.dateCreatedUTC().isoformat())
y.append(None)
x.append(log1.dateCreatedUTC().isoformat())
y.append(log1.ping)
return (x, y)
# Init and upgrade
with app.app_context():
# Check if DB file is missing
if not (Path("./instance/app.db").is_file()):
with app.app_context():
db.create_all()
stamp()
# Upgrade db if any new migrations exist
upgrade()
with app.app_context():
if not db.session.query(service).first():
for s in services:
db.session.add(
service(
url=s.url,
label=s.label,
public_access=s.public,
ping_method=s.ping_type,
)
)
db.session.commit()
@app.route("/")
def homepage():
return render_template("home.html")
@app.route("/chart/<int:id>")
def chart(id: int):
with app.app_context():
logs = []
s = db.session.query(service).filter_by(id=id).first()
if s:
logs = cast(
list[log],
s.logs.order_by(log.dateCreated.desc()) # type: ignore
.limit(300)
.all(),
)
else:
return abort(code=403)
x, y = prepare_chart_data(logs=logs)
now = datetime.now(timezone.utc)
max_ = now
min_ = now - timedelta(hours=1)
return render_template(
"chart.html",
dates=x,
values=json.dumps(y),
min=min_.isoformat(),
max=max_.isoformat(),
)
@app.route("/api/status")
def status():
results: list[dict[str, Any]] = []
with app.app_context():
a = db.session.query(service).all()
for s in a:
b = cast(
Optional[log],
s.logs.order_by(
log.dateCreated.desc() # type: ignore
).first(),
)
if b:
results.append(s.to_dict() | b.to_dict())
return jsonify(results)
@app.route("/favicon.svg")
def favicon():
return send_file("/static/favicon.svg")
import sys
import time
# Only run if directly running file
if __name__ == "__main__":
t = threading.Thread(target=start_async_loop, daemon=True)
t.start()
threading.Thread(target=start_worker, daemon=True).start()
# Run flask app
app.run(host="0.0.0.0", port=80, debug=True, use_reloader=False)
threading.Thread(target=start_flask, daemon=True).start()
# Optional: monitor stop_event in a separate thread
def monitor_worker():
while not aio_stop.is_set() and not flask_stop.is_set():
time.sleep(1)
print("Worker failed, stopping program...")
sys.exit(1)
monitor_worker()

22  app/__init__.py (new file)

@@ -0,0 +1,22 @@
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate, upgrade, stamp
from .flask_app import app
from pathlib import Path
__all__ = ["app"]
# Create db
db = SQLAlchemy(app=app)
# Set up migration
migrations_dir = Path(__file__).parent / "migrations"
migration = Migrate(app=app, db=db, directory=str(migrations_dir))
# Init and upgrade
with app.app_context():
# Check if DB file is missing
if not (Path("./instance/app.db").is_file()):
db.create_all()
stamp()
# Upgrade db if any new migrations exist
upgrade()
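
The block above only applies migrations that already exist on disk (or stamps a freshly created database). Generating a new revision is a separate step done with Flask-Migrate; the sketch below uses its programmatic API and is an assumption-heavy example rather than part of this change set: it assumes it is run from the project root and that the model module is imported so autogenerate can see the tables.

# Hypothetical one-off script to generate a new migration for this setup (not in the change set).
from flask_migrate import migrate     # programmatic equivalent of `flask db migrate`
from app import app                   # importing app also runs the init/upgrade block above
import app.models                     # noqa: F401 -- models must be imported for autogenerate

with app.app_context():
    # Writes a new revision under app/migrations/versions/; the upgrade() call
    # in app/__init__.py applies it the next time the app starts.
    migrate(message="describe the schema change here")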

app/aio_client/__init__.py (new file)

@@ -0,0 +1,3 @@
from .worker import start_worker, stop_event
__all__ = ["start_worker", "stop_event"]

47  app/aio_client/client.py (new file)

@@ -0,0 +1,47 @@
import aiohttp
from app.models import service, log
from types import SimpleNamespace
from typing import Optional
async def ping(
client: aiohttp.ClientSession,
s: service,
ctx: Optional[SimpleNamespace] = None,
) -> int:
ctx = ctx or SimpleNamespace()
match s.ping_method:
case 0:
r = await client.head(
url=s.url,
ssl=True if s.public_access else False,
allow_redirects=True,
trace_request_ctx=ctx, # type: ignore
)
case 1:
r = await client.get(
url=s.url,
ssl=True if s.public_access else False,
allow_redirects=True,
trace_request_ctx=ctx, # type: ignore
)
case _:
raise Exception("UNKNOWN PING METHOD")
return r.status
async def ping_service(client: aiohttp.ClientSession, s: service) -> log:
try:
ctx = SimpleNamespace()
status = await ping(client=client, s=s, ctx=ctx)
if status == 200:
return log(service_id=s.id, ping=int(ctx.duration_ms))
else:
return log(service_id=s.id, ping=None)
except aiohttp.ConnectionTimeoutError:
return log(service_id=s.id, ping=None, timeout=True)
except Exception as e:
print(e)
return log(service_id=s.id, ping=None)
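
ping_service relies on the TraceConfig callbacks (set up in setup_client in app/aio_client/worker.py, the next file) to populate ctx.duration_ms, so a standalone call needs that traced session. A minimal smoke-test sketch follows, with assumptions: the package imports resolve from the project root and the service table already holds at least one row.

# Hypothetical smoke test for ping_service; not part of the change set.
import asyncio
from app import app, db
from app.models import service
from app.aio_client.client import ping_service
from app.aio_client.worker import setup_client  # provides the traced ClientSession

async def main():
    client = setup_client()  # created inside the running event loop
    try:
        with app.app_context():
            s = db.session.query(service).first()  # assumes at least one seeded service
            entry = await ping_service(client=client, s=s)
        print(entry.ping)  # latency in ms, or None on failure/timeout
    finally:
        await client.close()

asyncio.run(main())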

91  app/aio_client/worker.py (new file)

@@ -0,0 +1,91 @@
from sqlalchemy.orm import sessionmaker
from config import timeout as timeout_
import aiohttp
import asyncio
import time
from types import SimpleNamespace
from app import db, app
from app.models import service
from .client import ping_service
import threading
stop_event = threading.Event()
def start_worker():
try:
# Creates new event loop in new thread
loop = asyncio.new_event_loop()
# Creates new task on new loop
loop.create_task(update_services())
# Schedule loop to run forever
loop.run_forever()
except Exception as e:
print("Worker thread exception:", e)
stop_event.set()
def setup_client() -> aiohttp.ClientSession:
timeout = aiohttp.client.ClientTimeout(total=timeout_ / 1000)
# Each request will get its own context
trace_config = aiohttp.TraceConfig()
async def on_start(
session: aiohttp.ClientSession,
context: SimpleNamespace,
params: aiohttp.TraceRequestStartParams,
):
ctx = context.trace_request_ctx
ctx.start = time.perf_counter() # store per-request
async def on_end(
session: aiohttp.ClientSession,
context: SimpleNamespace,
params: aiohttp.TraceRequestEndParams,
):
ctx = context.trace_request_ctx
ctx.end = time.perf_counter()
ctx.duration_ms = int((ctx.end - ctx.start) * 1000)
trace_config.on_request_start.append(on_start)
trace_config.on_request_end.append(on_end)
client = aiohttp.ClientSession(
timeout=timeout, auto_decompress=False, trace_configs=[trace_config]
)
return client
async def update_services():
try:
print("Starting service updates...")
# Create new session
with app.app_context():
WorkerSession = sessionmaker(bind=db.engine)
client = setup_client()
# Actual update loop
while True:
session = WorkerSession()
# TODO: Add http processing time var
# instead of just adding 1 second
sleeptask = asyncio.create_task(asyncio.sleep(timeout_ / 1000 + 1))
tasks = [
ping_service(client=client, s=s)
for s in session.query(service).all()
]
logs = await asyncio.gather(*tasks)
await sleeptask
try:
session.add_all(logs)
session.commit()
except Exception as e:
session.rollback()
raise e
finally:
session.close()
except Exception as e:
print("Worker thread exception:", e)
stop_event.set()

20  app/flask_app/__init__.py (new file)

@@ -0,0 +1,20 @@
import threading
from flask import Flask
stop_event = threading.Event()
# Flask app to serve status
app = Flask(__name__)
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///app.db"
def start_flask() -> None:
try:
# Run flask app
from .routes import bp
app.register_blueprint(bp)
app.run(host="0.0.0.0", port=80, debug=True, use_reloader=False)
except Exception as e:
print("Worker thread exception:", e)
stop_event.set()

101  app/flask_app/routes.py (new file)

@@ -0,0 +1,101 @@
from flask import Blueprint, render_template, abort, jsonify, send_file, json
from typing import cast, Optional, Any
from datetime import datetime, timedelta, timezone
from config import timeout
from ..models import service, log
from app import app, db
bp = Blueprint(
"main",
"__name__",
url_prefix="/",
static_folder="static",
)
# TODO: Move util functions to separate file (utils.py?)
# Prepares log data for chart.js chart
def prepare_chart_data(
logs: list[log],
) -> tuple[list[str], list[Optional[int]]]:
if len(logs) <= 0: # Return empty if there are no logs
return ([], [])
x = [logs[0].dateCreatedUTC().isoformat()]
y = [logs[0].ping]
for i in range(1, len(logs)):
log1 = logs[i]
log2 = logs[i - 1]
# TODO: add timeout duration to log so this can be used
# instead of 1.5 * the current timeout + 1.
# Check if the gap in points exceeds a threshold
if (abs(log1.dateCreatedUTC() - log2.dateCreatedUTC())) > timedelta(
milliseconds=1.5 * (timeout + 1000)
):
x.append(log2.dateCreatedUTC().isoformat())
y.append(None)
x.append(log1.dateCreatedUTC().isoformat())
y.append(log1.ping)
return (x, y)
@bp.route("/")
def homepage():
return render_template("home.html")
@bp.route("/chart/<int:id>")
def chart(id: int):
one_hour_ago = datetime.now(timezone.utc) - timedelta(hours=1)
with app.app_context():
logs = []
s = db.session.query(service).filter_by(id=id).first()
if s:
logs = cast(
list[log],
s.logs.filter(log.dateCreated >= one_hour_ago)
.order_by(log.dateCreated.desc()) # type: ignore
.all(),
)
else:
return abort(code=403)
x, y = prepare_chart_data(logs=logs)
now = datetime.now(timezone.utc)
max_ = now
min_ = now - timedelta(hours=1)
return render_template(
"chart.html",
dates=x,
values=json.dumps(y),
min=min_.isoformat(),
max=max_.isoformat(),
)
@bp.route("/api/status")
def status():
results: list[dict[str, Any]] = []
with app.app_context():
a = db.session.query(service).all()
for s in a:
b = cast(
Optional[log],
s.logs.order_by(
log.dateCreated.desc() # type: ignore
).first(),
)
if b:
results.append(s.to_dict() | b.to_dict())
return jsonify(results)
@bp.route("/favicon.svg")
def favicon():
return send_file("/static/favicon.svg")
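
As a worked example of the gap handling in prepare_chart_data above, the sketch below feeds it three stand-in log objects (newest first, matching the descending query). With timeout = 1000 ms from config.py the threshold is 1.5 * (1000 + 1000) ms = 3 s, so the 8 s gap gets a None marker that Chart.js leaves unconnected. The FakeLog class and timestamps are hypothetical, not from the repo, and the imports assume the script runs from the project root.

# Hypothetical worked example for prepare_chart_data (stand-in objects, not the ORM model).
from datetime import datetime, timezone, timedelta
from app.flask_app.routes import prepare_chart_data

class FakeLog:
    # Just enough surface for prepare_chart_data: .ping and .dateCreatedUTC()
    def __init__(self, ping, when):
        self.ping = ping
        self._when = when
    def dateCreatedUTC(self):
        return self._when

now = datetime(2025, 9, 5, 12, 0, 10, tzinfo=timezone.utc)
logs = [                                          # newest first, like the .desc() query
    FakeLog(30, now),
    FakeLog(25, now - timedelta(seconds=8)),      # 8 s gap > 3 s threshold
    FakeLog(28, now - timedelta(seconds=10)),     # 2 s gap, below threshold
]
x, y = prepare_chart_data(logs=logs)
print(y)  # [30, None, 25, 28]: the None breaks the Chart.js line across the outage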

15 image files changed (filenames not shown; Before and After sizes identical, ranging from 191 B to 8.9 KiB)

chart.html (template)

@@ -1,10 +1,12 @@
<!DOCTYPE html>
<html lang="en">
<html lang="en" data-bs-theme="dark">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta name="viewport" content="width=device-width, initial-scale=1" charset=" UTF-8">
<title>Simple Line Chart</title>
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.8/dist/css/bootstrap.min.css" rel="stylesheet"
integrity="sha384-sRIl4kxILFvY47J16cr9ZwB07vP4J8+LH7qKQnuqkuIAvNWLzeN8tE5YBujZqJLB" crossorigin="anonymous">
<link rel="icon" type="image/svg+xml" href="/static/favicon.svg">
<script src="https://cdn.jsdelivr.net/npm/chart.js"></script>
<script src="https://cdn.jsdelivr.net/npm/chartjs-adapter-date-fns"></script>
<script src="https://www.chartjs.org/docs/latest/samples/utils.js"></script>
@@ -43,14 +45,18 @@
x: {
type: 'time', // Important for datetime axis
time: {
unit: 'hour',
unit: 'minute',
tooltipFormat: 'HH:mm:ss',
displayFormats: {
hour: 'HH:mm'
}
displayFormats: { hour: 'HH:mm' }
},
min: min,
max: max
max: max,
ticks: { color: '#ffffff' }, // X-axis labels
grid: { color: 'rgba(255, 255, 255, 0.1)' } // X-axis grid lines
},
y: {
ticks: { color: '#ffffff' }, // Y-axis labels
grid: { color: 'rgba(255, 255, 255, 0.1)' } // Y-axis grid lines
}
}
}

home.html (template)

@@ -82,10 +82,10 @@
</div>
<div class="position-relative ratio ratio-1x1">
<div class="d-flex justify-content-center align-items-center">
<img src="static/icons/${s.service_id - 1}.svg" class="img-fluid w-75">
<img src="{{ url_for('static', filename="icons") }}/${s.service_id - 1}.svg" class="img-fluid w-75">
</div>
<div>
${s.public_access ? `` : `<img src='static/lock.svg' class='img-fluid position-absolute bottom-0 end-0 w-25'>`}
${s.public_access ? `` : `<img src='{{ url_for('static', filename='lock.svg') }}' class='img-fluid position-absolute bottom-0 end-0 w-25'>`}
<div class="position-absolute bottom-0 text-body bg-dark bg-opacity-75 px-1 rounded">${s.ping ? s.ping + "ms" : ""}</div>
</div>
</div>


@@ -1,4 +1,4 @@
from mem import db
from app import db
from datetime import datetime, timezone
from validators import url as is_url
from typing import Any, Optional

95  app/routes.py (new file)

@@ -0,0 +1,95 @@
from flask import Blueprint, render_template, abort, jsonify, send_file, json
from typing import cast, Optional, Any
from datetime import datetime, timedelta, timezone
from config import timeout
from .models import service, log
from app import app, db
bp = Blueprint(
"main",
"__name__",
url_prefix="/",
static_folder="static",
)
# Prepares log data for chart.js chart
def prepare_chart_data(
logs: list[log],
) -> tuple[list[str], list[Optional[int]]]:
if len(logs) <= 0: # Return empty if there are no logs
return ([], [])
x = [logs[0].dateCreatedUTC().isoformat()]
y = [logs[0].ping]
for i in range(1, len(logs)):
log1 = logs[i]
log2 = logs[i - 1]
# Check if the gap in points exceeds a threshold
if (abs(log1.dateCreatedUTC() - log2.dateCreatedUTC())) > timedelta(
milliseconds=1.5 * (timeout + 1000)
):
x.append(log2.dateCreatedUTC().isoformat())
y.append(None)
x.append(log1.dateCreatedUTC().isoformat())
y.append(log1.ping)
return (x, y)
@bp.route("/")
def homepage():
return render_template("home.html")
@bp.route("/chart/<int:id>")
def chart(id: int):
with app.app_context():
logs = []
s = db.session.query(service).filter_by(id=id).first()
if s:
logs = cast(
list[log],
s.logs.order_by(log.dateCreated.desc()) # type: ignore
.limit(300)
.all(),
)
else:
return abort(code=403)
x, y = prepare_chart_data(logs=logs)
now = datetime.now(timezone.utc)
max_ = now
min_ = now - timedelta(hours=1)
return render_template(
"chart.html",
dates=x,
values=json.dumps(y),
min=min_.isoformat(),
max=max_.isoformat(),
)
@bp.route("/api/status")
def status():
results: list[dict[str, Any]] = []
with app.app_context():
a = db.session.query(service).all()
for s in a:
b = cast(
Optional[log],
s.logs.order_by(
log.dateCreated.desc() # type: ignore
).first(),
)
if b:
results.append(s.to_dict() | b.to_dict())
return jsonify(results)
@bp.route("/favicon.svg")
def favicon():
return send_file("/static/favicon.svg")

config.py

@@ -1 +1 @@
timeout: int = 4000
timeout: int = 1000
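
A quick check of the "2s" figure from the commit message, assuming this value is in milliseconds (as app/aio_client/worker.py treats it) and using the sleep it awaits alongside each ping batch:

# Rough cadence check; the sleep in worker.py is asyncio.sleep(timeout_ / 1000 + 1).
timeout = 1000                      # the new value above, in ms
batch_floor_s = timeout / 1000 + 1  # minimum seconds between ping batches
print(batch_floor_s)                # 2.0, i.e. roughly one ping batch every ~2 s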

mem.py (deleted)

@@ -1,62 +0,0 @@
from typing import Any
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
class service:
id: int
url: str
online: bool
public: bool
ping_type: int
def __init__(
self,
id: int,
url: str = "",
label: str = "",
public: bool = True,
ping_type: int = 0,
):
self.id = id
self.url = url
self.public = public
self.label = label
self.ping_type = ping_type
def to_dict(self) -> dict[str, Any]:
return {
"url": self.url,
"public": self.public,
"label": self.label,
"id": self.id,
"ping_type": self.ping_type,
}
services: list[service] = [
service(0, "https://git.ihatemen.uk/", "Gitea"),
service(1, "https://plex.ihatemen.uk/", "Plex"),
service(2, "https://truenas.local/", "TrueNAS", False),
service(3, "https://cloud.ihatemen.uk/", "NextCloud"),
service(4, "https://request.ihatemen.uk/", "Overseerr"),
service(5, "https://id.ihatemen.uk/", "PocketID"),
service(6, "https://tautulli.local/", "Tautulli", False),
service(
7, "https://transmission.local/", "Transmission", False, ping_type=1
),
service(8, "https://vault.ihatemen.uk/", "Vault Warden"),
service(9, "https://nginx.local/", "Nginx (NPM)", False),
service(10, "https://app.ihatemen.uk/", "Kcal Counter"),
service(
id=11, url="https://unifi.local/", label="Unifi Server", public=False
),
]
# Flask app to serve status
app = Flask(__name__)
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///app.db"
db = SQLAlchemy(app=app)
migration = Migrate(app=app, db=db)

poll_services.py (deleted)

@@ -1,114 +0,0 @@
from mem import db, app
import aiohttp
import asyncio
import time
from models import log, service
from sqlalchemy.orm import sessionmaker
from config import timeout as timeout_
from typing import Optional
from types import SimpleNamespace
async def ping(
client: aiohttp.ClientSession,
s: service,
ctx: Optional[SimpleNamespace] = None,
) -> int:
ctx = ctx or SimpleNamespace()
match s.ping_method:
case 0:
r = await client.head(
url=s.url,
ssl=True if s.public_access else False,
allow_redirects=True,
trace_request_ctx=ctx, # type: ignore
)
case 1:
r = await client.get(
url=s.url,
ssl=True if s.public_access else False,
allow_redirects=True,
trace_request_ctx=ctx, # type: ignore
)
case _:
raise Exception("UNKNOWN PING METHOD")
return r.status
async def check_service(client: aiohttp.ClientSession, s: service) -> log:
try:
ctx = SimpleNamespace()
status = await ping(client=client, s=s, ctx=ctx)
if status == 200:
return log(service_id=s.id, ping=int(ctx.duration_ms))
else:
return log(service_id=s.id, ping=None)
except aiohttp.ConnectionTimeoutError:
return log(service_id=s.id, ping=None)
except Exception:
return log(service_id=s.id, ping=None)
def start_async_loop():
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
asyncio.run_coroutine_threadsafe(update_services(loop=loop), loop=loop)
loop.run_forever()
def setup_client() -> aiohttp.ClientSession:
timeout = aiohttp.client.ClientTimeout(total=timeout_ / 1000)
# Each request will get its own context
trace_config = aiohttp.TraceConfig()
async def on_start(
session: aiohttp.ClientSession,
context: SimpleNamespace,
params: aiohttp.TraceRequestStartParams,
):
ctx = context.trace_request_ctx
ctx.start = time.perf_counter() # store per-request
async def on_end(
session: aiohttp.ClientSession,
context: SimpleNamespace,
params: aiohttp.TraceRequestEndParams,
):
ctx = context.trace_request_ctx
ctx.end = time.perf_counter()
ctx.duration_ms = int((ctx.end - ctx.start) * 1000)
trace_config.on_request_start.append(on_start)
trace_config.on_request_end.append(on_end)
client = aiohttp.ClientSession(
timeout=timeout, auto_decompress=False, trace_configs=[trace_config]
)
return client
async def update_services(loop: asyncio.AbstractEventLoop):
print("Starting service updates...")
# Create new session
with app.app_context():
WorkerSession = sessionmaker(bind=db.engine)
client = setup_client()
# Actual update loop
while True:
session = WorkerSession()
sleeptask = asyncio.create_task(asyncio.sleep(timeout_ / 1000 + 1))
tasks = [
check_service(client=client, s=s)
for s in session.query(service).all()
]
logs = await asyncio.gather(*tasks)
await sleeptask
try:
session.add_all(logs)
session.commit()
except Exception as e:
session.rollback()
raise e
finally:
session.close()