MAJOR REFACTOR P1

app.py  (137 changed lines)
@@ -1,127 +1,20 @@
-# import requests as r
-from flask import jsonify, render_template, send_file, abort
-from poll_services import start_async_loop
-from mem import services, app, db
+from app.flask_app import start_flask, stop_event as flask_stop
+from app.aio_client import start_worker, stop_event as aio_stop
 import threading
-from flask_migrate import upgrade, stamp
-from pathlib import Path
+import sys
+import time
-from models import service, log
-from typing import Any, Optional, cast
-import json
-from datetime import datetime, timezone, timedelta
-from config import timeout
 
-
-# Prepares log data for chart.js chart
-def prepare_chart_data(
-    logs: list[log],
-) -> tuple[list[str], list[Optional[int]]]:
-    if len(logs) <= 0:  # Return empty if there are no logs
-        return ([], [])
-
-    x = [logs[0].dateCreatedUTC().isoformat()]
-    y = [logs[0].ping]
-
-    for i in range(1, len(logs)):
-        log1 = logs[i]
-        log2 = logs[i - 1]
-
-        # Check if the gap in points exceeds a threshold
-        if (abs(log1.dateCreatedUTC() - log2.dateCreatedUTC())) > timedelta(
-            milliseconds=1.5 * (timeout + 1000)
-        ):
-            x.append(log2.dateCreatedUTC().isoformat())
-            y.append(None)
-
-        x.append(log1.dateCreatedUTC().isoformat())
-        y.append(log1.ping)
-    return (x, y)
-
-
-# Init and upgrade
-with app.app_context():
-    # Check if DB file is missing
-    if not (Path("./instance/app.db").is_file()):
-        with app.app_context():
-            db.create_all()
-            stamp()
-    # Upgrade db if any new migrations exist
-    upgrade()
-
-with app.app_context():
-    if not db.session.query(service).first():
-        for s in services:
-            db.session.add(
-                service(
-                    url=s.url,
-                    label=s.label,
-                    public_access=s.public,
-                    ping_method=s.ping_type,
-                )
-            )
-        db.session.commit()
-
-
-@app.route("/")
-def homepage():
-    return render_template("home.html")
-
-
-@app.route("/chart/<int:id>")
-def chart(id: int):
-    with app.app_context():
-        logs = []
-        s = db.session.query(service).filter_by(id=id).first()
-        if s:
-            logs = cast(
-                list[log],
-                s.logs.order_by(log.dateCreated.desc())  # type: ignore
-                .limit(300)
-                .all(),
-            )
-        else:
-            return abort(code=403)
-        x, y = prepare_chart_data(logs=logs)
-
-        now = datetime.now(timezone.utc)
-        max_ = now
-        min_ = now - timedelta(hours=1)
-        return render_template(
-            "chart.html",
-            dates=x,
-            values=json.dumps(y),
-            min=min_.isoformat(),
-            max=max_.isoformat(),
-        )
-
-
-@app.route("/api/status")
-def status():
-    results: list[dict[str, Any]] = []
-    with app.app_context():
-        a = db.session.query(service).all()
-        for s in a:
-            b = cast(
-                Optional[log],
-                s.logs.order_by(
-                    log.dateCreated.desc()  # type: ignore
-                ).first(),
-            )
-            if b:
-                results.append(s.to_dict() | b.to_dict())
-
-    return jsonify(results)
-
-
-@app.route("/favicon.svg")
-def favicon():
-    return send_file("/static/favicon.svg")
-
-
 # Only run if directly running file
 if __name__ == "__main__":
-    t = threading.Thread(target=start_async_loop, daemon=True)
-    t.start()
-
-    # Run flask app
-    app.run(host="0.0.0.0", port=80, debug=True, use_reloader=False)
+    threading.Thread(target=start_worker, daemon=True).start()
+
+    threading.Thread(target=start_flask, daemon=True).start()
+
+    # Optional: monitor stop_event in a separate thread
+    def monitor_worker():
+        while not aio_stop.is_set() and not flask_stop.is_set():
+            time.sleep(1)
+        print("Worker failed, stopping program...")
+        sys.exit(1)
+
+    monitor_worker()
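An aside, not part of the diff: the new app.py boils down to a small pattern — start the poller and the Flask server as daemon threads, then block in a watchdog loop that exits the process as soon as either thread flags failure on its threading.Event. A minimal standalone sketch of just that pattern (every name here is invented for illustration; fake_worker stands in for start_worker/start_flask):

import sys
import threading
import time

worker_stop = threading.Event()
server_stop = threading.Event()


def fake_worker(stop: threading.Event) -> None:
    # Stand-in for start_worker()/start_flask(): do some work, then signal failure.
    try:
        time.sleep(5)
        raise RuntimeError("worker died")
    except Exception:
        stop.set()


if __name__ == "__main__":
    threading.Thread(target=fake_worker, args=(worker_stop,), daemon=True).start()
    threading.Thread(target=fake_worker, args=(server_stop,), daemon=True).start()

    # Watchdog: daemon threads die with the main thread, so exiting here stops everything.
    while not worker_stop.is_set() and not server_stop.is_set():
        time.sleep(1)
    print("Worker failed, stopping program...")
    sys.exit(1)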
app/__init__.py  (new file, 21 lines)
@@ -0,0 +1,21 @@
+from flask_sqlalchemy import SQLAlchemy
+from flask_migrate import Migrate, upgrade, stamp
+from .flask_app import app
+from pathlib import Path
+
+__all__ = ["app"]
+
+# Create db
+db = SQLAlchemy(app=app)
+
+# Set up migration
+migration = Migrate(app=app, db=db)
+
+# Init and upgrade
+with app.app_context():
+    # Check if DB file is missing
+    if not (Path("./instance/app.db").is_file()):
+        db.create_all()
+        stamp()
+    # Upgrade db if any new migrations exist
+    upgrade()
app/aio_client/__init__.py  (new file, 3 lines)
@@ -0,0 +1,3 @@
+from .worker import start_worker, stop_event
+
+__all__ = ["start_worker", "stop_event"]
app/aio_client/client.py  (new file, 47 lines)
@@ -0,0 +1,47 @@
+import aiohttp
+from app.models import service, log
+from types import SimpleNamespace
+from typing import Optional
+
+
+async def ping(
+    client: aiohttp.ClientSession,
+    s: service,
+    ctx: Optional[SimpleNamespace] = None,
+) -> int:
+    ctx = ctx or SimpleNamespace()
+    match s.ping_method:
+        case 0:
+            r = await client.head(
+                url=s.url,
+                ssl=True if s.public_access else False,
+                allow_redirects=True,
+                trace_request_ctx=ctx,  # type: ignore
+            )
+        case 1:
+            r = await client.get(
+                url=s.url,
+                ssl=True if s.public_access else False,
+                allow_redirects=True,
+                trace_request_ctx=ctx,  # type: ignore
+            )
+        case _:
+            raise Exception("UNKNOWN PING METHOD")
+
+    return r.status
+
+
+async def ping_service(client: aiohttp.ClientSession, s: service) -> log:
+    try:
+        ctx = SimpleNamespace()
+        status = await ping(client=client, s=s, ctx=ctx)
+
+        if status == 200:
+            return log(service_id=s.id, ping=int(ctx.duration_ms))
+        else:
+            return log(service_id=s.id, ping=None)
+    except aiohttp.ConnectionTimeoutError:
+        return log(service_id=s.id, ping=None, timeout=True)
+    except Exception as e:
+        print(e)
+        return log(service_id=s.id, ping=None)
app/aio_client/worker.py  (new file, 89 lines)
@@ -0,0 +1,89 @@
+from sqlalchemy.orm import sessionmaker
+from app.config import timeout as timeout_
+import aiohttp
+import asyncio
+import time
+from types import SimpleNamespace
+from app import db, app
+from app.models import service
+from .client import ping_service
+import threading
+
+stop_event = threading.Event()
+
+
+def start_worker():
+    try:
+        # Creates new event loop in new thread
+        loop = asyncio.new_event_loop()
+
+        # Creates new task on new loop
+        loop.create_task(update_services())
+
+        # Schedule loop to run forever
+        loop.run_forever()
+    except Exception as e:
+        print("Worker thread exception:", e)
+        stop_event.set()
+
+
+def setup_client() -> aiohttp.ClientSession:
+    timeout = aiohttp.client.ClientTimeout(total=timeout_ / 1000)
+    # Each request will get its own context
+    trace_config = aiohttp.TraceConfig()
+
+    async def on_start(
+        session: aiohttp.ClientSession,
+        context: SimpleNamespace,
+        params: aiohttp.TraceRequestStartParams,
+    ):
+        ctx = context.trace_request_ctx
+        ctx.start = time.perf_counter()  # store per-request
+
+    async def on_end(
+        session: aiohttp.ClientSession,
+        context: SimpleNamespace,
+        params: aiohttp.TraceRequestEndParams,
+    ):
+        ctx = context.trace_request_ctx
+        ctx.end = time.perf_counter()
+        ctx.duration_ms = int((ctx.end - ctx.start) * 1000)
+
+    trace_config.on_request_start.append(on_start)
+    trace_config.on_request_end.append(on_end)
+    client = aiohttp.ClientSession(
+        timeout=timeout, auto_decompress=False, trace_configs=[trace_config]
+    )
+    return client
+
+
+async def update_services():
+    try:
+        print("Starting service updates...")
+        # Create new session
+        with app.app_context():
+            WorkerSession = sessionmaker(bind=db.engine)
+
+        client = setup_client()
+
+        # Actual update loop
+        while True:
+            session = WorkerSession()
+            sleeptask = asyncio.create_task(asyncio.sleep(timeout_ / 1000 + 1))
+            tasks = [
+                ping_service(client=client, s=s)
+                for s in session.query(service).all()
+            ]
+            logs = await asyncio.gather(*tasks)
+            await sleeptask
+            try:
+                session.add_all(logs)
+                session.commit()
+            except Exception as e:
+                session.rollback()
+                raise e
+            finally:
+                session.close()
    except Exception as e:
        print("Worker thread exception:", e)
        stop_event.set()
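Another aside, not part of the diff: worker.py measures latency with aiohttp's TraceConfig hooks instead of timing around each call. The SimpleNamespace passed as trace_request_ctx is handed back to the on_request_start/on_request_end callbacks as context.trace_request_ctx, so every in-flight request carries its own start time and ends up with a duration_ms field. A minimal, self-contained sketch of that mechanism (the URL is a placeholder, not one of the monitored services):

import asyncio
import time
from types import SimpleNamespace

import aiohttp


def make_timed_session() -> aiohttp.ClientSession:
    trace_config = aiohttp.TraceConfig()

    async def on_start(session, context, params):
        # context.trace_request_ctx is the object passed to the request call below
        context.trace_request_ctx.start = time.perf_counter()

    async def on_end(session, context, params):
        ctx = context.trace_request_ctx
        ctx.duration_ms = int((time.perf_counter() - ctx.start) * 1000)

    trace_config.on_request_start.append(on_start)
    trace_config.on_request_end.append(on_end)
    return aiohttp.ClientSession(trace_configs=[trace_config])


async def main() -> None:
    ctx = SimpleNamespace()
    async with make_timed_session() as session:
        async with session.head(
            "https://example.com/", allow_redirects=True, trace_request_ctx=ctx
        ) as resp:
            print(resp.status, ctx.duration_ms, "ms")


asyncio.run(main())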
app/flask_app.py  (new file, 20 lines)
@@ -0,0 +1,20 @@
+import threading
+from flask import Flask
+
+stop_event = threading.Event()
+
+# Flask app to serve status
+app = Flask(__name__)
+app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///app.db"
+
+
+def start_flask() -> None:
+    try:
+        # Run flask app
+        from .routes import bp
+
+        app.register_blueprint(bp)
+        app.run(host="0.0.0.0", port=80, debug=True, use_reloader=False)
+    except Exception as e:
+        print("Worker thread exception:", e)
+        stop_event.set()
@@ -1,4 +1,4 @@
-from mem import db
+from app import db
 from datetime import datetime, timezone
 from validators import url as is_url
 from typing import Any, Optional
app/routes.py  (new file, 95 lines)
@@ -0,0 +1,95 @@
+from flask import Blueprint, render_template, abort, jsonify, send_file, json
+from typing import cast, Optional, Any
+from datetime import datetime, timedelta, timezone
+from app.config import timeout
+from .models import service, log
+from app import app, db
+
+bp = Blueprint(
+    "api",
+    "__name__",
+    url_prefix="/api",
+    static_folder="static",
+)
+
+
+# Prepares log data for chart.js chart
+def prepare_chart_data(
+    logs: list[log],
+) -> tuple[list[str], list[Optional[int]]]:
+    if len(logs) <= 0:  # Return empty if there are no logs
+        return ([], [])
+
+    x = [logs[0].dateCreatedUTC().isoformat()]
+    y = [logs[0].ping]
+
+    for i in range(1, len(logs)):
+        log1 = logs[i]
+        log2 = logs[i - 1]
+
+        # Check if the gap in points exceeds a threshold
+        if (abs(log1.dateCreatedUTC() - log2.dateCreatedUTC())) > timedelta(
+            milliseconds=1.5 * (timeout + 1000)
+        ):
+            x.append(log2.dateCreatedUTC().isoformat())
+            y.append(None)
+
+        x.append(log1.dateCreatedUTC().isoformat())
+        y.append(log1.ping)
+    return (x, y)
+
+
+@bp.route("/")
+def homepage():
+    return render_template("home.html")
+
+
+@bp.route("/chart/<int:id>")
+def chart(id: int):
+    with app.app_context():
+        logs = []
+        s = db.session.query(service).filter_by(id=id).first()
+        if s:
+            logs = cast(
+                list[log],
+                s.logs.order_by(log.dateCreated.desc())  # type: ignore
+                .limit(300)
+                .all(),
+            )
+        else:
+            return abort(code=403)
+        x, y = prepare_chart_data(logs=logs)
+
+        now = datetime.now(timezone.utc)
+        max_ = now
+        min_ = now - timedelta(hours=1)
+        return render_template(
+            "chart.html",
+            dates=x,
+            values=json.dumps(y),
+            min=min_.isoformat(),
+            max=max_.isoformat(),
+        )
+
+
+@bp.route("/status")
+def status():
+    results: list[dict[str, Any]] = []
+    with app.app_context():
+        a = db.session.query(service).all()
+        for s in a:
+            b = cast(
+                Optional[log],
+                s.logs.order_by(
+                    log.dateCreated.desc()  # type: ignore
+                ).first(),
+            )
+            if b:
+                results.append(s.to_dict() | b.to_dict())
+
+    return jsonify(results)
+
+
+@bp.route("/favicon.svg")
+def favicon():
+    return send_file("/static/favicon.svg")
(This diff also touches 15 binary image files; the before/after sizes shown for each are identical. Image contents are not reproduced here.)
@@ -82,10 +82,10 @@
 </div>
 <div class="position-relative ratio ratio-1x1">
 <div class="d-flex justify-content-center align-items-center">
-<img src="static/icons/${s.service_id - 1}.svg" class="img-fluid w-75">
+<img src="{{ url_for('static', filename="icons") }}/${s.service_id - 1}.svg" class="img-fluid w-75">
 </div>
 <div>
-${s.public_access ? `` : `<img src='static/lock.svg' class='img-fluid position-absolute bottom-0 end-0 w-25'>`}
+${s.public_access ? `` : `<img src='{{ url_for('static', filename='lock.svg') }}' class='img-fluid position-absolute bottom-0 end-0 w-25'>`}
 <div class="position-absolute bottom-0 text-body bg-dark bg-opacity-75 px-1 rounded">${s.ping ? s.ping + "ms" : ""}</div>
 </div>
 </div>
@@ -1,62 +0,0 @@
-from typing import Any
-from flask import Flask
-from flask_sqlalchemy import SQLAlchemy
-from flask_migrate import Migrate
-
-
-class service:
-    id: int
-    url: str
-    online: bool
-    public: bool
-    ping_type: int
-
-    def __init__(
-        self,
-        id: int,
-        url: str = "",
-        label: str = "",
-        public: bool = True,
-        ping_type: int = 0,
-    ):
-        self.id = id
-        self.url = url
-        self.public = public
-        self.label = label
-        self.ping_type = ping_type
-
-    def to_dict(self) -> dict[str, Any]:
-        return {
-            "url": self.url,
-            "public": self.public,
-            "label": self.label,
-            "id": self.id,
-            "ping_type": self.ping_type,
-        }
-
-
-services: list[service] = [
-    service(0, "https://git.ihatemen.uk/", "Gitea"),
-    service(1, "https://plex.ihatemen.uk/", "Plex"),
-    service(2, "https://truenas.local/", "TrueNAS", False),
-    service(3, "https://cloud.ihatemen.uk/", "NextCloud"),
-    service(4, "https://request.ihatemen.uk/", "Overseerr"),
-    service(5, "https://id.ihatemen.uk/", "PocketID"),
-    service(6, "https://tautulli.local/", "Tautulli", False),
-    service(
-        7, "https://transmission.local/", "Transmission", False, ping_type=1
-    ),
-    service(8, "https://vault.ihatemen.uk/", "Vault Warden"),
-    service(9, "https://nginx.local/", "Nginx (NPM)", False),
-    service(10, "https://app.ihatemen.uk/", "Kcal Counter"),
-    service(
-        id=11, url="https://unifi.local/", label="Unifi Server", public=False
-    ),
-]
-
-# Flask app to serve status
-app = Flask(__name__)
-app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///app.db"
-
-db = SQLAlchemy(app=app)
-migration = Migrate(app=app, db=db)
poll_services.py  (deleted, 114 lines)
@@ -1,114 +0,0 @@
-from mem import db, app
-import aiohttp
-import asyncio
-import time
-from models import log, service
-from sqlalchemy.orm import sessionmaker
-from config import timeout as timeout_
-from typing import Optional
-from types import SimpleNamespace
-
-
-async def ping(
-    client: aiohttp.ClientSession,
-    s: service,
-    ctx: Optional[SimpleNamespace] = None,
-) -> int:
-    ctx = ctx or SimpleNamespace()
-    match s.ping_method:
-        case 0:
-            r = await client.head(
-                url=s.url,
-                ssl=True if s.public_access else False,
-                allow_redirects=True,
-                trace_request_ctx=ctx,  # type: ignore
-            )
-        case 1:
-            r = await client.get(
-                url=s.url,
-                ssl=True if s.public_access else False,
-                allow_redirects=True,
-                trace_request_ctx=ctx,  # type: ignore
-            )
-        case _:
-            raise Exception("UNKNOWN PING METHOD")
-    return r.status
-
-
-async def check_service(client: aiohttp.ClientSession, s: service) -> log:
-    try:
-        ctx = SimpleNamespace()
-        status = await ping(client=client, s=s, ctx=ctx)
-        if status == 200:
-            return log(service_id=s.id, ping=int(ctx.duration_ms))
-        else:
-            return log(service_id=s.id, ping=None)
-    except aiohttp.ConnectionTimeoutError:
-        return log(service_id=s.id, ping=None)
-    except Exception:
-        return log(service_id=s.id, ping=None)
-
-
-def start_async_loop():
-    loop = asyncio.new_event_loop()
-    asyncio.set_event_loop(loop)
-    asyncio.run_coroutine_threadsafe(update_services(loop=loop), loop=loop)
-    loop.run_forever()
-
-
-def setup_client() -> aiohttp.ClientSession:
-    timeout = aiohttp.client.ClientTimeout(total=timeout_ / 1000)
-    # Each request will get its own context
-    trace_config = aiohttp.TraceConfig()
-
-    async def on_start(
-        session: aiohttp.ClientSession,
-        context: SimpleNamespace,
-        params: aiohttp.TraceRequestStartParams,
-    ):
-        ctx = context.trace_request_ctx
-        ctx.start = time.perf_counter()  # store per-request
-
-    async def on_end(
-        session: aiohttp.ClientSession,
-        context: SimpleNamespace,
-        params: aiohttp.TraceRequestEndParams,
-    ):
-        ctx = context.trace_request_ctx
-        ctx.end = time.perf_counter()
-        ctx.duration_ms = int((ctx.end - ctx.start) * 1000)
-
-    trace_config.on_request_start.append(on_start)
-    trace_config.on_request_end.append(on_end)
-    client = aiohttp.ClientSession(
-        timeout=timeout, auto_decompress=False, trace_configs=[trace_config]
-    )
-    return client
-
-
-async def update_services(loop: asyncio.AbstractEventLoop):
-    print("Starting service updates...")
-    # Create new session
-    with app.app_context():
-        WorkerSession = sessionmaker(bind=db.engine)
-
-    client = setup_client()
-
-    # Actual update loop
-    while True:
-        session = WorkerSession()
-        sleeptask = asyncio.create_task(asyncio.sleep(timeout_ / 1000 + 1))
-        tasks = [
-            check_service(client=client, s=s)
-            for s in session.query(service).all()
-        ]
-        logs = await asyncio.gather(*tasks)
-        await sleeptask
-        try:
-            session.add_all(logs)
-            session.commit()
-        except Exception as e:
-            session.rollback()
-            raise e
-        finally:
-            session.close()