Mirror of https://github.com/StefBuwalda/dashboard_test.git (synced 2025-10-30 11:19:58 +00:00)
Added a bool timeout to logs. This indicates whether or not the request timed out. Preparing data for the frontend graph is put into its own function. When data points are more than 6 s apart, this is treated as a break in connectivity and a None point is added.
39  app.py
@@ -1,5 +1,5 @@
 # import requests as r
-from flask import jsonify, render_template, send_file
+from flask import jsonify, render_template, send_file, redirect
 from poll_services import start_async_loop
 from mem import services, app, db
 import threading
@@ -8,6 +8,28 @@ from pathlib import Path
 from models import service, log
 from typing import Any, Optional, cast
 import json
+from datetime import timedelta
+
+
+def split_graph(logs: list[log]) -> tuple[list[str], list[Optional[int]]]:
+    if len(logs) <= 0:
+        return ([], [])
+
+    x = [logs[0].dateCreated.isoformat()]
+    y = [logs[0].ping]
+
+    for i in range(1, len(logs)):
+        log1 = logs[i]
+        log2 = logs[i - 1]
+
+        if (log1.dateCreated - log2.dateCreated) > timedelta(seconds=6):
+            x.append(log2.dateCreated.isoformat())
+            y.append(None)
+
+        x.append(log1.dateCreated.isoformat())
+        y.append(log1.ping)
+    return (x, y)
+
 
 # Init and upgrade
 with app.app_context():
@@ -44,11 +66,20 @@ def chart():
     logs = []
     s = db.session.query(service).first()
     if s:
-        logs: list[log] = s.logs.limit(60).all()
+        logs = cast(
+            list[log],
+            s.logs.order_by(log.dateCreated.desc())  # type: ignore
+            .limit(300)
+            .all(),
+        )
+    else:
+        return redirect("/")
+    x, y = split_graph(logs=logs)
+
     return render_template(
         "chart.html",
-        dates=[item.dateCreated.isoformat() for item in logs],
-        values=json.dumps([item.ping for item in logs]),
+        dates=x,
+        values=json.dumps(y),
     )
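To make the gap-splitting behaviour above concrete, here is a minimal, self-contained sketch of the same idea. FakeLog is a hypothetical stand-in for the SQLAlchemy log model, and the sketch assumes the logs arrive in ascending time order; it illustrates the technique rather than reusing the project's code.

# Minimal sketch of the gap-splitting idea; not the project's actual code.
from dataclasses import dataclass
from datetime import datetime, timedelta
from typing import Optional


@dataclass
class FakeLog:  # hypothetical stand-in for the log model
    dateCreated: datetime
    ping: Optional[int]


def split_graph(logs: list[FakeLog]) -> tuple[list[str], list[Optional[int]]]:
    # Walk consecutive samples; whenever two are more than 6 s apart,
    # insert a None so the chart shows a gap instead of a connecting line.
    if not logs:
        return [], []
    x = [logs[0].dateCreated.isoformat()]
    y = [logs[0].ping]
    for prev, curr in zip(logs, logs[1:]):
        if curr.dateCreated - prev.dateCreated > timedelta(seconds=6):
            x.append(prev.dateCreated.isoformat())
            y.append(None)
        x.append(curr.dateCreated.isoformat())
        y.append(curr.ping)
    return x, y


t0 = datetime(2025, 9, 5, 9, 0, 0)
samples = [
    FakeLog(t0, 12),
    FakeLog(t0 + timedelta(seconds=5), 14),   # 5 s gap: still connected
    FakeLog(t0 + timedelta(seconds=20), 13),  # 15 s gap: break in connectivity
]
x, y = split_graph(samples)
print(y)  # [12, 14, None, 13]

Chart.js skips null data points by default, which is what turns the inserted None into a visible break in the line.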
chart.html
@@ -18,8 +18,7 @@
 
     <script>
 
-        const chartDates = {{ dates| tojson | safe }}.map(dt => new Date(dt));
-
+        const chartDates = ({{ dates | safe }}).map(dt => new Date(dt));
         const data = {
             labels: chartDates,
             datasets: [{
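With this change the template receives dates as a plain Python list rendered through the safe filter, while values is JSON-encoded in app.py. A quick, hypothetical way to see what the browser actually receives is to render the two bindings in isolation; the template string below only mimics them and is not part of the commit.

# Hypothetical check of what the updated bindings render to; not project code.
import json

from flask import Flask, render_template_string

app = Flask(__name__)

with app.app_context():
    x = ["2025-09-05T09:00:00", "2025-09-05T09:00:06"]
    y = [12, None]
    print(
        render_template_string(
            "const chartDates = ({{ dates | safe }}).map(dt => new Date(dt));\n"
            "const chartValues = {{ values | safe }};",
            dates=x,
            values=json.dumps(y),
        )
    )
# const chartDates = (['2025-09-05T09:00:00', '2025-09-05T09:00:06']).map(dt => new Date(dt));
# const chartValues = [12, null];

Running values through json.dumps is what turns Python's None into null, which Chart.js reads as a missing point.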
37  migrations/versions/3c05315d5b9b_.py  Normal file
@@ -0,0 +1,37 @@
+"""empty message
+
+Revision ID: 3c05315d5b9b
+Revises: f87909a4293b
+Create Date: 2025-09-05 09:48:08.561045
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = "3c05315d5b9b"
+down_revision = "f87909a4293b"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("log", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column(
+                "timeout", sa.Boolean(), nullable=False, server_default="false"
+            )
+        )
+
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("log", schema=None) as batch_op:
+        batch_op.drop_column("timeout")
+
+    # ### end Alembic commands ###
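The server_default in this migration is what allows a NOT NULL column to be added to a table that already contains rows: existing rows need a value the moment the constraint appears. A hypothetical illustration with plain sqlite3 follows (the use of batch_alter_table suggests the project targets SQLite, but that is an assumption):

# Hypothetical illustration, not project code: adding a NOT NULL column to a
# populated table fails unless the database can backfill existing rows.
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE log (id INTEGER PRIMARY KEY, ping INTEGER)")
conn.execute("INSERT INTO log (ping) VALUES (42)")

try:
    conn.execute("ALTER TABLE log ADD COLUMN timeout BOOLEAN NOT NULL")
except sqlite3.OperationalError as e:
    print(e)  # e.g. "Cannot add a NOT NULL column with default value NULL"

# With a default, the existing row is backfilled and the constraint holds.
# SQLite stores booleans as 0/1.
conn.execute("ALTER TABLE log ADD COLUMN timeout BOOLEAN NOT NULL DEFAULT 0")
print(conn.execute("SELECT timeout FROM log").fetchone())  # (0,)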
models.py
@@ -13,13 +13,17 @@ class log(db.Model):
         nullable=False,
     )
     ping: Optional[int] = db.Column(db.Integer, nullable=True)
+    timeout: bool = db.Column(db.Boolean, nullable=False)
 
-    def __init__(self, service_id: int, ping: Optional[int]):
+    def __init__(
+        self, service_id: int, ping: Optional[int], timeout: bool = False
+    ):
         super().__init__()
         self.service_id = service_id
         self.ping = ping
 
         self.dateCreated = datetime.now(timezone.utc)
+        self.timeout = timeout
 
     def to_dict(self) -> dict[str, Any]:
         return {
@@ -27,6 +31,7 @@ class log(db.Model):
             "service_id": self.service_id,
             "ping": self.ping,
             "dateCreated": self.dateCreated,
+            "timeout": self.timeout,
         }
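A small usage sketch of the extended model, to show the new keyword in context. It assumes the project's models module imports cleanly (it pulls in app and db from mem); the calls themselves follow the constructor and to_dict shown above.

# Illustrative only: exercising the new timeout flag on the log model.
from models import log

timed_out = log(service_id=1, ping=None, timeout=True)  # request timed out
healthy = log(service_id=1, ping=42)                    # timeout defaults to False

print(timed_out.to_dict()["timeout"])  # True
print(healthy.to_dict()["timeout"])    # False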
poll_services.py
@@ -27,6 +27,7 @@ async def ping(client: aiohttp.ClientSession, s: service) -> int:
 
 async def check_service(client: aiohttp.ClientSession, s: service) -> log:
     try:
+        # TODO: Use aiohttp latency timing rather than timing it manually
         before = time.perf_counter()
         status = await ping(client=client, s=s)
         after = time.perf_counter()
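The diff is cut off at this point, so the part of check_service that actually sets timeout=True is not visible here. Purely as an illustration of the general pattern, here is a standalone sketch of flagging a timed-out aiohttp request; every name below is hypothetical and this is not the project's code.

# Hypothetical sketch, not the project's check_service: time the request with
# time.perf_counter() and flag a timeout instead of letting it propagate.
import asyncio
import time
from typing import Optional

import aiohttp


async def probe(client: aiohttp.ClientSession, url: str) -> tuple[Optional[int], bool]:
    """Return (latency_ms, timed_out) for a single GET request."""
    before = time.perf_counter()
    try:
        async with client.get(url, timeout=aiohttp.ClientTimeout(total=5)) as resp:
            await resp.read()
    except asyncio.TimeoutError:
        return None, True  # no latency value, but the timeout flag is set
    after = time.perf_counter()
    return int((after - before) * 1000), False


async def main() -> None:
    async with aiohttp.ClientSession() as client:
        latency_ms, timed_out = await probe(client, "https://example.com")
        print(latency_ms, timed_out)


if __name__ == "__main__":
    asyncio.run(main())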