Added a log entry for each ping. The log carries no payload data yet, and the estimate comes to about 15.7 million rows a year, so log aggregation is being considered. Also moved some folders and files into the mem folder to resolve a circular import.
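For reference, that figure appears to assume one logged row per 2-second poll cycle (the asyncio.sleep(2) in the worker loop below); this back-of-the-envelope reading is an assumption, not stated in the description:

    86,400 seconds/day ÷ 2 seconds/cycle = 43,200 cycles/day
    43,200 cycles/day × 365 days = 15,768,000 ≈ 15.7 million rows/year

If instead each service check writes a row every cycle (the worker gathers one check_service result per service, and the services list runs up to id=11), the actual volume would be roughly an order of magnitude higher, which is presumably where aggregation would come in.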
app.py (13 changed lines)
@@ -1,18 +1,11 @@
 # import requests as r
-from flask import jsonify, Flask, render_template, send_file
+from flask import jsonify, render_template, send_file
 from poll_services import start_async_loop
-from mem import services
+from mem import services, app
 import threading
-from flask_sqlalchemy import SQLAlchemy
-from flask_migrate import Migrate, init, upgrade
+from flask_migrate import init, upgrade
 from pathlib import Path

-# Flask app to serve status
-app = Flask(__name__)
-app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///app.db"
-
-db = SQLAlchemy(app=app)
-migration = Migrate(app=app, db=db)

 # Init and upgrade
 with app.app_context():
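The hunk ends at the app context block, so its body is not shown. A minimal sketch of an init-and-upgrade-on-startup block, assuming that is what it does (the imports of init, upgrade, and Path point that way); only init, upgrade, Path, and app come from the diff, the rest is illustrative:

from pathlib import Path
from flask_migrate import init, upgrade
from mem import app

with app.app_context():
    if not Path("migrations").exists():
        init()      # one-time: create the migrations/ directory
    upgrade()       # apply pending Alembic revisions (e.g. d7d380435347)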
@@ -1,4 +1,7 @@
 from typing import Any, Optional
+from flask import Flask
+from flask_sqlalchemy import SQLAlchemy
+from flask_migrate import Migrate


 class service:
@@ -78,3 +81,10 @@ services: list[service] = [
         id=11, url="https://unifi.local/", label="Unifi Server", public=False
     ),
 ]
+
+# Flask app to serve status
+app = Flask(__name__)
+app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///app.db"
+
+db = SQLAlchemy(app=app)
+migration = Migrate(app=app, db=db)
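The two hunks above lost their file header in the capture, but imports elsewhere in the diff (from mem import services, service, db, app) indicate they belong to the mem package. The resulting dependency direction, which is what removes the circular import called out in the description:

    mem            -> defines service, services, app, db, migration
    models.py      -> from mem import db
    poll_services  -> from mem import services, service, db, app
    app.py         -> from mem import services, app

Nothing in mem imports from app.py, models.py, or the poller, so the old "from app import db" edge that pointed models.py back at app.py is gone.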
(15 image files, 191 B to 8.9 KiB, appear in the diff with identical before/after dimensions and sizes, i.e. apparently moved or renamed without content changes.)
migrations/versions/d7d380435347_.py (new file, 32 lines)
@@ -0,0 +1,32 @@
"""empty message

Revision ID: d7d380435347
Revises:
Create Date: 2025-09-02 08:43:16.682424

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = 'd7d380435347'
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('log',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('dateCreated', sa.DateTime(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('log')
    # ### end Alembic commands ###
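A quick way to confirm the revision landed, as a sketch; it assumes the app and db objects live in mem as shown above and uses SQLAlchemy's inspector:

from sqlalchemy import inspect
from mem import app, db

with app.app_context():
    insp = inspect(db.engine)
    print(insp.get_table_names())                        # expect 'alembic_version' and 'log'
    print([c["name"] for c in insp.get_columns("log")])  # expect ['id', 'dateCreated']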
models.py (10 changed lines)
@@ -1,6 +1,12 @@
-from app import db
+from mem import db
+from datetime import datetime, timezone


-class logs(db.Model):
+class log(db.Model):
     id = db.Column(db.Integer, primary_key=True)
+    dateCreated = db.Column(db.DateTime, nullable=False)
+
+    def __init__(self):
+        super().__init__()

+        self.dateCreated = datetime.now(timezone.utc)
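A minimal usage sketch of the new model (nothing here beyond the model itself is in the diff):

from models import log

entry = log()              # __init__ stamps dateCreated with the current UTC time
print(entry.dateCreated)   # timezone-aware datetime; the column is declared without
                           # timezone=True, so stored values are UTC by convention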
@@ -1,10 +1,12 @@
-from mem import services, service
+from mem import services, service, db, app
 import httpx
 import asyncio
 import time
+from models import log
+from sqlalchemy.orm import sessionmaker


-async def check_service(client: httpx.AsyncClient, s: service):
+async def check_service(client: httpx.AsyncClient, s: service) -> log:
     try:
         before = time.perf_counter()
         match s.ping_type:
@@ -22,7 +24,6 @@ async def check_service(client: httpx.AsyncClient, s: service):
                 )
             case _:
                 raise httpx.HTTPError("Unknown ping type")
-
         after = time.perf_counter()
         s.set_error(None)
         s.set_online(r.status_code == 200)
@@ -33,6 +34,7 @@ async def check_service(client: httpx.AsyncClient, s: service):
         s.set_online(False)
         s.set_status(None)
         s.set_ping(None)
+        return log()


 def start_async_loop():
@@ -44,14 +46,25 @@ def start_async_loop():

 async def update_services(loop: asyncio.AbstractEventLoop):
     print("Starting service updates...")
+    with app.app_context():
+        WorkerSession = sessionmaker(bind=db.engine)
     async with (
         httpx.AsyncClient() as public_client,
         httpx.AsyncClient(verify=False) as local_client,
     ):
         while True:
+            session = WorkerSession()
             tasks = [
                 check_service(public_client if s.public else local_client, s)
                 for s in services
             ]
-            await asyncio.gather(*tasks)
+            logs = await asyncio.gather(*tasks)
+            try:
+                session.add_all(logs)
+                session.commit()
+            except Exception as e:
+                session.rollback()
+                raise e
+            finally:
+                session.close()
             await asyncio.sleep(2)
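For completeness, a sketch of how app.py presumably starts this poller, based only on the imports visible there (threading and start_async_loop); the actual call site is not in the diff:

import threading
from poll_services import start_async_loop

# run the asyncio polling loop on a daemon thread so Flask can keep serving requests
poller = threading.Thread(target=start_async_loop, daemon=True)
poller.start()

One detail worth keeping in mind: session.add_all() and session.commit() are synchronous calls running inside the event loop, which is fine at a 2-second cadence but would stall the loop if commits ever became slow.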