A bunch of mess

2025-04-10 21:01:25 +02:00
parent 02979e2cee
commit 5de2df71a9
14 changed files with 261 additions and 6 deletions

Deleted file (name not shown)

@@ -1 +0,0 @@
from flask_sqlalchemy import SQLAlchemy

instance/data.sqlite (new binary file, not shown)

migrations/README (new file, 1 line)

@@ -0,0 +1 @@
Single-database configuration for Flask.

migrations/alembic.ini (new file, 50 lines)

@@ -0,0 +1,50 @@
# A generic, single database configuration.

[alembic]
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false


# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic,flask_migrate

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[logger_flask_migrate]
level = INFO
handlers =
qualname = flask_migrate

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
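
The [loggers]/[handlers]/[formatters] sections above are standard Python logging.config syntax; env.py below hands this file to fileConfig(), so Alembic and Flask-Migrate messages land on stderr at the levels set here. A minimal standalone sketch of the same mechanism, assuming the file sits at migrations/alembic.ini relative to the working directory:

# Sketch: wire up the loggers defined in migrations/alembic.ini
# (the path is an assumption; adjust to where the file actually lives).
import logging
from logging.config import fileConfig

fileConfig("migrations/alembic.ini")

# "alembic" is configured at INFO, so this reaches the console handler;
# unconfigured module loggers inherit the root WARN level and stay quiet below it.
logging.getLogger("alembic").info("logging configured from alembic.ini")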

migrations/env.py (new file, 113 lines)

@@ -0,0 +1,113 @@
import logging
from logging.config import fileConfig

from flask import current_app

from alembic import context

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')


def get_engine():
    try:
        # this works with Flask-SQLAlchemy<3 and Alchemical
        return current_app.extensions['migrate'].db.get_engine()
    except (TypeError, AttributeError):
        # this works with Flask-SQLAlchemy>=3
        return current_app.extensions['migrate'].db.engine


def get_engine_url():
    try:
        return get_engine().url.render_as_string(hide_password=False).replace(
            '%', '%%')
    except AttributeError:
        return str(get_engine().url).replace('%', '%%')


# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
config.set_main_option('sqlalchemy.url', get_engine_url())
target_db = current_app.extensions['migrate'].db

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def get_metadata():
    if hasattr(target_db, 'metadatas'):
        return target_db.metadatas[None]
    return target_db.metadata


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url, target_metadata=get_metadata(), literal_binds=True
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """

    # this callback is used to prevent an auto-migration from being generated
    # when there are no changes to the schema
    # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
    def process_revision_directives(context, revision, directives):
        if getattr(config.cmd_opts, 'autogenerate', False):
            script = directives[0]
            if script.upgrade_ops.is_empty():
                directives[:] = []
                logger.info('No changes in schema detected.')

    conf_args = current_app.extensions['migrate'].configure_args
    if conf_args.get("process_revision_directives") is None:
        conf_args["process_revision_directives"] = process_revision_directives

    connectable = get_engine()

    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=get_metadata(),
            **conf_args
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
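
env.py pulls the engine and metadata out of current_app, so it only runs inside an application context. Besides the flask db CLI, Flask-Migrate exposes the same commands as plain functions; a minimal sketch of driving them programmatically, where the app module name is an assumption (the diff further down does not show the file name):

# Sketch: generate and apply a migration without the CLI.
# Assumes the Flask app and Migrate() setup from this commit are importable
# from a module named "app" (hypothetical name).
from flask_migrate import migrate as make_migration, upgrade

from app import app  # hypothetical module name

with app.app_context():
    make_migration(message="add plate table")  # writes a new file under migrations/versions/
    upgrade()                                  # applies pending revisions to data.sqlite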

migrations/script.py.mako (new file, 24 lines)

@@ -0,0 +1,24 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}

New file (name not shown)

@@ -0,0 +1,33 @@
"""empty message
Revision ID: 7a26306b04df
Revises:
Create Date: 2025-04-10 20:56:03.819230
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '7a26306b04df'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('plate',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('plate', sa.String(length=40), nullable=False),
sa.Column('dateLogged', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('plate')
# ### end Alembic commands ###
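
After flask db upgrade, this revision creates the plate table inside the SQLite file committed above. A quick way to confirm the resulting schema, assuming the default instance/data.sqlite path:

# Sketch: inspect the table the migration creates.
import sqlite3

conn = sqlite3.connect("instance/data.sqlite")  # path assumed from the committed file
for cid, name, col_type, notnull, default, pk in conn.execute("PRAGMA table_info(plate)"):
    print(name, col_type, "NOT NULL" if notnull else "", "PK" if pk else "")
# Expected: id INTEGER NOT NULL PK, plate VARCHAR(40) NOT NULL, dateLogged DATETIME
conn.close()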

Modified file (name not shown)

@@ -1,3 +1,4 @@
+alembic==1.15.2
 anyascii==0.3.2
 blinker==1.9.0
 certifi==2025.1.31
@@ -10,8 +11,12 @@ cycler==0.12.1
 defusedxml==0.7.1
 filelock==3.18.0
 Flask==3.1.0
+Flask-Login==0.6.3
+Flask-Migrate==4.1.0
+Flask-SQLAlchemy==3.1.1
 fonttools==4.57.0
 fsspec==2025.3.2
+greenlet==3.1.1
 h5py==3.13.0
 huggingface-hub==0.30.2
 idna==3.10
@@ -20,6 +25,7 @@ Jinja2==3.1.6
 kiwisolver==1.4.8
 langdetect==1.0.9
 loguru==0.7.2
+Mako==1.3.10
 MarkupSafe==3.0.2
 matplotlib==3.10.1
 mpmath==1.3.0
@@ -46,6 +52,7 @@ seaborn==0.13.2
 setuptools==78.1.0
 shapely==2.1.0
 six==1.17.0
+SQLAlchemy==2.0.40
 sympy==1.13.1
 torch==2.6.0
 torchvision==0.21.0

Modified file (name not shown)

@@ -1,20 +1,42 @@
 from flask import Flask, request, jsonify
-import os
+from flask_sqlalchemy import SQLAlchemy
+from flask_migrate import Migrate
+from datetime import datetime
 from pyplatex import ANPR
-import torch
 from ultralytics.nn.tasks import DetectionModel
+import os
+import torch
 import asyncio

 torch.serialization.add_safe_globals({"DetectionModel": DetectionModel})

 # Web Server
 app = Flask(__name__)
+app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///data.sqlite"
+app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
+app.config["SECRET_KEY"] = "bvjchsygvduycgsyugc"
+
+# ORM
+db = SQLAlchemy()
+db.init_app(app)
+migrate = Migrate(app, db)

 # Saving images locally
 UPLOAD_FOLDER = "uploads"
 os.makedirs(UPLOAD_FOLDER, exist_ok=True)

+
+# db classes
+class Plate(db.Model):
+    id = db.Column(db.Integer, primary_key=True)
+    plate = db.Column(db.String(40), nullable=False)
+
+
+class LoggedItem(Plate):
+    dateLogged = db.Column(db.DateTime, default=datetime.now)
+
+
 # Default app route
 @app.route("/")
 def home():
@@ -37,12 +59,13 @@ def data():
     if file.filename.lower().endswith(".jpg"):
         filepath = os.path.join(UPLOAD_FOLDER, file.filename)
         file.save(filepath)
-        print(asyncio.run(process_image(filepath)))
+        plate = asyncio.run(process_image(filepath))
         return jsonify(
             {
                 "message": "File uploaded successfully",
                 "filename": file.filename,
                 "status": True,
+                "anpr": plate,
             }
         )
@@ -51,8 +74,13 @@ def data():
 async def process_image(file: str):
     anpr = ANPR()
-    plates = await anpr.detect(file)
-    return plates
+    anpr_info = await anpr.detect(file)
+    number_plate = anpr_info["plate_number"]
+    if number_plate:
+        db.session.add(LoggedItem(plate=number_plate))
+        db.session.commit()
+        return number_plate
+    return "ERROR"


 if __name__ == "__main__":
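
With the new code in place, the upload handler runs ANPR on the saved .jpg, logs the detected plate through LoggedItem, and returns it under "anpr". A minimal client sketch; the /data route path and the "file" form-field name are assumptions, since neither appears in the hunks shown:

# Sketch: exercise the upload endpoint from another machine.
import requests

with open("car.jpg", "rb") as fh:
    resp = requests.post(
        "http://127.0.0.1:5000/data",                   # route path assumed
        files={"file": ("car.jpg", fh, "image/jpeg")},  # form field name assumed
    )
print(resp.json())  # expected keys: message, filename, status, anpr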

views.py (new empty file)