fix logging
All checks were successful
Build Docker Image / build (push) Successful in 13m47s

This commit is contained in:
sudacode 2025-02-15 00:35:41 -08:00
parent b7e89d9c22
commit b7aa64935f
Signed by: sudacode
SSH Key Fingerprint: SHA256:lT5C2bB398DcX6daCF/gYFNSTK3y+Du3oTGUnYzfTEw
8 changed files with 59 additions and 47 deletions

View File

@@ -1 +1 @@
0.2.0
0.2.1

View File

@@ -96,7 +96,7 @@ keys = generic
[logger_root]
level = WARNING
handlers = console
qualname =
qualname = root
[logger_sqlalchemy]
level = WARNING

View File

@@ -1,16 +1,30 @@
"""init module for the Flask application."""
import logging
from flask import Flask
from app.config import Config
from app.database import engine
from app.migrations import run_migrations
from app.models import Base
from app.views import bp
from app.migrations import run_migrations
def create_app(test_config=None):
def create_app():
"""Create and configure an instance of the Flask application."""
app = Flask(__name__, instance_relative_config=True)
app.config.from_object(Config)
# Set up Flask logger
if not app.logger.handlers:
handler = logging.StreamHandler()
handler.setFormatter(
logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
)
app.logger.addHandler(handler)
app.logger.setLevel(logging.DEBUG)
# Create database tables if they don't exist
Base.metadata.create_all(engine)
@@ -21,4 +35,6 @@ def create_app(test_config=None):
with app.app_context():
run_migrations(app)
app.logger.info("Application initialized successfully")
return app

View File

@@ -1,8 +1,9 @@
import logging
import os
from alembic.config import Config
"""Migrations module"""
from alembic import command
from alembic.config import Config
from flask import Blueprint
from app.config import Config as AppConfig
migrations_bp = Blueprint("migrations", __name__)
@@ -20,13 +21,13 @@ def run_migrations(app):
# Set the SQLAlchemy URL
alembic_cfg.set_main_option("sqlalchemy.url", database_url)
logging.debug(f"Database URL set to: {database_url}")
app.logger.debug(f"Database URL set to: {database_url}")
# Run the migration
command.upgrade(alembic_cfg, "head")
logging.info("Database migration completed successfully")
app.logger.info("Database migration completed successfully")
return True
except Exception as e:
logging.error(f"Migration failed: {str(e)}")
app.logger.error(f"Migration failed: {str(e)}")
return False

View File

@@ -1,6 +1,10 @@
"""Send commands to mpv via its UNIX socket."""
import socket
import time
import logging
from flask import current_app
from app.config import Config
SOCKET_RETRY_DELAY = 5
@@ -14,14 +18,16 @@ def send_to_mpv(command):
with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as client_socket:
client_socket.connect(Config.MPV_SOCKET)
client_socket.sendall(command.encode("utf-8"))
logging.info("Command sent to mpv successfully.")
current_app.logger.info("Command sent to mpv successfully.")
return True
except socket.error as e:
attempts += 1
logging.error(
current_app.logger.error(
f"Failed to connect to socket (attempt {attempts}/{MAX_RETRIES}): {e}. Retrying in {SOCKET_RETRY_DELAY} seconds..."
)
time.sleep(SOCKET_RETRY_DELAY)
logging.error(f"Exceeded maximum retries ({MAX_RETRIES}). Ignoring the request.")
current_app.logger.error(
f"Exceeded maximum retries ({MAX_RETRIES}). Ignoring the request."
)
return False

View File

@@ -1,8 +1,8 @@
import logging
"""Views for the Flask app."""
from flask import Blueprint, g, jsonify, request
from sqlalchemy.exc import SQLAlchemyError
from flask import Blueprint, current_app, g, jsonify, request
from sqlalchemy import inspect
from sqlalchemy.exc import SQLAlchemyError
from app.database import get_db_session
from app.models import SavedQueue, WatchHistory
@@ -40,7 +40,7 @@ def save_queue():
return jsonify(message="Data added to saved queue"), 200
except SQLAlchemyError as e:
db_session.rollback()
logging.error(f"Failed to insert data: {e}")
current_app.logger.error(f"Failed to insert data: {e}")
return jsonify(message="Database error"), 500
@@ -49,10 +49,10 @@ def load_queue():
"""
Retrieves the saved queue of video URLs.
"""
logging.debug("Loading saved queue")
current_app.logger.debug("Loading saved queue")
db_session = g.db_session
urls = [entry.video_url for entry in db_session.query(SavedQueue).all()]
logging.debug(f"Loaded {len(urls)} URLs from the saved queue")
current_app.logger.debug(f"Loaded {len(urls)} URLs from the saved queue")
return jsonify(urls), 200
@@ -62,8 +62,8 @@ def add_video():
if not all(
k in data for k in ["video_url", "video_name", "channel_url", "channel_name"]
):
logging.error("Missing required fields")
logging.error(
current_app.logger.error("Missing required fields")
current_app.logger.error(
"Required fields: video_url, video_name, channel_url, channel_name"
)
return jsonify(message="Missing required fields"), 400
@@ -83,18 +83,15 @@ def add_video():
db_session = g.db_session
try:
logging.debug("Adding video to watch history")
current_app.logger.debug("Adding video to watch history")
db_session.add(new_entry)
db_session.commit()
logging.debug("Video added to watch history")
logging.debug(f"URL: {data['video_url']}")
logging.debug(f"Video name: {data['video_name']}")
logging.debug(f"Channel URL: {data['channel_url']}")
logging.debug(f"Channel name: {data['channel_name']}")
current_app.logger.debug("Video added to watch history")
current_app.logger.debug("Data: %s", data)
return jsonify(message="Video added"), 200
except SQLAlchemyError as e:
db_session.rollback()
logging.error(f"Database error: {e}")
current_app.logger.error(f"Database error: {e}")
return jsonify(message="Failed to add video"), 500
@@ -139,7 +136,7 @@ def migrate_watch_history():
if col_name not in existing_columns:
engine.execute(sql)
columns_added.append(col_name)
logging.info(f"Added column: {col_name}")
current_app.logger.info(f"Added column: {col_name}")
# Now backfill with default values
entries = db_session.query(WatchHistory).all()
@@ -183,5 +180,5 @@ def migrate_watch_history():
except SQLAlchemyError as e:
db_session.rollback()
logging.error(f"Migration failed: {e}")
current_app.logger.error(f"Migration failed: {e}")
return jsonify(message=f"Failed to migrate watch history: {str(e)}"), 500

View File

@@ -1,24 +1,22 @@
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool, create_engine
from alembic import context
from app.models import Base
from app.config import Config
from sqlalchemy import create_engine
from app.config import Config
from app.models import Base
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Set the database URL from our app config
config.set_main_option('sqlalchemy.url', Config.SQLALCHEMY_DATABASE_URI)
config.set_main_option("sqlalchemy.url", Config.SQLALCHEMY_DATABASE_URI)
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
fileConfig(config.config_file_name)
# if config.config_file_name is not None:
# fileConfig(config.config_file_name, disable_existing_loggers=False)
# add your model's MetaData object here
# for 'autogenerate' support
@@ -68,10 +66,7 @@ def run_migrations_online() -> None:
connectable = create_engine(Config.SQLALCHEMY_DATABASE_URI)
with connectable.connect() as connection:
context.configure(
connection=connection,
target_metadata=target_metadata
)
context.configure(connection=connection, target_metadata=target_metadata)
with context.begin_transaction():
context.run_migrations()

3
run.py
View File

@@ -1,11 +1,8 @@
#!/usr/bin/env python3
import logging
from app import create_app
from app.config import Config
logging.basicConfig(level=getattr(logging, Config.LOGLEVEL))
app = create_app()
if __name__ == "__main__":