V0.2.0
All checks were successful
Build Docker Image / build (push) Successful in 12m39s

2025-02-13 17:46:15 -08:00
parent 9544b0415c
commit b7e89d9c22
17 changed files with 455 additions and 24 deletions


@@ -4,16 +4,21 @@ from app.config import Config
 from app.database import engine
 from app.models import Base
 from app.views import bp
+from app.migrations import run_migrations


-def create_app():
-    app = Flask(__name__)
+def create_app(test_config=None):
+    app = Flask(__name__, instance_relative_config=True)
     app.config.from_object(Config)

-    # Ensure database tables exist
+    # Create database tables if they don't exist
     Base.metadata.create_all(engine)

     # Register blueprints
     app.register_blueprint(bp)

+    # Run migrations after tables are created
+    with app.app_context():
+        run_migrations(app)
     return app
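For context, a minimal launcher for the updated factory could look like the sketch below. wsgi.py is a hypothetical filename; the project's real entry point is outside this diff.

# wsgi.py -- hypothetical entry point, shown only to illustrate how create_app() is consumed
from app import create_app
from app.config import Config

app = create_app()

if __name__ == "__main__":
    # Serve on the address/port from app/config.py; keep debug off outside development
    app.run(host=Config.LISTEN_ADDRESS, port=Config.LISTEN_PORT)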


@@ -4,6 +4,6 @@ import os
 class Config:
     LISTEN_ADDRESS = os.getenv("LISTEN_ADDRESS", "0.0.0.0")
     LISTEN_PORT = int(os.getenv("LISTEN_PORT", "8080"))
-    DATABASE_URL = os.getenv("DATABASE_URL", "sqlite:///./mpv.db")
+    SQLALCHEMY_DATABASE_URI = os.getenv("SQLALCHEMY_DATABASE_URI", "sqlite:///./mpv.db")
     MPV_SOCKET = os.getenv("MPV_SOCKET", "/tmp/mpvsocket")
     LOGLEVEL = os.getenv("LOGLEVEL", "INFO").strip().upper()
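Because the setting was renamed, deployments that previously exported DATABASE_URL need to export SQLALCHEMY_DATABASE_URI instead. A quick sanity check in a fresh interpreter (the Postgres URL is only an illustration):

import os

# Point the app at Postgres instead of the bundled SQLite default; the URL is illustrative
os.environ["SQLALCHEMY_DATABASE_URI"] = "postgresql+psycopg2://user:pass@db/mpv"

from app.config import Config  # imported after the variable is set, so getenv picks it up

assert Config.SQLALCHEMY_DATABASE_URI.startswith("postgresql")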


@@ -2,7 +2,7 @@ from sqlalchemy import create_engine
 from sqlalchemy.orm import sessionmaker, scoped_session
 from app.config import Config

-engine = create_engine(Config.DATABASE_URL, pool_pre_ping=True)
+engine = create_engine(Config.SQLALCHEMY_DATABASE_URI, pool_pre_ping=True)
 Session = scoped_session(sessionmaker(bind=engine))
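app/views.py imports get_db_session from this module, but its body is not part of this diff. Given the scoped Session above and the views' use of g.db_session, a typical shape would be the following sketch only, not the project's actual implementation:

# Sketch of a request-scoped session helper; the real get_db_session in
# app/database.py is not shown in this commit.
from flask import g

from app.database import Session


def get_db_session():
    """Return the session bound to the current request, creating it on first use."""
    if "db_session" not in g:
        g.db_session = Session()
    return g.db_session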

app/migrations.py (new file)

@@ -0,0 +1,32 @@
+import logging
+import os
+
+from alembic.config import Config
+from alembic import command
+from flask import Blueprint
+
+from app.config import Config as AppConfig
+
+migrations_bp = Blueprint("migrations", __name__)
+
+
+def run_migrations(app):
+    """Run database migrations"""
+    try:
+        # Create Alembic configuration object
+        alembic_cfg = Config("alembic.ini")
+        # Get the database URL from your app's config
+        database_url = AppConfig.SQLALCHEMY_DATABASE_URI
+        print(database_url)
+        # Set the SQLAlchemy URL
+        alembic_cfg.set_main_option("sqlalchemy.url", database_url)
+        logging.debug(f"Database URL set to: {database_url}")
+        # Run the migration
+        command.upgrade(alembic_cfg, "head")
+        logging.info("Database migration completed successfully")
+        return True
+    except Exception as e:
+        logging.error(f"Migration failed: {str(e)}")
+        return False
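run_migrations drives Alembic programmatically, so future schema changes still need revision scripts in the Alembic versions directory. Generating one could reuse the same setup, as in this sketch; it assumes alembic.ini exists and env.py points target_metadata at Base.metadata, and the message text is illustrative.

# Sketch: generate the next revision with the same programmatic Alembic setup
# that run_migrations() uses.
from alembic import command
from alembic.config import Config

from app.config import Config as AppConfig

alembic_cfg = Config("alembic.ini")
alembic_cfg.set_main_option("sqlalchemy.url", AppConfig.SQLALCHEMY_DATABASE_URI)
command.revision(alembic_cfg, message="add video metadata columns", autogenerate=True)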


@@ -12,6 +12,11 @@ class WatchHistory(Base):
     video_name = Column(String(255), nullable=False)
     channel_url = Column(String(255), nullable=False)
     channel_name = Column(String(255), nullable=False)
+    category = Column(String(100), nullable=True)
+    view_count = Column(Integer, nullable=True)
+    subscriber_count = Column(Integer, nullable=True)
+    thumbnail_url = Column(String(255), nullable=True)
+    upload_date = Column(DateTime, nullable=True)
     watch_date = Column(DateTime, nullable=False, server_default=func.now())
     created_by = Column(
         String(100), nullable=False, server_default="mpv-youtube-queue-server"

@@ -2,6 +2,7 @@ import logging
 from flask import Blueprint, g, jsonify, request
 from sqlalchemy.exc import SQLAlchemyError
+from sqlalchemy import inspect
 from app.database import get_db_session
 from app.models import SavedQueue, WatchHistory
@@ -61,6 +62,10 @@ def add_video():
     if not all(
         k in data for k in ["video_url", "video_name", "channel_url", "channel_name"]
     ):
         logging.error("Missing required fields")
+        logging.error(
+            "Required fields: video_url, video_name, channel_url, channel_name"
+        )
         return jsonify(message="Missing required fields"), 400
     new_entry = WatchHistory(
@@ -68,12 +73,18 @@ def add_video():
         video_name=data["video_name"],
         channel_url=data["channel_url"],
         channel_name=data["channel_name"],
+        category=data.get("category") if data.get("category") else None,
+        view_count=data.get("view_count") if data.get("view_count") else None,
+        subscriber_count=data.get("subscribers") if data.get("subscribers") else None,
+        thumbnail_url=data.get("thumbnail_url") if data.get("thumbnail_url") else None,
+        upload_date=data.get("upload_date") if data.get("upload_date") else None,
     )
     db_session = g.db_session
-    db_session.add(new_entry)
     try:
+        logging.debug("Adding video to watch history")
+        db_session.add(new_entry)
         db_session.commit()
         logging.debug("Video added to watch history")
         logging.debug(f"URL: {data['video_url']}")
@@ -99,3 +110,78 @@ def handle_request():
     if send_to_mpv(command):
         return "URL added to mpv queue", 200
     return "Failed to add URL to mpv queue", 500
+
+
+@bp.route("/migrate_watch_history", methods=["POST"])
+def migrate_watch_history():
+    db_session = g.db_session
+    engine = db_session.get_bind()
+    try:
+        # First check and add missing columns
+        inspector = inspect(engine)
+        existing_columns = [
+            col["name"] for col in inspector.get_columns("watch_history")
+        ]
+
+        # Define new columns and their SQL
+        new_columns = {
+            "category": "ALTER TABLE watch_history ADD COLUMN category VARCHAR(100)",
+            "view_count": "ALTER TABLE watch_history ADD COLUMN view_count INTEGER",
+            "subscriber_count": "ALTER TABLE watch_history ADD COLUMN subscriber_count INTEGER",
+            "thumbnail_url": "ALTER TABLE watch_history ADD COLUMN thumbnail_url VARCHAR(255)",
+            "upload_date": "ALTER TABLE watch_history ADD COLUMN upload_date TIMESTAMP",
+        }
+
+        # Add missing columns
+        columns_added = []
+        for col_name, sql in new_columns.items():
+            if col_name not in existing_columns:
+                engine.execute(sql)
+                columns_added.append(col_name)
+                logging.info(f"Added column: {col_name}")
+
+        # Now backfill with default values
+        entries = db_session.query(WatchHistory).all()
+        updated_count = 0
+        for entry in entries:
+            updated = False
+            # Check and set defaults for new columns if they're None
+            if entry.category is None:
+                entry.category = "Unknown"
+                updated = True
+            if entry.view_count is None:
+                entry.view_count = 0
+                updated = True
+            if entry.subscriber_count is None:
+                entry.subscriber_count = 0
+                updated = True
+            if entry.thumbnail_url is None:
+                entry.thumbnail_url = ""
+                updated = True
+            if entry.upload_date is None:
+                # Set to watch_date as a fallback
+                entry.upload_date = entry.watch_date
+                updated = True
+            if updated:
+                updated_count += 1
+
+        db_session.commit()
+
+        return (
+            jsonify(
+                {
+                    "message": "Migration completed successfully",
+                    "columns_added": columns_added,
+                    "records_updated": updated_count,
+                    "total_records": len(entries),
+                }
+            ),
+            200,
+        )
+    except SQLAlchemyError as e:
+        db_session.rollback()
+        logging.error(f"Migration failed: {e}")
+        return jsonify(message=f"Failed to migrate watch history: {str(e)}"), 500