This commit is contained in:
parent 9544b0415c
commit b7e89d9c22

.gitignore (vendored)
@@ -3,3 +3,5 @@ env/*
 .git
 db/*
 app/__pycache__/*
+migrations/versions/__pycache__/
+migrations/__pycache__/

Dockerfile
@@ -17,10 +17,6 @@ WORKDIR /app
 # Copy the current directory contents into the container at /app
 COPY . /app
 
-# Install any needed packages specified in requirements.txt
-# If there are no external dependencies, you can skip this step
-# RUN pip install --no-cache-dir -r requirements.txt
-
 # Make port 8080 available to the world outside this container
 EXPOSE "${PORT_NUMBER}"
 

alembic.ini (new file)
@@ -0,0 +1,119 @@
+# A generic, single database configuration.
+
+[alembic]
+# path to migration scripts
+# Use forward slashes (/) also on windows to provide an os agnostic path
+script_location = migrations
+
+# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
+# Uncomment the line below if you want the files to be prepended with date and time
+# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
+# for all available tokens
+# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
+
+# sys.path path, will be prepended to sys.path if present.
+# defaults to the current working directory.
+prepend_sys_path = .
+
+# timezone to use when rendering the date within the migration file
+# as well as the filename.
+# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library.
+# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
+# string value is passed to ZoneInfo()
+# leave blank for localtime
+# timezone =
+
+# max length of characters to apply to the "slug" field
+# truncate_slug_length = 40
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+# set to 'true' to allow .pyc and .pyo files without
+# a source .py file to be detected as revisions in the
+# versions/ directory
+# sourceless = false
+
+# version location specification; This defaults
+# to migrations/versions. When using multiple version
+# directories, initial revisions must be specified with --version-path.
+# The path separator used here should be the separator specified by "version_path_separator" below.
+# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions
+
+# version path separator; As mentioned above, this is the character used to split
+# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
+# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
+# Valid values for version_path_separator are:
+#
+# version_path_separator = :
+# version_path_separator = ;
+# version_path_separator = space
+# version_path_separator = newline
+#
+# Use os.pathsep. Default configuration used for new projects.
+version_path_separator = os
+
+# set to 'true' to search source files recursively
+# in each "version_locations" directory
+# new in Alembic version 1.10
+# recursive_version_locations = false
+
+# the output encoding used when revision files
+# are written from script.py.mako
+# output_encoding = utf-8
+
+# sqlalchemy.url = driver://user:pass@localhost/dbname
+
+
+[post_write_hooks]
+# post_write_hooks defines scripts or Python functions that are run
+# on newly generated revision scripts. See the documentation for further
+# detail and examples
+
+# format using "black" - use the console_scripts runner, against the "black" entrypoint
+# hooks = black
+# black.type = console_scripts
+# black.entrypoint = black
+# black.options = -l 79 REVISION_SCRIPT_FILENAME
+
+# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
+# hooks = ruff
+# ruff.type = exec
+# ruff.executable = %(here)s/.venv/bin/ruff
+# ruff.options = --fix REVISION_SCRIPT_FILENAME
+
+# Logging configuration
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARNING
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = WARNING
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S
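
The doubled %% in file_template is configparser escaping: Alembic reads this file with Python's configparser, whose interpolation treats a bare % as special. A quick self-contained illustration:

from configparser import ConfigParser

cp = ConfigParser()
cp.read_string("[alembic]\nfile_template = %%(rev)s_%%(slug)s\n")
print(cp.get("alembic", "file_template"))  # -> %(rev)s_%(slug)s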

app/__init__.py
@@ -4,16 +4,21 @@ from app.config import Config
 from app.database import engine
 from app.models import Base
 from app.views import bp
+from app.migrations import run_migrations
 
 
-def create_app():
-    app = Flask(__name__)
+def create_app(test_config=None):
+    app = Flask(__name__, instance_relative_config=True)
     app.config.from_object(Config)
 
-    # Ensure database tables exist
+    # Create database tables if they don't exist
     Base.metadata.create_all(engine)
 
     # Register blueprints
     app.register_blueprint(bp)
 
+    # Run migrations after tables are created
+    with app.app_context():
+        run_migrations(app)
+
     return app
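
With migrations wired into the factory, a deployment entry point only needs one call. A minimal sketch, assuming a wsgi.py module that is not part of this commit:

# wsgi.py (hypothetical entry point; serve with e.g. `gunicorn wsgi:app`)
from app import create_app

app = create_app()  # creates missing tables, then runs Alembic migrations once at startup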

app/config.py
@@ -4,6 +4,6 @@ import os
 class Config:
     LISTEN_ADDRESS = os.getenv("LISTEN_ADDRESS", "0.0.0.0")
     LISTEN_PORT = int(os.getenv("LISTEN_PORT", "8080"))
-    DATABASE_URL = os.getenv("DATABASE_URL", "sqlite:///./mpv.db")
+    SQLALCHEMY_DATABASE_URI = os.getenv("SQLALCHEMY_DATABASE_URI", "sqlite:///./mpv.db")
     MPV_SOCKET = os.getenv("MPV_SOCKET", "/tmp/mpvsocket")
     LOGLEVEL = os.getenv("LOGLEVEL", "INFO").strip().upper()
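
The class attributes are evaluated when app.config is first imported, so the renamed variable has to be in the environment before that import. A quick sketch (the sqlite path is only an example value):

import os
os.environ["SQLALCHEMY_DATABASE_URI"] = "sqlite:////tmp/mpv-test.db"  # example value

from app.config import Config  # os.getenv() runs now, as the class body executes
assert Config.SQLALCHEMY_DATABASE_URI.startswith("sqlite://")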

app/database.py
@@ -2,7 +2,7 @@ from sqlalchemy import create_engine
 from sqlalchemy.orm import sessionmaker, scoped_session
 from app.config import Config
 
-engine = create_engine(Config.DATABASE_URL, pool_pre_ping=True)
+engine = create_engine(Config.SQLALCHEMY_DATABASE_URI, pool_pre_ping=True)
 Session = scoped_session(sessionmaker(bind=engine))
 
 
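
pool_pre_ping makes the pool test each connection before handing it out, which avoids stale-connection errors against long-lived MySQL servers; scoped_session gives every thread its own session bound to the shared engine. A usage sketch:

from sqlalchemy import text
from app.database import Session

session = Session()                    # thread-local session from the scoped registry
try:
    session.execute(text("SELECT 1"))  # connection already validated by pool_pre_ping
    session.commit()
finally:
    Session.remove()                   # discard the thread's session, e.g. at request teardown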

app/migrations.py (new file)
@@ -0,0 +1,32 @@
+import logging
+import os
+from alembic.config import Config
+from alembic import command
+from flask import Blueprint
+from app.config import Config as AppConfig
+
+migrations_bp = Blueprint("migrations", __name__)
+
+
+def run_migrations(app):
+    """Run database migrations"""
+    try:
+        # Create Alembic configuration object
+        alembic_cfg = Config("alembic.ini")
+
+        # Get the database URL from your app's config
+        database_url = AppConfig.SQLALCHEMY_DATABASE_URI
+        print(database_url)
+
+        # Set the SQLAlchemy URL
+        alembic_cfg.set_main_option("sqlalchemy.url", database_url)
+        logging.debug(f"Database URL set to: {database_url}")
+
+        # Run the migration
+        command.upgrade(alembic_cfg, "head")
+
+        logging.info("Database migration completed successfully")
+        return True
+    except Exception as e:
+        logging.error(f"Migration failed: {str(e)}")
+        return False
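
command.upgrade(alembic_cfg, "head") is the programmatic equivalent of `alembic upgrade head`. Because run_migrations() swallows exceptions and returns False instead of raising, callers decide whether a failed upgrade is fatal. A sketch of a stricter wrapper (hypothetical helper, not part of this commit):

from app.migrations import run_migrations

def run_migrations_or_die(app):
    # Hypothetical: abort startup instead of logging and continuing
    if not run_migrations(app):
        raise RuntimeError("database migration failed; refusing to start")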

app/models.py
@@ -12,6 +12,11 @@ class WatchHistory(Base):
     video_name = Column(String(255), nullable=False)
     channel_url = Column(String(255), nullable=False)
     channel_name = Column(String(255), nullable=False)
+    category = Column(String(100), nullable=True)
+    view_count = Column(Integer, nullable=True)
+    subscriber_count = Column(Integer, nullable=True)
+    thumbnail_url = Column(String(255), nullable=True)
+    upload_date = Column(DateTime, nullable=True)
     watch_date = Column(DateTime, nullable=False, server_default=func.now())
     created_by = Column(
         String(100), nullable=False, server_default="mpv-youtube-queue-server"
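
All five new columns are nullable, so rows written before this commit stay valid and simply carry NULLs until the backfill runs. A sketch of an insert exercising the new fields (all values made up):

from datetime import datetime
from app.database import Session
from app.models import WatchHistory

entry = WatchHistory(
    video_url="https://example.com/watch?v=abc123",
    video_name="Example video",
    channel_url="https://example.com/@example",
    channel_name="Example channel",
    category="Music",                           # new, optional
    view_count=12345,                           # new, optional
    subscriber_count=678,                       # new, optional
    thumbnail_url="https://example.com/t.jpg",  # new, optional
    upload_date=datetime(2024, 1, 1),           # new, optional
)
session = Session()
session.add(entry)
session.commit()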

app/views.py
@@ -2,6 +2,7 @@ import logging
 
 from flask import Blueprint, g, jsonify, request
 from sqlalchemy.exc import SQLAlchemyError
+from sqlalchemy import inspect, text
 
 from app.database import get_db_session
 from app.models import SavedQueue, WatchHistory
@@ -61,6 +62,10 @@ def add_video():
     if not all(
         k in data for k in ["video_url", "video_name", "channel_url", "channel_name"]
     ):
-        logging.error("Missing required fields")
+        logging.error(
+            "Required fields: video_url, video_name, channel_url, channel_name"
+        )
         return jsonify(message="Missing required fields"), 400
 
     new_entry = WatchHistory(
@@ -68,12 +73,18 @@ def add_video():
         video_name=data["video_name"],
         channel_url=data["channel_url"],
         channel_name=data["channel_name"],
+        category=data.get("category") if data.get("category") else None,
+        view_count=data.get("view_count") if data.get("view_count") else None,
+        subscriber_count=data.get("subscribers") if data.get("subscribers") else None,
+        thumbnail_url=data.get("thumbnail_url") if data.get("thumbnail_url") else None,
+        upload_date=data.get("upload_date") if data.get("upload_date") else None,
     )
 
     db_session = g.db_session
-    db_session.add(new_entry)
 
     try:
+        logging.debug("Adding video to watch history")
+        db_session.add(new_entry)
         db_session.commit()
         logging.debug("Video added to watch history")
         logging.debug(f"URL: {data['video_url']}")
@@ -99,3 +110,78 @@ def handle_request():
     if send_to_mpv(command):
         return "URL added to mpv queue", 200
     return "Failed to add URL to mpv queue", 500
+
+
+@bp.route("/migrate_watch_history", methods=["POST"])
+def migrate_watch_history():
+    db_session = g.db_session
+    engine = db_session.get_bind()
+
+    try:
+        # First check and add missing columns
+        inspector = inspect(engine)
+        existing_columns = [
+            col["name"] for col in inspector.get_columns("watch_history")
+        ]
+
+        # Define new columns and their SQL
+        new_columns = {
+            "category": "ALTER TABLE watch_history ADD COLUMN category VARCHAR(100)",
+            "view_count": "ALTER TABLE watch_history ADD COLUMN view_count INTEGER",
+            "subscriber_count": "ALTER TABLE watch_history ADD COLUMN subscriber_count INTEGER",
+            "thumbnail_url": "ALTER TABLE watch_history ADD COLUMN thumbnail_url VARCHAR(255)",
+            "upload_date": "ALTER TABLE watch_history ADD COLUMN upload_date TIMESTAMP",
+        }
+
+        # Add missing columns
+        columns_added = []
+        for col_name, sql in new_columns.items():
+            if col_name not in existing_columns:
+                # Engine.execute() no longer exists in SQLAlchemy 2.x; run the DDL via the session
+                db_session.execute(text(sql))
+                columns_added.append(col_name)
+                logging.info(f"Added column: {col_name}")
+
+        # Now backfill with default values
+        entries = db_session.query(WatchHistory).all()
+        updated_count = 0
+
+        for entry in entries:
+            updated = False
+            # Check and set defaults for new columns if they're None
+            if entry.category is None:
+                entry.category = "Unknown"
+                updated = True
+            if entry.view_count is None:
+                entry.view_count = 0
+                updated = True
+            if entry.subscriber_count is None:
+                entry.subscriber_count = 0
+                updated = True
+            if entry.thumbnail_url is None:
+                entry.thumbnail_url = ""
+                updated = True
+            if entry.upload_date is None:
+                # Set to watch_date as a fallback
+                entry.upload_date = entry.watch_date
+                updated = True
+
+            if updated:
+                updated_count += 1
+
+        db_session.commit()
+        return (
+            jsonify(
+                {
+                    "message": "Migration completed successfully",
+                    "columns_added": columns_added,
+                    "records_updated": updated_count,
+                    "total_records": len(entries),
+                }
+            ),
+            200,
+        )
+
+    except SQLAlchemyError as e:
+        db_session.rollback()
+        logging.error(f"Migration failed: {e}")
+        return jsonify(message=f"Failed to migrate watch history: {str(e)}"), 500
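
The new endpoint takes no request body and reports what it changed, so it can be invoked once after deploying. A client sketch using only the standard library (host and port are assumptions taken from the compose mapping below):

import urllib.request

req = urllib.request.Request(
    "http://localhost:42069/migrate_watch_history",  # port published in docker-compose.yml
    method="POST",
)
with urllib.request.urlopen(req) as resp:
    print(resp.status, resp.read().decode())  # JSON: columns_added, records_updated, total_records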

docker-compose.yml
@@ -9,9 +9,4 @@ services:
     ports:
       - 42069:8080
     env_file: .env
-    networks:
-      - mpv-youtube-queue-server
     restart: unless-stopped
-networks:
-  mpv-youtube-queue-server:
-    external: true

@@ -8,4 +8,4 @@ LOGLEVEL=info
 # postgresql+psycopg2://user:password@host:port/dbname[?key=value&key=value...]
 # sqlite:///path
 # oracle+oracledb://user:pass@hostname:port[/dbname][?service_name=<service>[&key=value&key=value...]]
-DATABASE_URL=mysql+pymysql://user:password@localhost:3306/mpv
+SQLALCHEMY_DATABASE_URI=mysql+pymysql://user:password@localhost:3306/mpv

migrations/env.py (new file)
@@ -0,0 +1,83 @@
+from logging.config import fileConfig
+
+from sqlalchemy import engine_from_config
+from sqlalchemy import pool, create_engine
+
+from alembic import context
+from app.models import Base
+from app.config import Config
+
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+# Set the database URL from our app config
+config.set_main_option('sqlalchemy.url', Config.SQLALCHEMY_DATABASE_URI)
+
+# Interpret the config file for Python logging.
+# This line sets up loggers basically.
+if config.config_file_name is not None:
+    fileConfig(config.config_file_name)
+
+# add your model's MetaData object here
+# for 'autogenerate' support
+# from myapp import mymodel
+# target_metadata = mymodel.Base.metadata
+target_metadata = Base.metadata
+
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+
+def run_migrations_offline() -> None:
+    """Run migrations in 'offline' mode.
+
+    This configures the context with just a URL
+    and not an Engine, though an Engine is acceptable
+    here as well. By skipping the Engine creation
+    we don't even need a DBAPI to be available.
+
+    Calls to context.execute() here emit the given string to the
+    script output.
+
+    """
+    url = config.get_main_option("sqlalchemy.url")
+    context.configure(
+        url=url,
+        target_metadata=target_metadata,
+        literal_binds=True,
+        dialect_opts={"paramstyle": "named"},
+    )
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+def run_migrations_online() -> None:
+    """Run migrations in 'online' mode.
+
+    In this scenario we need to create an Engine
+    and associate a connection with the context.
+
+    """
+    # Use create_engine() directly with our URL
+    connectable = create_engine(Config.SQLALCHEMY_DATABASE_URI)
+
+    with connectable.connect() as connection:
+        context.configure(
+            connection=connection,
+            target_metadata=target_metadata
+        )
+
+        with context.begin_transaction():
+            context.run_migrations()
+
+
+if context.is_offline_mode():
+    run_migrations_offline()
+else:
+    run_migrations_online()
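
context.is_offline_mode() is what Alembic's --sql flag toggles: the offline branch renders the DDL as a SQL script instead of executing it against a live database. A sketch of driving that branch programmatically (the URL is an example value):

from alembic.config import Config
from alembic import command

cfg = Config("alembic.ini")
cfg.set_main_option("sqlalchemy.url", "sqlite:///./mpv.db")  # example value
command.upgrade(cfg, "head", sql=True)  # offline mode: prints the SQL, touches no database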

migrations/script.py.mako (new file)
@@ -0,0 +1,26 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision: str = ${repr(up_revision)}
+down_revision: Union[str, None] = ${repr(down_revision)}
+branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
+depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
+
+
+def upgrade() -> None:
+    ${upgrades if upgrades else "pass"}
+
+
+def downgrade() -> None:
+    ${downgrades if downgrades else "pass"}

migrations/versions/8911624d0776_*.py (new file)
@@ -0,0 +1,80 @@
+"""add watch history columns
+
+Revision ID: 8911624d0776
+Revises:
+Create Date: 2024-xx-xx xx:xx:xx.xxx
+"""
+from alembic import op
+import sqlalchemy as sa
+from datetime import datetime
+from sqlalchemy.engine.reflection import Inspector
+from sqlalchemy import text
+
+
+# revision identifiers, used by Alembic.
+revision = '8911624d0776'
+down_revision = None
+branch_labels = None
+depends_on = None
+
+
+def column_exists(table, column):
+    # Get inspector
+    conn = op.get_bind()
+    inspector = sa.inspect(conn)  # Inspector.from_engine() was removed in SQLAlchemy 2.x
+    columns = [c['name'] for c in inspector.get_columns(table)]
+    return column in columns
+
+
+def upgrade():
+    # Add new columns if they don't exist
+    with op.batch_alter_table('watch_history') as batch_op:
+        # Add category column
+        if not column_exists('watch_history', 'category'):
+            batch_op.add_column(sa.Column('category', sa.String(100), nullable=True))
+
+        # Add view_count column
+        if not column_exists('watch_history', 'view_count'):
+            batch_op.add_column(sa.Column('view_count', sa.Integer(), nullable=True))
+
+        # Add subscriber_count column
+        if not column_exists('watch_history', 'subscriber_count'):
+            batch_op.add_column(sa.Column('subscriber_count', sa.Integer(), nullable=True))
+
+        # Add thumbnail_url column
+        if not column_exists('watch_history', 'thumbnail_url'):
+            batch_op.add_column(sa.Column('thumbnail_url', sa.String(255), nullable=True))
+
+        # Add upload_date column
+        if not column_exists('watch_history', 'upload_date'):
+            batch_op.add_column(sa.Column('upload_date', sa.DateTime(), nullable=True))
+
+    # Backfill data
+    conn = op.get_bind()
+    conn.execute(text("""
+        UPDATE watch_history
+        SET category = COALESCE(category, 'Unknown'),
+            view_count = COALESCE(view_count, 0),
+            subscriber_count = COALESCE(subscriber_count, 0),
+            thumbnail_url = COALESCE(thumbnail_url, ''),
+            upload_date = COALESCE(upload_date, watch_date)
+        WHERE category IS NULL
+           OR view_count IS NULL
+           OR subscriber_count IS NULL
+           OR thumbnail_url IS NULL
+           OR upload_date IS NULL
+    """))
+
+
+def downgrade():
+    with op.batch_alter_table('watch_history') as batch_op:
+        if column_exists('watch_history', 'upload_date'):
+            batch_op.drop_column('upload_date')
+        if column_exists('watch_history', 'thumbnail_url'):
+            batch_op.drop_column('thumbnail_url')
+        if column_exists('watch_history', 'subscriber_count'):
+            batch_op.drop_column('subscriber_count')
+        if column_exists('watch_history', 'view_count'):
+            batch_op.drop_column('view_count')
+        if column_exists('watch_history', 'category'):
+            batch_op.drop_column('category')
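
Because every add and drop is guarded by column_exists(), the revision is idempotent: it is safe against a database that Base.metadata.create_all() already brought fully up to date. A sketch of stepping this one revision up and back down (assumes alembic.ini in the working directory; the URL is an example value):

from alembic.config import Config
from alembic import command

cfg = Config("alembic.ini")
cfg.set_main_option("sqlalchemy.url", "sqlite:///./mpv.db")  # example value
command.upgrade(cfg, "8911624d0776")  # apply exactly this revision
command.downgrade(cfg, "-1")          # step back down one revision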

@@ -10,7 +10,7 @@ Restart=on-failure
 Environment="MPV_SOCKET=/tmp/mpvsocket"
 Environment="LISTEN_ADDRESS=0.0.0.0"
 Environment="LISTEN_PORT=42069"
-Environment="DATABASE_URL=mysql+mysqldb://user:password@localhost:3306/mpv"
+Environment="SQLALCHEMY_DATABASE_URI=mysql+mysqldb://user:password@localhost:3306/mpv"
 
 [Install]
 WantedBy=multi-user.target

requirements.txt
@@ -1,18 +1,20 @@
-blinker==1.8.2
+alembic==1.14.1
+blinker==1.9.0
 cffi==1.17.1
 click==8.1.7
 cryptography==43.0.1
-Flask==3.0.3
+Flask==3.1.0
 greenlet==3.1.1
 gunicorn==23.0.0
 itsdangerous==2.2.0
-Jinja2==3.1.4
+Jinja2==3.1.5
+Mako==1.3.9
 MarkupSafe==2.1.5
-oracledb==2.4.1
+oracledb==2.5.1
 packaging==24.1
 psycopg2-binary==2.9.10
 pycparser==2.22
 PyMySQL==1.1.1
-SQLAlchemy==2.0.34
+SQLAlchemy==2.0.38
 typing_extensions==4.12.2
-Werkzeug==3.0.4
+Werkzeug==3.1.3