update code structure and get session on each query
All checks were successful
Build Docker Image / build (push) Successful in 12m54s
parent 410a7c92d9
commit af0d958b89
1  .gitignore  vendored

@@ -2,3 +2,4 @@
 env/*
 .git
 db/*
+app/__pycache__/*
@@ -15,7 +15,7 @@ RUN apt-get update && apt-get install -y \
 WORKDIR /app

 # Copy the current directory contents into the container at /app
-COPY server.py requirements.txt /app/
+COPY . /app

 # Install any needed packages specified in requirements.txt
 # If there are no external dependencies, you can skip this step
@@ -27,5 +27,4 @@ EXPOSE "${PORT_NUMBER}"
 RUN pip3 install --no-cache-dir -r requirements.txt

 # Run server.py when the container launches
-# CMD ["python3", "server.py", "--host", "${LISTEN_ADDRESS}", "--port", "${LISTEN_PORT}", "--input-ipc-server", "${MPV_SOCKET}"]
-CMD gunicorn --bind "${LISTEN_ADDRESS}":"${LISTEN_PORT}" server:app
+CMD gunicorn --bind "${LISTEN_ADDRESS}":"${LISTEN_PORT}" run:app
19  app/__init__.py  Normal file

@@ -0,0 +1,19 @@
from flask import Flask

from app.config import Config
from app.database import engine
from app.models import Base
from app.views import bp


def create_app():
    app = Flask(__name__)
    app.config.from_object(Config)

    # Ensure database tables exist
    Base.metadata.create_all(engine)

    # Register blueprints
    app.register_blueprint(bp)

    return app
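The application factory above can be exercised without gunicorn; a minimal sketch, assuming an in-memory SQLite DATABASE_URL is exported before the app package is imported (Config reads the environment at import time):

# Minimal sketch: drive create_app() from a Python shell or a test.
# The in-memory SQLite URL is an assumption for local experimentation only.
import os
os.environ["DATABASE_URL"] = "sqlite:///:memory:"

from app import create_app

application = create_app()
with application.test_client() as client:
    resp = client.get("/load_queue")          # route registered via the views blueprint
    print(resp.status_code, resp.get_json())  # expect 200 and an empty list on a fresh database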
9  app/config.py  Normal file

@@ -0,0 +1,9 @@
import os


class Config:
    LISTEN_ADDRESS = os.getenv("LISTEN_ADDRESS", "0.0.0.0")
    LISTEN_PORT = int(os.getenv("LISTEN_PORT", "8080"))
    DATABASE_URL = os.getenv("DATABASE_URL", "sqlite:///./mpv.db")
    MPV_SOCKET = os.getenv("MPV_SOCKET", "/tmp/mpvsocket")
    LOGLEVEL = os.getenv("LOGLEVEL", "INFO").strip().upper()
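Config resolves its values once, when app.config is first imported, so environment overrides have to be in place before importing anything from the app package. A small illustrative sketch (the values shown are placeholders, not project defaults):

# Sketch: set environment variables before the first import of app.config.
import os
os.environ["DATABASE_URL"] = "sqlite:////tmp/mpv-test.db"  # placeholder path
os.environ["LOGLEVEL"] = "debug"                           # normalised by .strip().upper()

from app.config import Config

print(Config.DATABASE_URL)  # sqlite:////tmp/mpv-test.db
print(Config.LOGLEVEL)      # DEBUG
print(Config.LISTEN_PORT)   # 8080 unless LISTEN_PORT is also set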
14  app/database.py  Normal file

@@ -0,0 +1,14 @@
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session
from app.config import Config

engine = create_engine(Config.DATABASE_URL, pool_pre_ping=True)
Session = scoped_session(sessionmaker(bind=engine))


def get_db_session():
    return Session()


def close_db_session(exception=None):
    Session.remove()
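This is the piece the commit message refers to: instead of one module-level session shared by every request, each unit of work asks the scoped_session for its own session and removes it afterwards. A minimal sketch of that lifecycle outside Flask (it calls create_all() itself so it can run standalone; the URL is a placeholder):

# Sketch: request-scoped session lifecycle, independent of Flask.
from app.database import engine, get_db_session, close_db_session
from app.models import Base, SavedQueue

Base.metadata.create_all(engine)      # normally done once by create_app()

session = get_db_session()            # fresh (thread-local) session from the scoped_session
try:
    session.add(SavedQueue(video_url="https://youtu.be/example"))  # placeholder URL
    session.commit()
except Exception:
    session.rollback()
    raise
finally:
    close_db_session()                # Session.remove(): close and discard the session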
28  app/models.py  Normal file

@@ -0,0 +1,28 @@
from sqlalchemy import Column, Integer, String, DateTime
from sqlalchemy.sql import func
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class WatchHistory(Base):
    __tablename__ = "watch_history"
    whid = Column(Integer, primary_key=True, autoincrement=True)
    video_url = Column(String(255), nullable=False)
    video_name = Column(String(255), nullable=False)
    channel_url = Column(String(255), nullable=False)
    channel_name = Column(String(255), nullable=False)
    watch_date = Column(DateTime, nullable=False, server_default=func.now())
    created_by = Column(
        String(100), nullable=False, server_default="mpv-youtube-queue-server"
    )


class SavedQueue(Base):
    __tablename__ = "saved_queue"
    sqid = Column(Integer, primary_key=True, autoincrement=True)
    video_url = Column(String(255), nullable=False)
    created_date = Column(DateTime, nullable=False, server_default=func.now())
    created_by = Column(
        String(100), nullable=False, server_default="mpv-youtube-queue-server"
    )
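For reference, a short sketch of reading the watch history back through these models; it assumes the tables already exist (create_all() has run, e.g. via create_app()).

# Sketch: query the ten most recent watch_history rows.
from app.database import get_db_session, close_db_session
from app.models import WatchHistory

session = get_db_session()
try:
    recent = (
        session.query(WatchHistory)
        .order_by(WatchHistory.watch_date.desc())
        .limit(10)
        .all()
    )
    for row in recent:
        print(row.watch_date, row.video_name, row.channel_name)
finally:
    close_db_session()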
27  app/mpv.py  Normal file

@@ -0,0 +1,27 @@
import socket
import time
import logging
from app.config import Config

SOCKET_RETRY_DELAY = 5
MAX_RETRIES = 10


def send_to_mpv(command):
    attempts = 0
    while attempts < MAX_RETRIES:
        try:
            with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as client_socket:
                client_socket.connect(Config.MPV_SOCKET)
                client_socket.sendall(command.encode("utf-8"))
                logging.info("Command sent to mpv successfully.")
                return True
        except socket.error as e:
            attempts += 1
            logging.error(
                f"Failed to connect to socket (attempt {attempts}/{MAX_RETRIES}): {e}. Retrying in {SOCKET_RETRY_DELAY} seconds..."
            )
            time.sleep(SOCKET_RETRY_DELAY)

    logging.error(f"Exceeded maximum retries ({MAX_RETRIES}). Ignoring the request.")
    return False
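send_to_mpv() writes a raw JSON IPC line to the mpv socket. A sketch of sending the same script-message the "/" route builds; it assumes an mpv instance is listening on Config.MPV_SOCKET (default /tmp/mpvsocket), otherwise the call blocks through its retries and returns False.

# Sketch: hand-craft the newline-terminated JSON IPC command and send it.
import json

from app.mpv import send_to_mpv

payload = {"command": ["script-message", "add_to_youtube_queue", "https://youtu.be/example"]}
if send_to_mpv(json.dumps(payload) + "\n"):   # mpv's JSON IPC expects one command per line
    print("queued in mpv")
else:
    print("mpv socket unavailable after retries")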
101  app/views.py  Normal file

@@ -0,0 +1,101 @@
import logging

from flask import Blueprint, g, jsonify, request
from sqlalchemy.exc import SQLAlchemyError

from app.database import get_db_session
from app.models import SavedQueue, WatchHistory
from app.mpv import send_to_mpv

bp = Blueprint("views", __name__)


@bp.before_request
def before_request():
    g.db_session = get_db_session()


@bp.teardown_app_request
def shutdown_session(exception=None):
    if hasattr(g, "db_session"):
        g.db_session.close()


@bp.route("/save_queue", methods=["POST"])
def save_queue():
    data = request.get_json()
    if not data or "urls" not in data:
        return jsonify(message="Invalid JSON data"), 400

    db_session = g.db_session
    db_session.query(SavedQueue).delete()

    for url in data["urls"]:
        new_entry = SavedQueue(video_url=url)
        db_session.add(new_entry)

    try:
        db_session.commit()
        return jsonify(message="Data added to saved queue"), 200
    except SQLAlchemyError as e:
        db_session.rollback()
        logging.error(f"Failed to insert data: {e}")
        return jsonify(message="Database error"), 500


@bp.route("/load_queue", methods=["GET"])
def load_queue():
    """
    Retrieves the saved queue of video URLs.
    """
    logging.debug("Loading saved queue")
    db_session = g.db_session
    urls = [entry.video_url for entry in db_session.query(SavedQueue).all()]
    logging.debug(f"Loaded {len(urls)} URLs from the saved queue")
    return jsonify(urls), 200


@bp.route("/add_video", methods=["POST"])
def add_video():
    data = request.get_json()
    if not all(
        k in data for k in ["video_url", "video_name", "channel_url", "channel_name"]
    ):
        return jsonify(message="Missing required fields"), 400

    new_entry = WatchHistory(
        video_url=data["video_url"],
        video_name=data["video_name"],
        channel_url=data["channel_url"],
        channel_name=data["channel_name"],
    )

    db_session = g.db_session
    db_session.add(new_entry)

    try:
        db_session.commit()
        logging.debug("Video added to watch history")
        logging.debug(f"URL: {data['video_url']}")
        logging.debug(f"Video name: {data['video_name']}")
        logging.debug(f"Channel URL: {data['channel_url']}")
        logging.debug(f"Channel name: {data['channel_name']}")
        return jsonify(message="Video added"), 200
    except SQLAlchemyError as e:
        db_session.rollback()
        logging.error(f"Database error: {e}")
        return jsonify(message="Failed to add video"), 500


@bp.route("/", methods=["GET"])
def handle_request():
    video_url = request.args.get("url")
    if not video_url:
        return "Missing 'url' parameter", 400

    command = (
        f'{{"command": ["script-message", "add_to_youtube_queue", "{video_url}"]}}\n'
    )
    if send_to_mpv(command):
        return "URL added to mpv queue", 200
    return "Failed to add URL to mpv queue", 500
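A sketch of driving the four routes over HTTP. The base URL is an assumption (LISTEN_PORT defaults to 8080; the compose file maps it to 42069 on the host), and the requests library is used purely for illustration; it is not part of requirements.txt. All example URLs and names are placeholders.

# Sketch: exercising the blueprint's endpoints against a running server.
import requests  # illustration only; not a project dependency

BASE = "http://localhost:8080"  # assumption; use http://<host>:42069 with the compose mapping

requests.post(f"{BASE}/add_video", json={
    "video_url": "https://youtu.be/example",
    "video_name": "Example video",
    "channel_url": "https://www.youtube.com/@example",
    "channel_name": "Example channel",
}, timeout=10)

requests.post(f"{BASE}/save_queue", json={"urls": ["https://youtu.be/example"]}, timeout=10)
print(requests.get(f"{BASE}/load_queue", timeout=10).json())

# GET / forwards the URL to mpv itself (only succeeds if the mpv socket is reachable):
requests.get(BASE, params={"url": "https://youtu.be/example"}, timeout=60)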
17  compose.yml  Normal file

@@ -0,0 +1,17 @@
---
services:
  mpv-youtube-queue-server:
    build: .
    container_name: mpv-youtube-queue-server
    user: 1000:1000
    volumes:
      - /tmp:/tmp
    ports:
      - 42069:8080
    env_file: .env
    networks:
      - mpv-youtube-queue-server
    restart: unless-stopped
networks:
  mpv-youtube-queue-server:
    external: true
@@ -1,31 +0,0 @@
---
services:
  mpv-youtube-queue-server:
    build: .
    image: mpv-youtube-queue-server:latest
    container_name: mpv-youtube-queue-server
    user: 1000:1000
    volumes:
      - /tmp:/tmp
    ports:
      - 42069:8080
    env_file: .env
    networks:
      - mpv-youtube-queue-server
    restart: unless-stopped
  db:
    image: lscr.io/linuxserver/mariadb:latest
    container_name: mpv-youtube-queue-db
    networks:
      - mpv-youtube-queue-server
    volumes:
      - ./db:/config
    environment:
      - MYSQL_DATABASE=mpv
      - MYSQL_USER=mpvuser
      - MYSQL_PASSWORD=SecretPassword
    ports:
      - 3306:3306
networks:
  mpv-youtube-queue-server:
    external: true
@@ -4,8 +4,8 @@ After=network.target

 [Service]
 User=<USER>
-WorkingDirectory=<PATH_TO_PYTHON_SCRIPT>
-ExecStart=<PATH_TO_PYTHON> <PATH_TO_PYTHON_SCRIPT>
+WorkingDirectory=<PATH_TO_PYTHON_SCRIPT_DIRECTORY>
+ExecStart=<PATH_TO_PYTHON> <PATH_TO_PYTHON_RUN_SCRIPT>
 Restart=on-failure
 Environment="MPV_SOCKET=/tmp/mpvsocket"
 Environment="LISTEN_ADDRESS=0.0.0.0"
@@ -3,14 +3,14 @@ cffi==1.17.1
 click==8.1.7
 cryptography==43.0.1
 Flask==3.0.3
-greenlet==3.0.3
+greenlet==3.1.1
 gunicorn==23.0.0
 itsdangerous==2.2.0
 Jinja2==3.1.4
 MarkupSafe==2.1.5
 oracledb==2.4.1
 packaging==24.1
-psycopg2-binary==2.9.9
+psycopg2-binary==2.9.10
 pycparser==2.22
 PyMySQL==1.1.1
 SQLAlchemy==2.0.34
12  run.py  Executable file

@@ -0,0 +1,12 @@
#!/usr/bin/env python3
import logging

from app import create_app
from app.config import Config

logging.basicConfig(level=getattr(logging, Config.LOGLEVEL))

app = create_app()

if __name__ == "__main__":
    app.run(host=Config.LISTEN_ADDRESS, port=Config.LISTEN_PORT)
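The Dockerfile's new CMD target, run:app, refers to the module-level app object created here: gunicorn imports the run module and serves its app attribute, while python3 run.py uses Flask's built-in development server instead. A small sketch of what that target resolves to:

# Sketch: "run:app" means "the app attribute of the run module".
from run import app  # the same object gunicorn serves

print(app.name)  # "app", because create_app() builds Flask(__name__) inside the app package
print(sorted(rule.rule for rule in app.url_map.iter_rules()))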
196  server.py

@@ -1,196 +0,0 @@
#!/usr/bin/env python3
import logging
import os
import socket
import time
from urllib import parse

from flask import Flask, jsonify, request
from sqlalchemy import Column, DateTime, Integer, String, create_engine, exc
from sqlalchemy.orm import declarative_base, sessionmaker
from sqlalchemy.sql import func

# Set up basic logging
logging.basicConfig(
    level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s"
)

# Flask app
app = Flask(__name__)

# Flask logging configuration to use the same logger as the rest of the app
app.logger.handlers = logging.getLogger().handlers
app.logger.setLevel(logging.getLogger().level)

SOCKET_RETRY_DELAY = 5  # Time in seconds between retries to connect to the socket
MAX_RETRIES = 10  # Maximum number of retries to connect to the socket

# Configuration from environment variables
LISTEN_ADDRESS = os.getenv("LISTEN_ADDRESS", "0.0.0.0")
LISTEN_PORT = int(os.getenv("LISTEN_PORT", "8080"))
DATABASE_URL = os.getenv("DATABASE_URL", "sqlite:///./mpv.db")
MPV_SOCKET: str = os.getenv("MPV_SOCKET", "/tmp/mpvsocket")
LOGLEVEL = os.getenv("LOGLEVEL", "INFO").strip().upper()

if LOGLEVEL == "DEBUG":
    logging.getLogger().setLevel(logging.DEBUG)
elif LOGLEVEL == "WARNING":
    logging.getLogger().setLevel(logging.WARNING)
elif LOGLEVEL == "ERROR":
    logging.getLogger().setLevel(logging.ERROR)
else:
    logging.getLogger().setLevel(logging.INFO)

# Set up SQLAlchemy
Base = declarative_base()
engine = create_engine(DATABASE_URL)
Session = sessionmaker(bind=engine)
session = Session()


class WatchHistory(Base):
    __tablename__ = "watch_history"

    whid = Column(Integer, primary_key=True, autoincrement=True)
    video_url = Column(String(255), nullable=False)
    video_name = Column(String(255), nullable=False)
    channel_url = Column(String(255), nullable=False)
    channel_name = Column(String(255), nullable=False)
    watch_date = Column(DateTime, nullable=False, server_default=func.now())
    created_by = Column(
        String(100), nullable=False, server_default="mpv-youtube-queue-server"
    )


class SavedQueue(Base):
    __tablename__ = "saved_queue"
    sqid = Column(Integer, primary_key=True, autoincrement=True)
    video_url = Column(String(255), nullable=False)
    created_date = Column(DateTime, nullable=False, server_default=func.now())
    created_by = Column(
        String(100), nullable=False, server_default="mpv-youtube-queue-server"
    )


# Ensure tables exist
Base.metadata.create_all(engine)


def send_to_mpv(command):
    """Send a command to the mpv socket, retrying up to MAX_RETRIES times if the socket is not available."""
    attempts = 0
    while attempts < MAX_RETRIES:
        try:
            with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as client_socket:
                client_socket.connect(MPV_SOCKET)
                client_socket.sendall(command.encode("utf-8"))
                logging.info("Command sent to mpv successfully.")
                return True
        except socket.error as e:
            attempts += 1
            logging.error(
                f"Failed to connect to socket (attempt {attempts}/{MAX_RETRIES}): {e}. Retrying in {SOCKET_RETRY_DELAY} seconds..."
            )
            time.sleep(SOCKET_RETRY_DELAY)

    logging.error(f"Exceeded maximum retries ({MAX_RETRIES}). Ignoring the request.")
    return False


@app.route("/save_queue", methods=["POST"])
def save_queue():
    data = request.get_json()
    if data is None or "urls" not in data:
        logging.error("Invalid JSON data")
        return jsonify(message="Invalid JSON data"), 400
    logging.debug(f"Received data: {data}")
    urls = data.get("urls")
    logging.debug("Truncating saved queue")
    session.query(SavedQueue).delete()
    for url in urls:
        logging.debug(f"Adding {url} to the saved queue")
        new_entry = SavedQueue(video_url=url)
        try:
            session.add(new_entry)
        except exc.SQLAlchemyError as e:
            logging.error(f"Failed to insert data into database: {e}")
            return jsonify(message="Failed to add data to database"), 500
    session.commit()
    return jsonify(message="Data added to saved queue"), 200


@app.route("/load_queue", methods=["GET"])
def load_queue():
    logging.debug("Loading saved queue")
    urls = [entry.video_url for entry in session.query(SavedQueue).all()]
    logging.debug(f"Loaded {len(urls)} URLs from the saved queue")
    return jsonify(urls), 200


@app.route("/add_video", methods=["POST"])
def add_video():
    data = request.get_json()

    if data:
        video_url: str = data.get("video_url")
        video_name: str = data.get("video_name")
        channel_url: str = data.get("channel_url")
        channel_name: str = data.get("channel_name")

        if video_url and video_name and channel_url and channel_name:
            logging.debug(f"Received data: {data}")

            try:
                new_entry = WatchHistory(
                    video_url=video_url,
                    video_name=video_name,
                    channel_url=channel_url,
                    channel_name=channel_name,
                )
                session.add(new_entry)
                session.commit()
                logging.debug(
                    f"{video_name} by {channel_name} inserted into the database successfully"
                )
                return jsonify(message="Data added to mpv queue and database"), 200
            except exc.SQLAlchemyError as e:
                session.rollback()
                logging.error(f"Failed to insert data into database: {e}")
                return jsonify(message="Failed to add data to database"), 500
        else:
            logging.error("Missing required data fields")
            return jsonify(message="Missing required data fields"), 400
    else:
        logging.error("Invalid JSON data")
        return jsonify(message="Invalid JSON data"), 400


@app.route("/", methods=["GET"])
def handle_request():
    video_url = request.args.get("url")
    if video_url:
        video_url = parse.unquote(video_url)  # Decode the URL
        logging.info(f"Received URL: {video_url}")

        # Create the command to send to mpv
        command = f'{{"command": ["script-message", "add_to_youtube_queue", "{video_url}"]}}\n'

        # Try to send the command to mpv
        if send_to_mpv(command):
            return "URL added to mpv queue", 200
        else:
            return "Failed to add URL to mpv queue after max retries", 500
    else:
        logging.error("Missing 'url' parameter")
        return "Missing 'url' parameter", 400


if __name__ == "__main__":
    logging.info(f"Starting server on {LISTEN_ADDRESS}:{LISTEN_PORT}...")
    try:
        app.run(host=LISTEN_ADDRESS, port=LISTEN_PORT)
    except Exception as e:
        logging.exception(f"Error occurred: {e}")
    except KeyboardInterrupt:
        logging.info("Server is shutting down...")
    logging.info("Server stopped.")