diff --git a/.agents/plugins/marketplace.json b/.agents/plugins/marketplace.json new file mode 100644 index 0000000..55b4353 --- /dev/null +++ b/.agents/plugins/marketplace.json @@ -0,0 +1,20 @@ +{ + "name": "subminer-local", + "interface": { + "displayName": "SubMiner Local" + }, + "plugins": [ + { + "name": "subminer-workflow", + "source": { + "source": "local", + "path": "./plugins/subminer-workflow" + }, + "policy": { + "installation": "AVAILABLE", + "authentication": "ON_INSTALL" + }, + "category": "Productivity" + } + ] +} diff --git a/.agents/skills/subminer-change-verification/SKILL.md b/.agents/skills/subminer-change-verification/SKILL.md index f5dd17a..3a78d55 100644 --- a/.agents/skills/subminer-change-verification/SKILL.md +++ b/.agents/skills/subminer-change-verification/SKILL.md @@ -1,127 +1,22 @@ --- -name: "subminer-change-verification" -description: "Use when working in the SubMiner repo and you need to verify code changes actually work. Covers targeted regression checks during debugging and pre-handoff verification, with cheap-first lane selection for config, docs, launcher/plugin, runtime-compat, and optional real-runtime escalation." +name: 'subminer-change-verification' +description: 'Compatibility shim. Canonical SubMiner change verification workflow now lives in the repo-local subminer-workflow plugin.' --- -# SubMiner Change Verification +# Compatibility Shim -Use this skill for SubMiner code changes. Default to cheap, repo-native verification first. Escalate only when the changed behavior actually depends on Electron, mpv, overlay/window tracking, or other GUI-sensitive runtime behavior. +Canonical source: -## Scripts +- `plugins/subminer-workflow/skills/subminer-change-verification/SKILL.md` -- `scripts/classify_subminer_diff.sh` - - Emits suggested lanes and flags from explicit paths or current git changes. -- `scripts/verify_subminer_change.sh` - - Runs selected lanes, captures artifacts, and writes a compact summary. 
+Canonical helper scripts: -If you need an explicit installed path, use the directory that contains this `SKILL.md`. The helper scripts live under: +- `plugins/subminer-workflow/skills/subminer-change-verification/scripts/classify_subminer_diff.sh` +- `plugins/subminer-workflow/skills/subminer-change-verification/scripts/verify_subminer_change.sh` -```bash -export SUBMINER_VERIFY_SKILL="" -``` +When this shim is invoked: -## Default workflow - -1. Inspect the changed files or user-requested area. -2. Run the classifier unless you already know the right lane. -3. Run the verifier with the cheapest sufficient lane set. -4. If the classifier emits `flag:real-runtime-candidate`, do not jump straight to runtime verification. First run the non-runtime lanes. -5. Escalate to explicit `--lane real-runtime --allow-real-runtime` only when cheaper lanes cannot validate the behavior claim. -6. Return: - - verification summary - - exact commands run - - artifact paths - - skipped lanes and blockers - -## Quick start - -Repo-source quick start: - -```bash -bash .agents/skills/subminer-change-verification/scripts/classify_subminer_diff.sh -``` - -Installed-skill quick start: - -```bash -bash "$SUBMINER_VERIFY_SKILL/scripts/classify_subminer_diff.sh" -``` - -Classify explicit files: - -```bash -bash .agents/skills/subminer-change-verification/scripts/classify_subminer_diff.sh \ - launcher/main.ts \ - plugin/subminer/lifecycle.lua \ - src/main/runtime/mpv-client-runtime-service.ts -``` - -Run automatic lane selection: - -```bash -bash .agents/skills/subminer-change-verification/scripts/verify_subminer_change.sh -``` - -Installed-skill form: - -```bash -bash "$SUBMINER_VERIFY_SKILL/scripts/verify_subminer_change.sh" -``` - -Run targeted lanes: - -```bash -bash .agents/skills/subminer-change-verification/scripts/verify_subminer_change.sh \ - --lane launcher-plugin \ - --lane runtime-compat -``` - -Dry-run to inspect planned commands and artifact layout: - -```bash -bash 
.agents/skills/subminer-change-verification/scripts/verify_subminer_change.sh \ - --dry-run \ - launcher/main.ts \ - src/main.ts -``` - -## Lane guidance - -- `docs` - - For `docs-site/`, `docs/`, and doc-only edits. -- `config` - - For `src/config/` and config-template-sensitive edits. -- `core` - - For general source changes where `typecheck` + `test:fast` is the best cheap signal. -- `launcher-plugin` - - For `launcher/`, `plugin/subminer/`, plugin gating scripts, and wrapper/mpv routing work. -- `runtime-compat` - - For `src/main*`, runtime/composer wiring, mpv/overlay services, window trackers, and dist-sensitive behavior. -- `real-runtime` - - Only after deliberate escalation. - -## Real Runtime Escalation - -Escalate only when the change claim depends on actual runtime behavior, for example: - -- overlay appears, hides, or tracks a real mpv window -- mpv launch flags or pause-until-ready behavior -- plugin/socket/auto-start handshake under a real player -- macOS/window-tracker/focus-sensitive behavior - -If the environment cannot support authoritative runtime verification, report the blocker explicitly. Do not silently downgrade a runtime-required claim to a pass. - -## Artifact contract - -The verifier writes under `.tmp/skill-verification//`: - -- `summary.json` -- `summary.txt` -- `classification.txt` -- `env.txt` -- `lanes.txt` -- `steps.tsv` -- `steps/*.stdout.log` -- `steps/*.stderr.log` - -On failure, quote the exact failing command and point at the artifact directory. +1. Read the canonical plugin-owned skill. +2. Follow the plugin-owned skill as the source of truth. +3. Use the wrapper scripts in this shim directory only for compatibility with existing commands, docs, and backlog history. +4. Do not duplicate workflow changes here; update the plugin-owned skill and scripts instead. 
diff --git a/.agents/skills/subminer-change-verification/scripts/classify_subminer_diff.sh b/.agents/skills/subminer-change-verification/scripts/classify_subminer_diff.sh index a983ff3..4c7acce 100755 --- a/.agents/skills/subminer-change-verification/scripts/classify_subminer_diff.sh +++ b/.agents/skills/subminer-change-verification/scripts/classify_subminer_diff.sh @@ -1,163 +1,13 @@ #!/usr/bin/env bash set -euo pipefail -usage() { - cat <<'EOF' -Usage: classify_subminer_diff.sh [path ...] +SCRIPT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) +REPO_ROOT=$(cd "$SCRIPT_DIR/../../../.." && pwd) +TARGET="$REPO_ROOT/plugins/subminer-workflow/skills/subminer-change-verification/scripts/classify_subminer_diff.sh" -Emit suggested verification lanes for explicit paths or current local git changes. - -Output format: - lane: - flag: - reason: -EOF -} - -has_item() { - local needle=$1 - shift || true - local item - for item in "$@"; do - if [[ "$item" == "$needle" ]]; then - return 0 - fi - done - return 1 -} - -add_lane() { - local lane=$1 - if ! has_item "$lane" "${LANES[@]:-}"; then - LANES+=("$lane") - fi -} - -add_flag() { - local flag=$1 - if ! has_item "$flag" "${FLAGS[@]:-}"; then - FLAGS+=("$flag") - fi -} - -add_reason() { - REASONS+=("$1") -} - -collect_git_paths() { - local top_level - if ! top_level=$(git rev-parse --show-toplevel 2>/dev/null); then - return 0 - fi - - ( - cd "$top_level" - if git rev-parse --verify HEAD >/dev/null 2>&1; then - git diff --name-only --relative HEAD -- - git diff --name-only --relative --cached -- - else - git diff --name-only --relative -- - git diff --name-only --relative --cached -- - fi - git ls-files --others --exclude-standard - ) | awk 'NF' | sort -u -} - -if [[ "${1:-}" == "--help" || "${1:-}" == "-h" ]]; then - usage - exit 0 +if [[ ! 
-x "$TARGET" ]]; then + echo "Missing canonical script: $TARGET" >&2 + exit 1 fi -declare -a PATHS=() -declare -a LANES=() -declare -a FLAGS=() -declare -a REASONS=() - -if [[ $# -gt 0 ]]; then - while [[ $# -gt 0 ]]; do - PATHS+=("$1") - shift - done -else - while IFS= read -r line; do - [[ -n "$line" ]] && PATHS+=("$line") - done < <(collect_git_paths) -fi - -if [[ ${#PATHS[@]} -eq 0 ]]; then - add_lane "core" - add_reason "no changed paths detected -> default to core" -fi - -for path in "${PATHS[@]}"; do - specialized=0 - - case "$path" in - docs-site/*|docs/*|changes/*|README.md) - add_lane "docs" - add_reason "$path -> docs" - specialized=1 - ;; - esac - - case "$path" in - src/config/*|src/generate-config-example.ts|src/verify-config-example.ts|docs-site/public/config.example.jsonc|config.example.jsonc) - add_lane "config" - add_reason "$path -> config" - specialized=1 - ;; - esac - - case "$path" in - launcher/*|plugin/subminer/*|plugin/subminer.conf|scripts/test-plugin-*|scripts/get-mpv-window-*|scripts/configure-plugin-binary-path.mjs) - add_lane "launcher-plugin" - add_reason "$path -> launcher-plugin" - add_flag "real-runtime-candidate" - add_reason "$path -> real-runtime-candidate" - specialized=1 - ;; - esac - - case "$path" in - src/main.ts|src/main-entry.ts|src/preload.ts|src/main/*|src/core/services/mpv*|src/core/services/overlay*|src/renderer/*|src/window-trackers/*|scripts/prepare-build-assets.mjs) - add_lane "runtime-compat" - add_reason "$path -> runtime-compat" - add_flag "real-runtime-candidate" - add_reason "$path -> real-runtime-candidate" - specialized=1 - ;; - esac - - if [[ "$specialized" == "0" ]]; then - case "$path" in - src/*|package.json|tsconfig*.json|scripts/*|Makefile) - add_lane "core" - add_reason "$path -> core" - ;; - esac - fi - - case "$path" in - package.json|src/main.ts|src/main-entry.ts|src/preload.ts) - add_flag "broad-impact" - add_reason "$path -> broad-impact" - ;; - esac -done - -if [[ ${#LANES[@]} -eq 0 ]]; then - 
add_lane "core" - add_reason "no lane-specific matches -> default to core" -fi - -for lane in "${LANES[@]}"; do - printf 'lane:%s\n' "$lane" -done - -for flag in "${FLAGS[@]}"; do - printf 'flag:%s\n' "$flag" -done - -for reason in "${REASONS[@]}"; do - printf 'reason:%s\n' "$reason" -done +exec "$TARGET" "$@" diff --git a/.agents/skills/subminer-change-verification/scripts/verify_subminer_change.sh b/.agents/skills/subminer-change-verification/scripts/verify_subminer_change.sh index d34dc8f..58cdd64 100755 --- a/.agents/skills/subminer-change-verification/scripts/verify_subminer_change.sh +++ b/.agents/skills/subminer-change-verification/scripts/verify_subminer_change.sh @@ -1,566 +1,13 @@ #!/usr/bin/env bash set -euo pipefail -usage() { - cat <<'EOF' -Usage: verify_subminer_change.sh [options] [path ...] - -Options: - --lane Force a verification lane. Repeatable. - --artifact-dir Use an explicit artifact directory. - --allow-real-runtime Allow explicit real-runtime execution. - --allow-real-gui Deprecated alias for --allow-real-runtime. - --dry-run Record planned steps without executing commands. - --help Show this help text. - -If no lanes are supplied, the script classifies the provided paths. If no paths are -provided, it classifies the current local git changes. - -Authoritative real-runtime verification should be requested with explicit path -arguments instead of relying on inferred local git changes. 
-EOF -} - -timestamp() { - date +%Y%m%d-%H%M%S -} - -timestamp_iso() { - date -u +%Y-%m-%dT%H:%M:%SZ -} - -generate_session_id() { - local tmp_dir - tmp_dir=$(mktemp -d "${TMPDIR:-/tmp}/subminer-verify-$(timestamp)-XXXXXX") - basename "$tmp_dir" - rmdir "$tmp_dir" -} - -has_item() { - local needle=$1 - shift || true - local item - for item in "$@"; do - if [[ "$item" == "$needle" ]]; then - return 0 - fi - done - return 1 -} - -normalize_lane_name() { - case "$1" in - real-gui) - printf '%s' "real-runtime" - ;; - *) - printf '%s' "$1" - ;; - esac -} - -add_lane() { - local lane - lane=$(normalize_lane_name "$1") - if ! has_item "$lane" "${SELECTED_LANES[@]:-}"; then - SELECTED_LANES+=("$lane") - fi -} - -add_blocker() { - BLOCKERS+=("$1") - BLOCKED=1 -} - -append_step_record() { - printf '%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n' \ - "$1" "$2" "$3" "$4" "$5" "$6" "$7" "$8" >>"$STEPS_TSV" -} - -record_env() { - { - printf 'repo_root=%s\n' "$REPO_ROOT" - printf 'session_id=%s\n' "$SESSION_ID" - printf 'artifact_dir=%s\n' "$ARTIFACT_DIR" - printf 'path_selection_mode=%s\n' "$PATH_SELECTION_MODE" - printf 'dry_run=%s\n' "$DRY_RUN" - printf 'allow_real_runtime=%s\n' "$ALLOW_REAL_RUNTIME" - printf 'session_home=%s\n' "$SESSION_HOME" - printf 'session_xdg_config_home=%s\n' "$SESSION_XDG_CONFIG_HOME" - printf 'session_mpv_dir=%s\n' "$SESSION_MPV_DIR" - printf 'session_logs_dir=%s\n' "$SESSION_LOGS_DIR" - printf 'session_mpv_log=%s\n' "$SESSION_MPV_LOG" - printf 'pwd=%s\n' "$(pwd)" - git rev-parse --short HEAD 2>/dev/null | sed 's/^/git_head=/' || true - git status --short 2>/dev/null || true - if [[ ${#PATH_ARGS[@]} -gt 0 ]]; then - printf 'requested_paths=\n' - printf ' %s\n' "${PATH_ARGS[@]}" - fi - } >"$ARTIFACT_DIR/env.txt" -} - -run_step() { - local lane=$1 - local name=$2 - local command=$3 - local note=${4:-} - local slug=${name//[^a-zA-Z0-9_-]/-} - local stdout_rel="steps/${slug}.stdout.log" - local stderr_rel="steps/${slug}.stderr.log" - local 
stdout_path="$ARTIFACT_DIR/$stdout_rel" - local stderr_path="$ARTIFACT_DIR/$stderr_rel" - local status exit_code - - COMMANDS_RUN+=("$command") - printf '%s\n' "$command" >"$ARTIFACT_DIR/steps/${slug}.command.txt" - - if [[ "$DRY_RUN" == "1" ]]; then - printf '[dry-run] %s\n' "$command" >"$stdout_path" - : >"$stderr_path" - status="dry-run" - exit_code=0 - else - if bash -lc "cd \"$REPO_ROOT\" && $command" >"$stdout_path" 2>"$stderr_path"; then - status="passed" - exit_code=0 - EXECUTED_REAL_STEPS=1 - else - exit_code=$? - status="failed" - FAILED=1 - fi - fi - - append_step_record "$lane" "$name" "$status" "$exit_code" "$command" "$stdout_rel" "$stderr_rel" "$note" - printf '%s\t%s\t%s\n' "$lane" "$name" "$status" - - if [[ "$status" == "failed" ]]; then - FAILURE_STEP="$name" - FAILURE_COMMAND="$command" - FAILURE_STDOUT="$stdout_rel" - FAILURE_STDERR="$stderr_rel" - return "$exit_code" - fi -} - -record_nonpassing_step() { - local lane=$1 - local name=$2 - local status=$3 - local note=$4 - local slug=${name//[^a-zA-Z0-9_-]/-} - local stdout_rel="steps/${slug}.stdout.log" - local stderr_rel="steps/${slug}.stderr.log" - printf '%s\n' "$note" >"$ARTIFACT_DIR/$stdout_rel" - : >"$ARTIFACT_DIR/$stderr_rel" - append_step_record "$lane" "$name" "$status" "0" "" "$stdout_rel" "$stderr_rel" "$note" - printf '%s\t%s\t%s\n' "$lane" "$name" "$status" -} - -record_skipped_step() { - record_nonpassing_step "$1" "$2" "skipped" "$3" -} - -record_blocked_step() { - add_blocker "$3" - record_nonpassing_step "$1" "$2" "blocked" "$3" -} - -record_failed_step() { - FAILED=1 - FAILURE_STEP=$2 - FAILURE_COMMAND=${FAILURE_COMMAND:-"(validation)"} - FAILURE_STDOUT="steps/${2//[^a-zA-Z0-9_-]/-}.stdout.log" - FAILURE_STDERR="steps/${2//[^a-zA-Z0-9_-]/-}.stderr.log" - add_blocker "$3" - record_nonpassing_step "$1" "$2" "failed" "$3" -} - -find_real_runtime_helper() { - local candidate - for candidate in \ - "$SCRIPT_DIR/run_real_runtime_smoke.sh" \ - "$SCRIPT_DIR/run_real_mpv_smoke.sh"; do 
- if [[ -x "$candidate" ]]; then - printf '%s' "$candidate" - return 0 - fi - done - return 1 -} - -acquire_real_runtime_lease() { - local lease_root="$REPO_ROOT/.tmp/skill-verification/locks" - local lease_dir="$lease_root/exclusive-real-runtime" - mkdir -p "$lease_root" - if mkdir "$lease_dir" 2>/dev/null; then - REAL_RUNTIME_LEASE_DIR="$lease_dir" - printf '%s\n' "$SESSION_ID" >"$lease_dir/session_id" - return 0 - fi - - local owner="" - if [[ -f "$lease_dir/session_id" ]]; then - owner=$(cat "$lease_dir/session_id") - fi - add_blocker "real-runtime lease already held${owner:+ by $owner}" - return 1 -} - -release_real_runtime_lease() { - if [[ -n "$REAL_RUNTIME_LEASE_DIR" && -d "$REAL_RUNTIME_LEASE_DIR" ]]; then - if [[ -f "$REAL_RUNTIME_LEASE_DIR/session_id" ]]; then - local owner - owner=$(cat "$REAL_RUNTIME_LEASE_DIR/session_id") - if [[ "$owner" != "$SESSION_ID" ]]; then - return 0 - fi - fi - rm -rf "$REAL_RUNTIME_LEASE_DIR" - fi -} - -compute_final_status() { - if [[ "$FAILED" == "1" ]]; then - FINAL_STATUS="failed" - elif [[ "$BLOCKED" == "1" ]]; then - FINAL_STATUS="blocked" - elif [[ "$EXECUTED_REAL_STEPS" == "1" ]]; then - FINAL_STATUS="passed" - else - FINAL_STATUS="skipped" - fi -} - -write_summary_files() { - local lane_lines - lane_lines=$(printf '%s\n' "${SELECTED_LANES[@]}") - printf '%s\n' "$lane_lines" >"$ARTIFACT_DIR/lanes.txt" - printf '%s\n' "${BLOCKERS[@]}" >"$ARTIFACT_DIR/blockers.txt" - printf '%s\n' "${PATH_ARGS[@]}" >"$ARTIFACT_DIR/requested-paths.txt" - - ARTIFACT_DIR_ENV="$ARTIFACT_DIR" \ - SESSION_ID_ENV="$SESSION_ID" \ - FINAL_STATUS_ENV="$FINAL_STATUS" \ - PATH_SELECTION_MODE_ENV="$PATH_SELECTION_MODE" \ - ALLOW_REAL_RUNTIME_ENV="$ALLOW_REAL_RUNTIME" \ - SESSION_HOME_ENV="$SESSION_HOME" \ - SESSION_XDG_CONFIG_HOME_ENV="$SESSION_XDG_CONFIG_HOME" \ - SESSION_MPV_DIR_ENV="$SESSION_MPV_DIR" \ - SESSION_LOGS_DIR_ENV="$SESSION_LOGS_DIR" \ - SESSION_MPV_LOG_ENV="$SESSION_MPV_LOG" \ - STARTED_AT_ENV="$STARTED_AT" \ - 
FINISHED_AT_ENV="$FINISHED_AT" \ - FAILED_ENV="$FAILED" \ - FAILURE_COMMAND_ENV="${FAILURE_COMMAND:-}" \ - FAILURE_STDOUT_ENV="${FAILURE_STDOUT:-}" \ - FAILURE_STDERR_ENV="${FAILURE_STDERR:-}" \ - bun -e ' - const fs = require("fs"); - const path = require("path"); - - function readLines(filePath) { - if (!fs.existsSync(filePath)) return []; - return fs.readFileSync(filePath, "utf8").split(/\r?\n/).filter(Boolean); - } - - const artifactDir = process.env.ARTIFACT_DIR_ENV; - const reportsDir = path.join(artifactDir, "reports"); - const lanes = readLines(path.join(artifactDir, "lanes.txt")); - const blockers = readLines(path.join(artifactDir, "blockers.txt")); - const requestedPaths = readLines(path.join(artifactDir, "requested-paths.txt")); - const steps = readLines(path.join(artifactDir, "steps.tsv")).map((line) => { - const [lane, name, status, exitCode, command, stdout, stderr, note] = line.split("\t"); - return { - lane, - name, - status, - exitCode: Number(exitCode || 0), - command, - stdout, - stderr, - note, - }; - }); - const summary = { - sessionId: process.env.SESSION_ID_ENV || "", - artifactDir, - reportsDir, - status: process.env.FINAL_STATUS_ENV || "failed", - selectedLanes: lanes, - failed: process.env.FAILED_ENV === "1", - failure: - process.env.FAILED_ENV === "1" - ? 
{ - command: process.env.FAILURE_COMMAND_ENV || "", - stdout: process.env.FAILURE_STDOUT_ENV || "", - stderr: process.env.FAILURE_STDERR_ENV || "", - } - : null, - blockers, - pathSelectionMode: process.env.PATH_SELECTION_MODE_ENV || "git-inferred", - requestedPaths, - allowRealRuntime: process.env.ALLOW_REAL_RUNTIME_ENV === "1", - startedAt: process.env.STARTED_AT_ENV || "", - finishedAt: process.env.FINISHED_AT_ENV || "", - env: { - home: process.env.SESSION_HOME_ENV || "", - xdgConfigHome: process.env.SESSION_XDG_CONFIG_HOME_ENV || "", - mpvDir: process.env.SESSION_MPV_DIR_ENV || "", - logsDir: process.env.SESSION_LOGS_DIR_ENV || "", - mpvLog: process.env.SESSION_MPV_LOG_ENV || "", - }, - steps, - }; - - const summaryJson = JSON.stringify(summary, null, 2) + "\n"; - fs.writeFileSync(path.join(artifactDir, "summary.json"), summaryJson); - fs.writeFileSync(path.join(reportsDir, "summary.json"), summaryJson); - - const lines = []; - lines.push(`session_id: ${summary.sessionId}`); - lines.push(`artifact_dir: ${artifactDir}`); - lines.push(`selected_lanes: ${lanes.join(", ") || "(none)"}`); - lines.push(`status: ${summary.status}`); - lines.push(`path_selection_mode: ${summary.pathSelectionMode}`); - if (requestedPaths.length > 0) { - lines.push(`requested_paths: ${requestedPaths.join(", ")}`); - } - if (blockers.length > 0) { - lines.push(`blockers: ${blockers.join(" | ")}`); - } - for (const step of steps) { - lines.push(`${step.lane}/${step.name}: ${step.status}`); - if (step.command) lines.push(` command: ${step.command}`); - lines.push(` stdout: ${step.stdout}`); - lines.push(` stderr: ${step.stderr}`); - if (step.note) lines.push(` note: ${step.note}`); - } - if (summary.failed) { - lines.push(`failure_command: ${process.env.FAILURE_COMMAND_ENV || ""}`); - } - const summaryText = lines.join("\n") + "\n"; - fs.writeFileSync(path.join(artifactDir, "summary.txt"), summaryText); - fs.writeFileSync(path.join(reportsDir, "summary.txt"), summaryText); - ' -} - 
-cleanup() { - release_real_runtime_lease -} - -CLASSIFIER_OUTPUT="" -ARTIFACT_DIR="" -ALLOW_REAL_RUNTIME=0 -DRY_RUN=0 -FAILED=0 -BLOCKED=0 -EXECUTED_REAL_STEPS=0 -FINAL_STATUS="" -FAILURE_STEP="" -FAILURE_COMMAND="" -FAILURE_STDOUT="" -FAILURE_STDERR="" -REAL_RUNTIME_LEASE_DIR="" -STARTED_AT="" -FINISHED_AT="" - -declare -a EXPLICIT_LANES=() -declare -a SELECTED_LANES=() -declare -a PATH_ARGS=() -declare -a COMMANDS_RUN=() -declare -a BLOCKERS=() - -while [[ $# -gt 0 ]]; do - case "$1" in - --lane) - EXPLICIT_LANES+=("$(normalize_lane_name "$2")") - shift 2 - ;; - --artifact-dir) - ARTIFACT_DIR=$2 - shift 2 - ;; - --allow-real-runtime|--allow-real-gui) - ALLOW_REAL_RUNTIME=1 - shift - ;; - --dry-run) - DRY_RUN=1 - shift - ;; - --help|-h) - usage - exit 0 - ;; - --) - shift - while [[ $# -gt 0 ]]; do - PATH_ARGS+=("$1") - shift - done - ;; - *) - PATH_ARGS+=("$1") - shift - ;; - esac -done - SCRIPT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) -REPO_ROOT=$(git rev-parse --show-toplevel 2>/dev/null || pwd) -SESSION_ID=$(generate_session_id) -PATH_SELECTION_MODE="git-inferred" -if [[ ${#PATH_ARGS[@]} -gt 0 ]]; then - PATH_SELECTION_MODE="explicit" +REPO_ROOT=$(cd "$SCRIPT_DIR/../../../.." && pwd) +TARGET="$REPO_ROOT/plugins/subminer-workflow/skills/subminer-change-verification/scripts/verify_subminer_change.sh" + +if [[ ! 
-x "$TARGET" ]]; then + echo "Missing canonical script: $TARGET" >&2 + exit 1 fi -if [[ -z "$ARTIFACT_DIR" ]]; then - mkdir -p "$REPO_ROOT/.tmp/skill-verification" - ARTIFACT_DIR="$REPO_ROOT/.tmp/skill-verification/$SESSION_ID" -fi - -SESSION_HOME="$ARTIFACT_DIR/home" -SESSION_XDG_CONFIG_HOME="$ARTIFACT_DIR/xdg" -SESSION_MPV_DIR="$ARTIFACT_DIR/mpv" -SESSION_LOGS_DIR="$ARTIFACT_DIR/logs" -SESSION_MPV_LOG="$SESSION_LOGS_DIR/mpv.log" - -mkdir -p "$ARTIFACT_DIR/steps" "$ARTIFACT_DIR/reports" "$SESSION_HOME" "$SESSION_XDG_CONFIG_HOME" "$SESSION_MPV_DIR" "$SESSION_LOGS_DIR" -STEPS_TSV="$ARTIFACT_DIR/steps.tsv" -: >"$STEPS_TSV" - -trap cleanup EXIT -STARTED_AT=$(timestamp_iso) - -if [[ ${#EXPLICIT_LANES[@]} -gt 0 ]]; then - local_lane="" - for local_lane in "${EXPLICIT_LANES[@]}"; do - add_lane "$local_lane" - done - printf 'reason:explicit lanes supplied\n' >"$ARTIFACT_DIR/classification.txt" -else - if [[ ${#PATH_ARGS[@]} -gt 0 ]]; then - CLASSIFIER_OUTPUT=$(bash "$SCRIPT_DIR/classify_subminer_diff.sh" "${PATH_ARGS[@]}") - else - CLASSIFIER_OUTPUT=$(bash "$SCRIPT_DIR/classify_subminer_diff.sh") - fi - printf '%s\n' "$CLASSIFIER_OUTPUT" >"$ARTIFACT_DIR/classification.txt" - while IFS= read -r line; do - case "$line" in - lane:*) - add_lane "${line#lane:}" - ;; - esac - done <<<"$CLASSIFIER_OUTPUT" -fi - -record_env - -printf 'artifact_dir=%s\n' "$ARTIFACT_DIR" -printf 'selected_lanes=%s\n' "$(IFS=,; echo "${SELECTED_LANES[*]}")" - -for lane in "${SELECTED_LANES[@]}"; do - case "$lane" in - docs) - run_step "$lane" "docs-test" "bun run docs:test" || break - [[ "$FAILED" == "1" ]] && break - run_step "$lane" "docs-build" "bun run docs:build" || break - ;; - config) - run_step "$lane" "test-config" "bun run test:config" || break - ;; - core) - run_step "$lane" "typecheck" "bun run typecheck" || break - [[ "$FAILED" == "1" ]] && break - run_step "$lane" "test-fast" "bun run test:fast" || break - ;; - launcher-plugin) - run_step "$lane" "launcher-smoke-src" "bun run 
test:launcher:smoke:src" || break - [[ "$FAILED" == "1" ]] && break - run_step "$lane" "plugin-src" "bun run test:plugin:src" || break - ;; - runtime-compat) - run_step "$lane" "build" "bun run build" || break - [[ "$FAILED" == "1" ]] && break - run_step "$lane" "test-runtime-compat" "bun run test:runtime:compat" || break - [[ "$FAILED" == "1" ]] && break - run_step "$lane" "test-smoke-dist" "bun run test:smoke:dist" || break - ;; - real-runtime) - if [[ "$PATH_SELECTION_MODE" != "explicit" ]]; then - record_blocked_step \ - "$lane" \ - "real-runtime-guard" \ - "real-runtime lane requires explicit paths; inferred local git changes are non-authoritative" - break - fi - - if [[ "$ALLOW_REAL_RUNTIME" != "1" ]]; then - record_blocked_step \ - "$lane" \ - "real-runtime-guard" \ - "real-runtime lane requested but --allow-real-runtime was not supplied" - break - fi - - if ! acquire_real_runtime_lease; then - record_blocked_step \ - "$lane" \ - "real-runtime-lease" \ - "real-runtime lease already held; rerun after the active runtime verification finishes" - break - fi - - if ! 
REAL_RUNTIME_HELPER=$(find_real_runtime_helper); then - record_blocked_step \ - "$lane" \ - "real-runtime-helper" \ - "real-runtime helper not implemented yet" - break - fi - - printf -v REAL_RUNTIME_COMMAND \ - 'SESSION_ID=%q HOME=%q XDG_CONFIG_HOME=%q SUBMINER_MPV_LOG=%q bash %q' \ - "$SESSION_ID" \ - "$SESSION_HOME" \ - "$SESSION_XDG_CONFIG_HOME" \ - "$SESSION_MPV_LOG" \ - "$REAL_RUNTIME_HELPER" - - run_step "$lane" "real-runtime-smoke" "$REAL_RUNTIME_COMMAND" || break - ;; - *) - record_failed_step "$lane" "lane-validation" "unknown lane: $lane" - break - ;; - esac - - if [[ "$FAILED" == "1" || "$BLOCKED" == "1" ]]; then - break - fi -done - -FINISHED_AT=$(timestamp_iso) -compute_final_status -write_summary_files - -printf 'status=%s\n' "$FINAL_STATUS" -printf 'artifact_dir=%s\n' "$ARTIFACT_DIR" - -case "$FINAL_STATUS" in - failed) - printf 'result=failed\n' - printf 'failure_command=%s\n' "$FAILURE_COMMAND" - exit 1 - ;; - blocked) - printf 'result=blocked\n' - exit 2 - ;; - *) - printf 'result=ok\n' - exit 0 - ;; -esac +exec "$TARGET" "$@" diff --git a/.agents/skills/subminer-scrum-master/SKILL.md b/.agents/skills/subminer-scrum-master/SKILL.md index 75e1308..94dad07 100644 --- a/.agents/skills/subminer-scrum-master/SKILL.md +++ b/.agents/skills/subminer-scrum-master/SKILL.md @@ -1,146 +1,18 @@ --- -name: "subminer-scrum-master" -description: "Use in the SubMiner repo when a request should be turned into planned work and driven through execution. Assesses whether backlog tracking is warranted, creates or updates tasks when needed, records a plan, dispatches one or more subagents, and requires verification before handoff." +name: 'subminer-scrum-master' +description: 'Compatibility shim. Canonical SubMiner scrum-master workflow now lives in the repo-local subminer-workflow plugin.' --- -# SubMiner Scrum Master +# Compatibility Shim -Own workflow, not code by default. 
+Canonical source: -Use this skill when the user gives a feature request, bug report, issue, refactor, or implementation ask and the agent should manage intake, planning, backlog hygiene, worker dispatch, and verification through completion. +- `plugins/subminer-workflow/skills/subminer-scrum-master/SKILL.md` -## Core Rules +When this shim is invoked: -1. Decide first whether backlog tracking is warranted. -2. If backlog is needed, search first. Update existing work when it clearly matches. -3. If backlog is not needed, keep the process light. Do not invent ticket ceremony. -4. Record a plan before dispatching coding work. -5. Use parent + subtasks for multi-part work when backlog is used. -6. Dispatch conservatively. Parallelize only disjoint write scopes. -7. Require verification before handoff, typically via `subminer-change-verification`. -8. Report backlog actions, dispatched workers, verification, blockers, and remaining risks. +1. Read the canonical plugin-owned skill. +2. Follow the plugin-owned skill as the source of truth. +3. Do not duplicate workflow changes here; update the plugin-owned skill instead. -## Backlog Decision - -Skip backlog when the request is: -- question only -- obvious mechanical edit -- tiny isolated change with no real planning - -Use backlog when the work: -- needs planning or scope decisions -- spans multiple phases or subsystems -- is likely to need subagent dispatch -- should remain traceable for handoff/resume - -If backlog is used: -- search existing tasks first -- create/update a standalone task for one focused deliverable -- create/update a parent task plus subtasks for multi-part work -- record the implementation plan in the task before implementation begins - -## Intake Workflow - -1. Parse the request. - Classify it as question, mechanical edit, bugfix, feature, refactor, investigation, or follow-up. -2. Decide whether backlog is needed. -3. 
If backlog is needed: - - search first - - update existing task if clearly relevant - - otherwise create the right structure - - write the implementation plan before dispatch -4. If backlog is skipped: - - write a short working plan in-thread - - proceed without fake ticketing -5. Choose execution mode: - - no subagents for trivial work - - one worker for focused work - - parallel workers only for disjoint scopes -6. Run verification before handoff. - -## Dispatch Rules - -The scrum master orchestrates. Workers implement. - -- Do not become the default implementer unless delegation is unnecessary. -- Do not parallelize overlapping files or tightly coupled runtime work. -- Give every worker explicit ownership of files/modules. -- Tell every worker other agents may be active and they must not revert unrelated edits. -- Require each worker to report: - - changed files - - tests run - - blockers - -Use worker agents for implementation and explorer agents only for bounded codebase questions. - -## Verification - -Every nontrivial code task gets verification. - -Preferred flow: -1. use `subminer-change-verification` -2. start with the cheapest sufficient lane -3. escalate only when needed -4. if worker verification is sufficient, accept it or run one final consolidating pass - -Never hand off nontrivial work without stating what was verified and what was skipped. - -## Pre-Handoff Policy Checks (Required) - -Before handoff, always ask and answer both of these questions explicitly: - -1. **Docs update required?** -2. **Changelog fragment required?** - -Rules: -- Do not assume silence implies "no." Record an explicit yes/no decision for each item. -- If the answer is yes, either complete the update or report the blocker before handoff. -- Include the final answers in the handoff summary even when both answers are "no." - -## Failure / Scope Handling - -- If a worker hits ambiguity, pause and ask the user. 
-- If verification fails, either: - - send the worker back with exact failure context, or - - fix it directly if it is tiny and clearly in scope -- If new scope appears, revisit backlog structure before silently expanding work. - -## Representative Flows - -### Trivial no-ticket work - -- decide backlog is unnecessary -- keep a short plan -- implement directly or with one worker if helpful -- run targeted verification -- report outcome concisely - -### Single-task implementation - -- search/create/update one task -- record plan -- dispatch one worker -- integrate -- verify -- update task and report outcome - -### Parent + subtasks execution - -- search/create/update parent task -- create subtasks for distinct deliverables/phases -- record sequencing in the plan -- dispatch workers only where scopes are disjoint -- integrate -- run consolidated verification -- update task state and report outcome - -## Output Expectations - -At the end, report: -- whether backlog was used and what changed -- which workers were dispatched and what they owned -- what verification ran -- explicit answers to: - - docs update required? - - changelog fragment required? -- blockers, skips, and risks +This shim exists so existing repo references and prompts keep resolving during the migration to the repo-local plugin workflow. 
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 47aa088..de8830d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -61,6 +61,16 @@ jobs: - name: Test suite (source) run: bun run test:fast + - name: Coverage suite (maintained source lane) + run: bun run test:coverage:src + + - name: Upload coverage artifact + uses: actions/upload-artifact@v4 + with: + name: coverage-test-src + path: coverage/test-src/lcov.info + if-no-files-found: error + - name: Launcher smoke suite (source) run: bun run test:launcher:smoke:src diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index cd37697..473c098 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -49,6 +49,16 @@ jobs: - name: Test suite (source) run: bun run test:fast + - name: Coverage suite (maintained source lane) + run: bun run test:coverage:src + + - name: Upload coverage artifact + uses: actions/upload-artifact@v4 + with: + name: coverage-test-src + path: coverage/test-src/lcov.info + if-no-files-found: error + - name: Launcher smoke suite (source) run: bun run test:launcher:smoke:src diff --git a/.gitignore b/.gitignore index d42112f..396421c 100644 --- a/.gitignore +++ b/.gitignore @@ -9,6 +9,7 @@ out/ dist/ release/ build/yomitan/ +coverage/ # Launcher build artifact (produced by make build-launcher) /subminer diff --git a/AGENTS.md b/AGENTS.md index a6112b9..8f7fa58 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -83,7 +83,6 @@ This project uses Backlog.md MCP for all task and project management activities. 
- **When to read it**: BEFORE creating tasks, or when you're unsure whether to track work These guides cover: - - Decision framework for when to create tasks - Search-first workflow to avoid duplicates - Links to detailed guides for task creation, execution, and finalization diff --git a/Backlog.md b/Backlog.md new file mode 100644 index 0000000..9beed53 --- /dev/null +++ b/Backlog.md @@ -0,0 +1,266 @@ +# Backlog + +Purpose: lightweight repo-local task board. Seeded with current testing / coverage work. + +Status keys: + +- `todo`: not started +- `doing`: in progress +- `blocked`: waiting +- `done`: shipped + +Priority keys: + +- `P0`: urgent / release-risk +- `P1`: high value +- `P2`: useful cleanup +- `P3`: nice-to-have + +## Active + +| ID | Pri | Status | Area | Title | +| ------ | --- | ------ | -------------- | --------------------------------------------------- | +| SM-013 | P1 | done | review-followup | Address PR #36 CodeRabbit action items | + +## Ready + +| ID | Pri | Status | Area | Title | +| ------ | --- | ------ | ----------------- | ---------------------------------------------------------------- | +| SM-001 | P1 | todo | launcher | Add tests for CLI parser and args normalizer | +| SM-002 | P1 | todo | immersion-tracker | Backfill tests for uncovered query exports | +| SM-003 | P1 | todo | anki | Add focused field-grouping service + merge edge-case tests | +| SM-004 | P2 | todo | tests | Extract shared test utils for deps factories and polling helpers | +| SM-005 | P2 | todo | tests | Strengthen weak assertions in app-ready and IPC tests | +| SM-006 | P2 | todo | tests | Break up monolithic youtube-flow and subtitle-sidebar tests | +| SM-007 | P2 | todo | anilist | Add tests for AniList rate limiter | +| SM-008 | P3 | todo | subtitles | Add core subtitle-position persistence/path tests | +| SM-009 | P3 | todo | tokenizer | Add tests for JLPT token filter | +| SM-010 | P1 | todo | immersion-tracker | Refactor storage + immersion-tracker service into 
focused modules | +| SM-011 | P1 | done | tests | Add coverage reporting for maintained test lanes | +| SM-012 | P2 | done | config/runtime | Replace JSON serialize-clone helpers with structured cloning | + +## Icebox + +None. + +## Ticket Details + +### SM-001 + +Title: Add tests for CLI parser and args normalizer +Priority: P1 +Status: done +Scope: + +- `launcher/config/cli-parser-builder.ts` +- `launcher/config/args-normalizer.ts` + Acceptance: +- root options parsing covered +- subcommand routing covered +- invalid action / invalid log level / invalid backend cases covered +- target classification covered: file, directory, URL, invalid + +### SM-002 + +Title: Backfill tests for uncovered query exports +Priority: P1 +Status: todo +Scope: + +- `src/core/services/immersion-tracker/query-*.ts` + Targets: +- headword helpers +- anime/media detail helpers not covered by existing wrapper tests +- lexical detail / appearance helpers +- maintenance helpers beyond `deleteSession` and `upsertCoverArt` + Acceptance: +- every exported query helper either directly tested or explicitly justified as covered elsewhere +- at least one focused regression per complex SQL branch / aggregation branch + +### SM-003 + +Title: Add focused field-grouping service + merge edge-case tests +Priority: P1 +Status: todo +Scope: + +- `src/anki-integration/field-grouping.ts` +- `src/anki-integration/field-grouping-merge.ts` + Acceptance: +- auto/manual/disabled flow branches covered +- duplicate-card preview failure path covered +- merge edge cases covered: empty fields, generated media fallback, strict grouped spans, audio synchronization + +### SM-004 + +Title: Extract shared test utils for deps factories and polling helpers +Priority: P2 +Status: todo +Scope: + +- common `makeDeps` / `createDeps` helpers +- common `waitForCondition` + Acceptance: +- shared helper module added +- at least 3 duplicated polling helpers removed +- at least 5 duplicated deps factories consolidated or clearly 
prepared for follow-up migration + +### SM-005 + +Title: Strengthen weak assertions in app-ready and IPC tests +Priority: P2 +Status: todo +Scope: + +- `src/core/services/app-ready.test.ts` +- `src/core/services/ipc.test.ts` + Acceptance: +- replace broad `assert.ok(...)` presence checks with exact value / order assertions where expected value known +- handler registration tests assert channel-specific behavior, not only existence + +### SM-006 + +Title: Break up monolithic youtube-flow and subtitle-sidebar tests +Priority: P2 +Status: todo +Scope: + +- `src/main/runtime/youtube-flow.test.ts` +- `src/renderer/modals/subtitle-sidebar.test.ts` + Acceptance: +- reduce single-test breadth +- split largest tests into focused cases by behavior +- keep semantics unchanged + +### SM-007 + +Title: Add tests for AniList rate limiter +Priority: P2 +Status: todo +Scope: + +- `src/core/services/anilist/rate-limiter.ts` + Acceptance: +- capacity-window wait behavior covered +- `x-ratelimit-remaining` + reset handling covered +- `retry-after` handling covered + +### SM-008 + +Title: Add core subtitle-position persistence/path tests +Priority: P3 +Status: todo +Scope: + +- `src/core/services/subtitle-position.ts` + Acceptance: +- save/load persistence covered +- fallback behavior covered +- path normalization behavior covered for URL vs local target + +### SM-009 + +Title: Add tests for JLPT token filter +Priority: P3 +Status: todo +Scope: + +- `src/core/services/jlpt-token-filter.ts` + Acceptance: +- excluded term membership covered +- ignored POS1 membership covered +- exported list / entry consistency covered + +### SM-010 + +Title: Refactor storage + immersion-tracker service into focused layers without API changes +Priority: P1 +Status: todo +Scope: + +- `src/core/database/storage/storage.ts` +- `src/core/database/storage/schema.ts` +- `src/core/database/storage/cover-blob.ts` +- `src/core/database/storage/records.ts` +- `src/core/database/storage/write-path.ts` +- 
`src/core/services/immersion-tracker/youtube.ts` +- `src/core/services/immersion-tracker/youtube-manager.ts` +- `src/core/services/immersion-tracker/write-queue.ts` +- `src/core/services/immersion-tracker/immersion-tracker-service.ts` + +Acceptance: + +- behavior and public API remain unchanged for all callers +- `storage.ts` responsibilities split into DDL/migrations, cover blob helpers, record CRUD, and write-path execution +- `immersion-tracker-service.ts` reduces to session state, media change orchestration, query proxies, and lifecycle +- YouTube code split into pure utilities, a stateful manager (`YouTubeManager`), and a dedicated write queue (`WriteQueue`) +- removed `storage.ts` is replaced with focused modules and updated imports +- no API or migration regressions; existing tests for trackers/storage coverage remain green or receive focused updates + +### SM-011 + +Title: Add coverage reporting for maintained test lanes +Priority: P1 +Status: done +Scope: + +- `package.json` +- CI workflow files under `.github/` +- `docs/workflow/verification.md` + Acceptance: +- at least one maintained test lane emits machine-readable coverage output +- CI surfaces coverage as an artifact, summary, or check output +- local contributor path for coverage is documented +- chosen coverage path works with Bun/TypeScript lanes already maintained by the repo +Implementation note: +- Added `bun run test:coverage:src` for the maintained source lane via a sharded coverage runner, with merged LCOV output at `coverage/test-src/lcov.info` and CI/release artifact upload as `coverage-test-src`. 
+ +### SM-012 + +Title: Replace JSON serialize-clone helpers with structured cloning +Priority: P2 +Status: todo +Scope: + +- `src/runtime-options.ts` +- `src/config/definitions.ts` +- `src/config/service.ts` +- `src/main/controller-config-update.ts` + Acceptance: +- runtime/config clone helpers stop using `JSON.parse(JSON.stringify(...))` +- replacement preserves current behavior for plain config/runtime objects +- focused tests cover clone/merge behavior that could regress during the swap +- no new clone helper is introduced in these paths without a documented reason + +Done: + +- replaced JSON serialize-clone call sites in runtime/config/controller update paths with `structuredClone` +- updated focused tests and fixtures to cover detached clone behavior and guard against regressions + +### SM-013 + +Title: Address PR #36 CodeRabbit action items +Priority: P1 +Status: done +Scope: + +- `plugins/subminer-workflow/skills/subminer-change-verification/scripts/verify_subminer_change.sh` +- `scripts/subminer-change-verification.test.ts` +- `src/core/services/immersion-tracker/query-sessions.ts` +- `src/core/services/immersion-tracker/query-trends.ts` +- `src/core/services/immersion-tracker/maintenance.ts` +- `src/main/boot/services.ts` +- `src/main/character-dictionary-runtime/zip.test.ts` +Acceptance: +- fix valid open CodeRabbit findings on PR #36 +- add focused regression coverage for behavior changes where practical +- verify touched tests plus typecheck stay green + +Done: + +- hardened `--artifact-dir` validation in the verification script +- fixed trend aggregation rounding and monthly ratio bucketing +- preserved unwatched anime episodes in episode queries +- restored seconds-based aggregate timestamps in shared maintenance +- fixed the startup refactor compile break by making the predicates local at the call site +- verified with `bun test src/core/services/immersion-tracker/__tests__/query.test.ts 
src/core/services/immersion-tracker/__tests__/query-split-modules.test.ts` and `bun run typecheck` diff --git a/CHANGELOG.md b/CHANGELOG.md index 8ece256..873a18a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,26 @@ # Changelog +## v0.10.0 (2026-03-29) + +### Changed +- Integrations: Replaced the deprecated Discord Rich Presence wrapper with the maintained `@xhayper/discord-rpc` package. + +### Fixed +- Stats: Fixed stats startup so the immersion tracker can run when `Bun.serve` is unavailable. +- Stats: Stats server now falls back to a Node `http` listener in Electron/runtime paths that do not expose Bun. +- Overlay: Fixed the macOS visible-overlay toggle path so manual hides stay hidden and the plugin uses the explicit visible-overlay toggle command. +- Subtitle Sidebar: Restored macOS mpv passthrough while the overlay subtitle sidebar is open so clicks outside the sidebar can refocus mpv and keep native keybindings working. + +### Internal +- Release: Added a maintained source coverage lane that shards Bun coverage one test file at a time and merges LCOV output into `coverage/test-src/lcov.info`. +- Release: CI and release quality-gate now upload the merged source-lane LCOV artifact for inspection. +- Runtime: Extracted remaining inline runtime logic from `src/main.ts` into dedicated runtime modules and composer helpers. +- Runtime: Added focused regression tests for the extracted runtime/composer boundaries. +- Runtime: Updated task tracking notes to mark TASK-238.6 complete and confirm follow-on boot-phase split can be deferred. +- Runtime: Split `src/main.ts` boot wiring into dedicated `src/main/boot/services.ts`, `src/main/boot/runtimes.ts`, and `src/main/boot/handlers.ts` modules. +- Runtime: Added focused tests for the new boot-phase seams and kept the startup/typecheck/build verification lanes green. +- Runtime: Updated internal architecture/task docs to record the boot-phase split and new ownership boundary. 
+ ## v0.9.3 (2026-03-25) ### Changed diff --git a/backlog/completed/task-243 - Assess-and-address-PR-36-latest-CodeRabbit-review-round.md b/backlog/completed/task-243 - Assess-and-address-PR-36-latest-CodeRabbit-review-round.md new file mode 100644 index 0000000..cdd612a --- /dev/null +++ b/backlog/completed/task-243 - Assess-and-address-PR-36-latest-CodeRabbit-review-round.md @@ -0,0 +1,35 @@ +--- +id: TASK-243 +title: 'Assess and address PR #36 latest CodeRabbit review round' +status: Done +assignee: [] +created_date: '2026-03-29 07:39' +updated_date: '2026-03-29 07:41' +labels: + - code-review + - pr-36 +dependencies: [] +references: + - 'https://github.com/ksyasuda/SubMiner/pull/36' +priority: high +ordinal: 3600 +--- + +## Description + + +Inspect the latest CodeRabbit review round on PR #36, verify each actionable comment against the current branch, implement the confirmed fixes, and verify the touched paths. + + +## Acceptance Criteria + +- [ ] #1 Confirmed review comments are implemented or explicitly deferred with rationale. +- [ ] #2 Touched paths are verified with the smallest sufficient test/build lane. +- [ ] #3 Current PR feedback is reduced to resolved or intentionally deferred suggestions. + + +## Final Summary + + +Addressed the confirmed latest CodeRabbit review items on PR #36. `scripts/run-coverage-lane.ts` now uses the Bun-style `import.meta.main` entrypoint check with a local ts-ignore to preserve the repo's CommonJS typecheck settings. `src/core/services/immersion-tracker/maintenance.ts` no longer shadows the imported `nowMs` helper in retention functions. `src/main.ts` now centralizes the startup-mode predicates behind a shared helper and releases `resolvedSource.cleanup` on the cached-subtitle fast path so materialized sources do not leak. 
+ diff --git a/backlog/completed/task-244 - Assess-and-address-PR-36-latest-CodeRabbit-review-round-2.md b/backlog/completed/task-244 - Assess-and-address-PR-36-latest-CodeRabbit-review-round-2.md new file mode 100644 index 0000000..dd7bcef --- /dev/null +++ b/backlog/completed/task-244 - Assess-and-address-PR-36-latest-CodeRabbit-review-round-2.md @@ -0,0 +1,35 @@ +--- +id: TASK-244 +title: 'Assess and address PR #36 latest CodeRabbit review round 2' +status: Done +assignee: [] +created_date: '2026-03-29 08:09' +updated_date: '2026-03-29 08:10' +labels: + - code-review + - pr-36 +dependencies: [] +references: + - 'https://github.com/ksyasuda/SubMiner/pull/36' +priority: high +ordinal: 3610 +--- + +## Description + + +Inspect the newest CodeRabbit review round on PR #36, verify the actionable comment against the current branch, implement the confirmed fix, and verify the touched path. + + +## Acceptance Criteria + +- [ ] #1 The actionable review comment is implemented or explicitly deferred with rationale. +- [ ] #2 Touched path is verified with the smallest sufficient test lane. +- [ ] #3 Current PR feedback is reduced to resolved or intentionally deferred suggestions. + + +## Final Summary + + +Addressed the actionable latest CodeRabbit comment on PR #36. `src/core/services/immersion-tracker/maintenance.ts` now skips retention deletions when a window is disabled with `Infinity`, so `toDbMs(...)` is only called for finite retention values. Added a regression test in `maintenance.test.ts` that verifies disabled retention windows preserve session events, telemetry, and sessions while returning zero deletions. 
+ diff --git a/backlog/config.yml b/backlog/config.yml index f0233bb..cf450d5 100644 --- a/backlog/config.yml +++ b/backlog/config.yml @@ -1,11 +1,11 @@ -project_name: 'SubMiner' -default_status: 'To Do' -statuses: ['To Do', 'In Progress', 'Done'] +project_name: "SubMiner" +default_status: "To Do" +statuses: ["To Do", "In Progress", "Done"] labels: [] definition_of_done: [] date_format: yyyy-mm-dd max_column_width: 20 -default_editor: 'nvim' +default_editor: "nvim" auto_open_browser: false default_port: 6420 remote_operations: true @@ -13,4 +13,4 @@ auto_commit: false bypass_git_hooks: false check_active_branches: true active_branch_days: 30 -task_prefix: 'task' +task_prefix: "task" diff --git a/backlog/milestones/m-2 - mining-workflow-upgrades.md b/backlog/milestones/m-2 - mining-workflow-upgrades.md new file mode 100644 index 0000000..32b1e65 --- /dev/null +++ b/backlog/milestones/m-2 - mining-workflow-upgrades.md @@ -0,0 +1,8 @@ +--- +id: m-2 +title: 'Mining Workflow Upgrades' +--- + +## Description + +Future user-facing workflow improvements that directly improve discoverability, previewability, and mining control without depending on speculative platform integrations like OCR, marketplace infrastructure, or cloud sync. 
diff --git a/backlog/tasks/task-238 - Codebase-health-follow-up-decompose-remaining-oversized-runtime-surfaces.md b/backlog/tasks/task-238 - Codebase-health-follow-up-decompose-remaining-oversized-runtime-surfaces.md new file mode 100644 index 0000000..0dc3ab9 --- /dev/null +++ b/backlog/tasks/task-238 - Codebase-health-follow-up-decompose-remaining-oversized-runtime-surfaces.md @@ -0,0 +1,59 @@ +--- +id: TASK-238 +title: 'Codebase health follow-up: decompose remaining oversized runtime surfaces' +status: To Do +assignee: [] +created_date: '2026-03-26 20:49' +labels: + - tech-debt + - maintainability + - runtime +milestone: m-0 +dependencies: [] +references: + - src/main.ts + - src/types.ts + - src/main/character-dictionary-runtime.ts + - src/core/services/immersion-tracker/query.ts + - backlog/tasks/task-87 - Codebase-health-harden-verification-and-retire-dead-architecture-identified-in-the-March-2026-review.md + - backlog/completed/task-87.4 - Runtime-composition-root-remove-dead-symbols-and-tighten-module-boundaries-in-src-main.ts.md + - backlog/completed/task-87.6 - Anki-integration-maintainability-continue-decomposing-the-oversized-orchestration-layer.md + - backlog/tasks/task-238.6 - Extract-remaining-inline-runtime-logic-and-composer-gaps-from-src-main.ts.md + - backlog/tasks/task-238.7 - Split-src-main.ts-into-boot-phase-services-runtimes-and-handlers.md +priority: high +--- + +## Description + + +Follow up the March 2026 codebase-health work with a narrower pass over the biggest remaining production hotspots. The latest review correctly flags `src/main.ts` and `src/types.ts` as maintainability pressure, but it also misses the next real large surfaces that will keep slowing future work: `src/main/character-dictionary-runtime.ts` and `src/core/services/immersion-tracker/query.ts`. 
This parent task should track focused decomposition work that preserves behavior, avoids redoing already-completed dead-architecture cleanup, and keeps each slice small enough for isolated implementation. + + +## Acceptance Criteria + + +- [ ] #1 Child tasks exist for each focused cleanup slice instead of one broad “split the monoliths” effort. +- [ ] #2 The parent task records sequencing so agents do not overlap on `src/main.ts` and other shared surfaces. +- [ ] #3 The selected follow-up tasks target still-live pressure points, not already-completed work like TASK-87.4, TASK-87.5, or TASK-87.6. +- [ ] #4 Completion of the child tasks leaves runtime wiring, shared types, character-dictionary orchestration, and immersion-tracker queries materially easier to review and extend. + + +## Implementation Plan + + +Recommended sequencing: + +1. Start TASK-238.3 first. A compatibility-first type split reduces churn risk for the later runtime/query refactors. +2. Run TASK-238.4 and TASK-238.5 in parallel after TASK-238.3 if desired; they touch different domains. +3. Run TASK-238.1 after or alongside the domain refactors, but keep it focused on window/bootstrap composition only. +4. Run TASK-238.2 after TASK-238.1 because both touch `src/main.ts` and the CLI/headless flow should build on the cleaner composition root. +5. Run TASK-238.6 after the current composer/setup-window-factory work lands, so the remaining inline runtime logic and composer gaps are extracted from the already-cleaned composition root. +6. Run TASK-238.7 only after TASK-238.6 confirms the remaining entrypoint surface still justifies a boot-phase split; then move the boot wiring into dedicated service/runtime/handler modules. + +Shared guardrails: + +- Do not reopen already-completed dead-module cleanup from TASK-87.5 unless new evidence appears. +- Keep `src/types.ts` migration compatibility-first; avoid a repo-wide import churn bomb. 
+- Prefer extracting named runtime/domain modules over moving code into new giant helper files. +- Verify each slice with the cheapest sufficient lane, then escalate when a task crosses runtime/build boundaries. + diff --git a/backlog/tasks/task-238.1 - Extract-main-window-and-overlay-window-composition-from-src-main.ts.md b/backlog/tasks/task-238.1 - Extract-main-window-and-overlay-window-composition-from-src-main.ts.md new file mode 100644 index 0000000..2a38570 --- /dev/null +++ b/backlog/tasks/task-238.1 - Extract-main-window-and-overlay-window-composition-from-src-main.ts.md @@ -0,0 +1,45 @@ +--- +id: TASK-238.1 +title: Extract main-window and overlay-window composition from src/main.ts +status: To Do +assignee: [] +created_date: '2026-03-26 20:49' +labels: + - tech-debt + - runtime + - windows + - maintainability +milestone: m-0 +dependencies: [] +references: + - src/main.ts + - src/main/runtime/composers + - src/main/runtime/overlay-runtime-bootstrap.ts + - docs/architecture/README.md +parent_task_id: TASK-238 +priority: high +--- + +## Description + + +`src/main.ts` still directly owns several `BrowserWindow` construction and window-lifecycle paths, including overlay-adjacent windows and setup flows. That keeps the composition root far larger than intended and makes window behavior hard to test in isolation. Extract the remaining window/bootstrap composition into named runtime modules so `src/main.ts` mostly wires dependencies and app lifecycle events together. + + +## Acceptance Criteria + + +- [ ] #1 At least the main overlay window path plus two other window/setup flows are extracted from direct `BrowserWindow` construction inside `src/main.ts`. +- [ ] #2 The extracted modules expose narrow factory/handler APIs that can be tested without booting the whole app. +- [ ] #3 `src/main.ts` becomes materially smaller and easier to scan, with window creation concentrated behind well-named runtime surfaces. 
+- [ ] #4 Relevant runtime/window tests pass, and new tests are added for any newly isolated window composition helpers. + + +## Implementation Plan + + +1. Map the remaining direct `BrowserWindow` creation sites in `src/main.ts` and group them by shared lifecycle concerns. +2. Extract coherent modules for construction, preload/path resolution, and open/focus/reuse behavior rather than moving raw option objects wholesale. +3. Update the composition root to consume the new modules and keep side effects/app state ownership explicit. +4. Verify with focused runtime/window tests plus `bun run typecheck`. + diff --git a/backlog/tasks/task-238.2 - Extract-CLI-and-headless-command-wiring-from-src-main.ts.md b/backlog/tasks/task-238.2 - Extract-CLI-and-headless-command-wiring-from-src-main.ts.md new file mode 100644 index 0000000..d536d68 --- /dev/null +++ b/backlog/tasks/task-238.2 - Extract-CLI-and-headless-command-wiring-from-src-main.ts.md @@ -0,0 +1,46 @@ +--- +id: TASK-238.2 +title: Extract CLI and headless command wiring from src/main.ts +status: To Do +assignee: [] +created_date: '2026-03-26 20:49' +labels: + - tech-debt + - cli + - runtime + - maintainability +milestone: m-0 +dependencies: + - TASK-238.1 +references: + - src/main.ts + - src/main/cli-runtime.ts + - src/cli/args.ts + - launcher +parent_task_id: TASK-238 +priority: high +--- + +## Description + + +`src/main.ts` still owns the headless-initial-command flow, argument handling, and a large amount of CLI/runtime bridging. That makes non-window startup paths difficult to reason about and keeps CLI behavior coupled to unrelated desktop boot logic. Extract the remaining CLI/headless orchestration into dedicated runtime services so the main entrypoint only decides which startup path to invoke. + + +## Acceptance Criteria + + +- [ ] #1 CLI parsing, initial-command dispatch, and headless command execution no longer live as large inline flows in `src/main.ts`. 
+- [ ] #2 The new modules make the desktop startup path and headless startup path visibly separate and easier to test. +- [ ] #3 Existing CLI behaviors remain unchanged, including help output and startup gating behavior. +- [ ] #4 Targeted CLI/runtime tests cover the extracted path, and `bun run typecheck` passes. + + +## Implementation Plan + + +1. Map the current `parseArgs` / `handleInitialArgs` / `runHeadlessInitialCommand` / `handleCliCommand` flow in `src/main.ts`. +2. Extract a small startup-path selector plus dedicated runtime services for headless execution and interactive startup dispatch. +3. Keep Electron app ownership in `src/main.ts`; move only CLI orchestration and context assembly. +4. Verify with CLI-focused tests plus `bun run typecheck`. + diff --git a/backlog/tasks/task-238.3 - Introduce-domain-type-entrypoints-and-shrink-src-types.ts-import-surface.md b/backlog/tasks/task-238.3 - Introduce-domain-type-entrypoints-and-shrink-src-types.ts-import-surface.md new file mode 100644 index 0000000..ed28cfb --- /dev/null +++ b/backlog/tasks/task-238.3 - Introduce-domain-type-entrypoints-and-shrink-src-types.ts-import-surface.md @@ -0,0 +1,59 @@ +--- +id: TASK-238.3 +title: Introduce domain type entrypoints and shrink src/types.ts import surface +status: Done +assignee: [] +created_date: '2026-03-26 20:49' +updated_date: '2026-03-27 00:14' +labels: + - tech-debt + - types + - maintainability +milestone: m-0 +dependencies: [] +references: + - src/types.ts + - src/shared/ipc/contracts.ts + - src/config/service.ts + - docs/architecture/README.md +parent_task_id: TASK-238 +priority: medium +--- + +## Description + + +`src/types.ts` has become the repo-wide dumping ground for unrelated domains. Splitting it is still worthwhile, but a big-bang move would create noisy churn across a large import graph. 
Introduce domain entrypoints under `src/types/` and migrate the highest-churn imports first while leaving `src/types.ts` as a compatibility barrel until the new structure is proven. + + +## Acceptance Criteria + + +- [x] #1 Domain-focused type modules exist for the main clusters currently mixed together in `src/types.ts` (for example Anki, config/runtime, subtitle/media, and integration/runtime-option types). +- [x] #2 `src/types.ts` becomes a thinner compatibility layer or barrel instead of the sole source of truth for every shared type. +- [x] #3 A meaningful set of imports is migrated to the new entrypoints without breaking the maintained typecheck/test lanes. +- [x] #4 The new structure is documented well enough that contributors can tell where new shared types should live. + + +## Implementation Plan + + +1. Inventory the main type clusters in `src/types.ts` and choose stable domain seams. +2. Create `src/types/` modules and re-export through `src/types.ts` so the migration can be incremental. +3. Migrate the highest-value import sites first, especially config/runtime and Anki-heavy surfaces. +4. Verify with `bun run typecheck` and the cheapest test lane covering touched domains. + + +## Implementation Notes + + +Implemented domain entrypoints under `src/types/` and kept `src/types.ts` as a compatibility barrel (`src/types/anki.ts`, `src/types/config.ts`, `src/types/integrations.ts`, `src/types/runtime.ts`, `src/types/runtime-options.ts`, `src/types/subtitle.ts`). Migrated the highest-value import surfaces away from `src/types.ts` in config/runtime/Anki-related modules and shared IPC surfaces. Added type-level regression coverage in `src/types-domain-entrypoints.type-test.ts`. + +Aligned docs in `docs/architecture/README.md`, `docs/architecture/domains.md`, and `docs-site/changelog.md` to support the change and clear docs-site sync mismatch. + + +## Final Summary + + +Task completed with commit `5dd8bb7f` (`refactor: split shared type entrypoints`). 
The refactor introduced domain type entrypoints, shrank the `src/types.ts` import surface, updated import consumers, and recorded verification evidence in the local verifier artifacts. Backlog now tracks TASK-238.3 as done. + diff --git a/backlog/tasks/task-238.4 - Decompose-character-dictionary-runtime-into-fetch-build-and-cache-modules.md b/backlog/tasks/task-238.4 - Decompose-character-dictionary-runtime-into-fetch-build-and-cache-modules.md new file mode 100644 index 0000000..725cff3 --- /dev/null +++ b/backlog/tasks/task-238.4 - Decompose-character-dictionary-runtime-into-fetch-build-and-cache-modules.md @@ -0,0 +1,58 @@ +--- +id: TASK-238.4 +title: Decompose character dictionary runtime into fetch, build, and cache modules +status: Done +updated_date: '2026-03-27 00:20' +assignee: [] +created_date: '2026-03-26 20:49' +labels: + - tech-debt + - runtime + - anilist + - maintainability +milestone: m-0 +dependencies: + - TASK-238.3 +references: + - src/main/character-dictionary-runtime.ts + - src/main/runtime/character-dictionary-auto-sync.ts + - docs/architecture/README.md +parent_task_id: TASK-238 +priority: medium +--- + +## Description + + +`src/main/character-dictionary-runtime.ts` is now one of the largest live production files in the repo and combines AniList transport, name normalization, snapshot/image shaping, cache management, and zip packaging. That file will keep growing as character-dictionary features evolve. Split it into focused modules so the runtime surface becomes orchestration instead of a catch-all implementation blob. + + +## Acceptance Criteria + + +- [x] #1 AniList fetch/parsing logic, dictionary-entry building, and snapshot/cache/zip persistence no longer live in one giant file. +- [x] #2 The public runtime API stays behavior-compatible for current callers. +- [x] #3 The top-level runtime/orchestration file becomes materially smaller and easier to review. 
+- [x] #4 Existing character-dictionary tests still pass, and new focused tests cover the extracted modules where needed. + + +## Implementation Plan + + +1. Identify the dominant concern boundaries inside `src/main/character-dictionary-runtime.ts`. +2. Extract fetch/transform/persist modules with narrow interfaces, keeping data-shape ownership explicit. +3. Leave the exported runtime API stable for current main-process callers. +4. Verify with the maintained character-dictionary/runtime test lane plus `bun run typecheck`. + + +## Implementation Notes + + +Split `src/main/character-dictionary-runtime.ts` into focused modules under `src/main/character-dictionary-runtime/` (`fetch`, `build`, `cache`, plus helper modules). The orchestrator stayed as a compatibility shim/API surface with delegated module functions. Added focused tests for cache snapshot semantics and term rebuild + collapsible-open-state behavior in the new modules. Updated runtime architecture docs in `docs/architecture/domains.md` and `docs-site/architecture.md`. + + +## Final Summary + + +Task completed with commit `5b06579e` (`refactor: split character dictionary runtime modules`). Runtime refactor landed with regression coverage and verification including runtime-compat lanes, and all changed behavior was validated as API-compatible for callers. 
+ diff --git a/backlog/tasks/task-238.5 - Split-immersion-tracker-query-layer-into-focused-read-model-modules.md b/backlog/tasks/task-238.5 - Split-immersion-tracker-query-layer-into-focused-read-model-modules.md new file mode 100644 index 0000000..bd9d507 --- /dev/null +++ b/backlog/tasks/task-238.5 - Split-immersion-tracker-query-layer-into-focused-read-model-modules.md @@ -0,0 +1,61 @@ +--- +id: TASK-238.5 +title: Split immersion tracker query layer into focused read-model modules +status: Done +assignee: + - codex +created_date: '2026-03-26 20:49' +updated_date: '2026-03-27 00:00' +labels: + - tech-debt + - stats + - database + - maintainability +milestone: m-0 +dependencies: + - TASK-238.3 +references: + - src/core/services/immersion-tracker/query.ts + - src/core/services/stats-server.ts + - src/core/services/immersion-tracker-service.ts +parent_task_id: TASK-238 +priority: medium +--- + +## Description + + +`src/core/services/immersion-tracker/query.ts` has grown into a large mixed read/write/maintenance surface that owns library queries, timeline/detail queries, cleanup helpers, and rollup rebuild hooks. That size makes stats work harder to change safely. Split the query layer into focused read-model and maintenance modules so future stats/dashboard work does not keep landing in one 2500-line file. + + +## Acceptance Criteria + + +- [x] #1 Query responsibilities are grouped into focused modules such as library/session detail, vocabulary/kanji detail, and maintenance/cleanup helpers. +- [x] #2 The stats server and immersion tracker service depend on stable exported query surfaces instead of one monolithic file. +- [x] #3 The refactor preserves current SQL behavior and existing statistics outputs. +- [x] #4 Existing stats/immersion tests still pass, with added focused coverage where extraction creates new seams. + + +## Implementation Plan + + +1. Inventory the major query clusters and choose modules that match current caller boundaries. +2. 
Extract without changing schema or response contracts unless a narrow cleanup is required for compile/test health. +3. Keep SQL ownership close to the domain module that consumes it; avoid a giant `queries/` dump with no structure. +4. Verify with the maintained stats/immersion test lane plus `bun run typecheck`. + + +## Implementation Notes + + +Split the monolithic query surface into focused read-model modules for sessions, trends, lexical data, library lookups, and maintenance helpers. Updated the service and test imports to use the new module boundaries. + +Verification: `bun run typecheck` passed. Focused query and stats-server tests passed, including the `stats-server.test.ts` coverage around the new Bun fallback path. + + +## Final Summary + + +Extracted the immersion-tracker query layer into smaller read-model modules and kept the compatibility barrel in place so existing call sites can transition cleanly. Added focused coverage and verified the refactor with typecheck plus targeted tests. 
+ diff --git a/backlog/tasks/task-238.6 - Extract-remaining-inline-runtime-logic-and-composer-gaps-from-src-main.ts.md b/backlog/tasks/task-238.6 - Extract-remaining-inline-runtime-logic-and-composer-gaps-from-src-main.ts.md new file mode 100644 index 0000000..9ef0d6d --- /dev/null +++ b/backlog/tasks/task-238.6 - Extract-remaining-inline-runtime-logic-and-composer-gaps-from-src-main.ts.md @@ -0,0 +1,84 @@ +--- +id: TASK-238.6 +title: Extract remaining inline runtime logic and composer gaps from src/main.ts +status: Done +assignee: [] +created_date: '2026-03-27 00:00' +updated_date: '2026-03-27 22:13' +labels: + - tech-debt + - runtime + - maintainability + - composers +milestone: m-0 +dependencies: + - TASK-238.1 + - TASK-238.2 +references: + - src/main.ts + - src/main/runtime/youtube-flow.ts + - src/main/runtime/autoplay-ready-gate.ts + - src/main/runtime/subtitle-prefetch-init.ts + - src/main/runtime/discord-presence-runtime.ts + - src/main/overlay-modal-state.ts + - src/main/runtime/composers +parent_task_id: TASK-238 +priority: high +--- + +## Description + + +`src/main.ts` still mixes two concerns: pure dependency wiring and inline runtime logic. The earlier composer extractions reduce the wiring burden, but the file still owns several substantial behavior blocks and a few large inline dependency groupings. This task tracks the next maintainability pass: move the remaining runtime logic into the appropriate domain modules, add missing composer wrappers for the biggest grouped handler blocks, and reassess whether a boot-phase split is still necessary after the entrypoint becomes mostly wiring. + + +## Acceptance Criteria + + +- [x] #1 `runYoutubePlaybackFlow`, `maybeSignalPluginAutoplayReady`, `refreshSubtitlePrefetchFromActiveTrack`, `publishDiscordPresence`, and `handleModalInputStateChange` no longer live as substantial inline logic in `src/main.ts`. 
+- [x] #2 The large subtitle/prefetch, stats startup, and overlay visibility dependency groupings are wrapped behind named composer helpers instead of remaining inline in `src/main.ts`. +- [x] #3 `src/main.ts` reads primarily as a boot and lifecycle coordinator, with domain behavior concentrated in named runtime modules. +- [x] #4 Focused tests cover the extracted behavior or the new composer surfaces. +- [x] #5 The task records whether the remaining size still justifies a boot-phase split or whether that follow-up can wait. + + +## Implementation Plan + + +Recommended sequence: + +1. Let the current composer and `setup-window-factory` work land first so this slice starts from a stable wiring baseline. +2. Extract the five inline runtime functions into their natural domain modules or direct equivalents. +3. Add or extend composer helpers for subtitle/prefetch, stats startup, and overlay visibility handler grouping. +4. Re-scan `src/main.ts` after the extraction and decide whether a boot-phase split is still the right next task. +5. Verify the extracted behavior with focused tests first, then run the relevant broader runtime gate if the slice crosses startup boundaries. + +Guardrails: + +- Keep the work behavior-preserving. +- Prefer moving logic to existing runtime surfaces over creating new giant helper files. +- Do not expand into unrelated `src/main.ts` cleanup that is already tracked by other TASK-238 slices. + + +## Implementation Notes + + +Extracted the remaining inline runtime seams from `src/main.ts` into focused runtime modules: +`src/main/runtime/youtube-playback-runtime.ts`, +`src/main/runtime/autoplay-ready-gate.ts`, +`src/main/runtime/subtitle-prefetch-runtime.ts`, +`src/main/runtime/discord-presence-runtime.ts`, +and `src/main/runtime/overlay-modal-input-state.ts`. + +Added named composer wrappers for the grouped subtitle/prefetch, stats startup, and overlay visibility wiring in `src/main/runtime/composers/`. 
+ +Re-scan result for the boot-phase split follow-up: the entrypoint is materially closer to a boot/lifecycle coordinator now, so TASK-238.7 remains a valid future cleanup but no longer feels urgent or blocking for maintainability. + + +## Final Summary + + +TASK-238.6 is complete. Verification passed with `bun run typecheck`, focused runtime/composer tests, `bun run test:fast`, `bun run test:env`, and `bun run build`. The remaining `src/main.ts` work is now better isolated behind runtime modules and composer helpers, and the boot-phase split can wait for a later cleanup pass instead of being treated as immediate follow-on work. + +Backlog completion now includes changelog artifact `changes/2026-03-27-task-238.6-main-runtime-refactor.md` under runtime internals. + diff --git a/backlog/tasks/task-238.7 - Split-src-main.ts-into-boot-phase-services-runtimes-and-handlers.md b/backlog/tasks/task-238.7 - Split-src-main.ts-into-boot-phase-services-runtimes-and-handlers.md new file mode 100644 index 0000000..8fe8aad --- /dev/null +++ b/backlog/tasks/task-238.7 - Split-src-main.ts-into-boot-phase-services-runtimes-and-handlers.md @@ -0,0 +1,85 @@ +--- +id: TASK-238.7 +title: Split src/main.ts into boot-phase services, runtimes, and handlers +status: Done +assignee: [] +created_date: '2026-03-27 00:00' +updated_date: '2026-03-27 22:45' +labels: + - tech-debt + - runtime + - maintainability + - architecture +milestone: m-0 +dependencies: + - TASK-238.6 +references: + - src/main.ts + - src/main/boot/services.ts + - src/main/boot/runtimes.ts + - src/main/boot/handlers.ts + - src/main/runtime/composers +parent_task_id: TASK-238 +priority: high +--- + +## Description + + +After the remaining inline runtime logic and composer gaps are extracted, `src/main.ts` should be split along boot-phase boundaries so the entrypoint stops mixing service construction, domain runtime composition, and handler wiring in one file. 
This task tracks that structural split: move service instantiation, runtime composition, and handler orchestration into dedicated boot modules, then leave `src/main.ts` as a thin lifecycle coordinator with clear startup-path selection. + + +## Acceptance Criteria + + +- [x] #1 Service instantiation lives in a dedicated boot module instead of a large inline setup block in `src/main.ts`. +- [x] #2 Domain runtime composition lives in a dedicated boot module, separate from lifecycle and handler dispatch. +- [x] #3 Handler/composer invocation lives in a dedicated boot module, with `src/main.ts` reduced to app lifecycle and startup-path selection. +- [x] #4 Existing startup behavior remains unchanged across desktop and headless flows. +- [x] #5 Focused tests cover the split surfaces, and the relevant runtime/typecheck gate passes. + + +## Implementation Plan + + +Recommended sequence: + +1. Re-scan `src/main.ts` after TASK-238.6 lands and mark the remaining boot-phase seams by responsibility. +2. Extract service instantiation into `src/main/boot/services.ts` or equivalent. +3. Extract runtime composition into `src/main/boot/runtimes.ts` or equivalent. +4. Extract handler/composer orchestration into `src/main/boot/handlers.ts` or equivalent. +5. Shrink `src/main.ts` to startup-path selection, app lifecycle hooks, and minimal boot wiring. +6. Verify the split with focused entrypoint/runtime tests first, then run the broader runtime gate if the refactor crosses startup boundaries. + +Guardrails: + +- Keep the split behavior-preserving. +- Prefer small boot modules with narrow ownership over a new monolithic bootstrap layer. +- Do not reopen the inline logic work already tracked by TASK-238.6 unless a remaining seam truly belongs here. 
+ + +## Implementation Notes + + +Added boot-phase modules under `src/main/boot/`: +`services.ts` for config/user-data/runtime-registry/overlay bootstrap service construction, +`runtimes.ts` for named runtime/composer entrypoints and grouped boot-phase seams, +and `handlers.ts` for handler/composer boot entrypoints. + +Rewired `src/main.ts` to source boot-phase service construction from `createMainBootServices(...)` and to route runtime/handler composition through boot-level exports instead of keeping the entrypoint as the direct owner of every composition import. + +Added focused tests for the new boot seams in +`src/main/boot/services.test.ts`, +`src/main/boot/runtimes.test.ts`, +and `src/main/boot/handlers.test.ts`. + +Updated internal architecture docs to note that `src/main/boot/` now owns boot-phase assembly seams so `src/main.ts` can stay centered on lifecycle coordination and startup-path selection. + + +## Final Summary + + +TASK-238.7 is complete. Verification passed with focused boot tests, `bun run typecheck`, `bun run test:fast`, and `bun run build`. `src/main.ts` still acts as the composition root, but the boot-phase split now moves service instantiation, runtime composition seams, and handler composition seams into dedicated `src/main/boot/*` modules so the entrypoint reads more like a lifecycle coordinator than a single monolithic bootstrap file. + +Backlog completion now includes changelog artifact `changes/2026-03-27-task-238.7-main-boot-split.md` for the internal runtime architecture pass. 
+ diff --git a/backlog/tasks/task-239 - Mining-workflow-upgrades-prioritize-high-value-user-facing-improvements.md b/backlog/tasks/task-239 - Mining-workflow-upgrades-prioritize-high-value-user-facing-improvements.md new file mode 100644 index 0000000..6e1138d --- /dev/null +++ b/backlog/tasks/task-239 - Mining-workflow-upgrades-prioritize-high-value-user-facing-improvements.md @@ -0,0 +1,51 @@ +--- +id: TASK-239 +title: 'Mining workflow upgrades: prioritize high-value user-facing improvements' +status: To Do +assignee: [] +created_date: '2026-03-26 20:49' +labels: + - feature + - ux + - planning +milestone: m-2 +dependencies: [] +references: + - src/main.ts + - src/renderer + - src/anki-integration.ts + - src/config/service.ts +priority: medium +--- + +## Description + + +Track the next set of high-value workflow improvements surfaced by the March 2026 review. The goal is to capture bounded, implementation-sized feature slices with clear user value and avoid prematurely committing to much larger bets like hard-sub OCR, plugin marketplace infrastructure, or cloud config sync. Focus this parent task on features that improve the core mining workflow directly: profile-aware setup, action discoverability, previewing output before mining, and selecting richer subtitle ranges. + + +## Acceptance Criteria + + +- [ ] #1 Child tasks exist for the selected near-to-medium-term workflow upgrades with explicit scope and exclusions. +- [ ] #2 The parent task records the recommended sequencing so future work starts with the best value/risk ratio. +- [ ] #3 The tracked feature set stays grounded in existing product surfaces instead of speculative external-platform integrations. + + +## Implementation Plan + + +Recommended sequencing: + +1. Start TASK-239.3 first. Template preview is the smallest high-signal UX win on a core mining path. +2. Start TASK-239.2 next. A command palette improves discoverability across existing actions without large backend upheaval. +3. 
Start TASK-239.4 after the preview/palette work. Sentence clipping is high-value but touches runtime, subtitle selection, and card creation flows together. +4. Keep TASK-239.1 as a foundation project and scope it narrowly to local multi-profile support. Do not expand it into cloud sync in the same slice. + +Deliberate exclusions for now: + +- hard-sub OCR +- plugin marketplace infrastructure +- cloud/device sync +- site-specific streaming source auto-detection beyond narrow discovery spikes + diff --git a/backlog/tasks/task-239.1 - Add-profile-aware-config-foundations-and-profile-selection-flow.md b/backlog/tasks/task-239.1 - Add-profile-aware-config-foundations-and-profile-selection-flow.md new file mode 100644 index 0000000..e0f8ad5 --- /dev/null +++ b/backlog/tasks/task-239.1 - Add-profile-aware-config-foundations-and-profile-selection-flow.md @@ -0,0 +1,46 @@ +--- +id: TASK-239.1 +title: Add profile-aware config foundations and profile selection flow +status: To Do +assignee: [] +created_date: '2026-03-26 20:49' +labels: + - feature + - config + - launcher + - ux +milestone: m-2 +dependencies: [] +references: + - src/config/service.ts + - src/config/load.ts + - launcher/config.ts + - src/main.ts +parent_task_id: TASK-239 +priority: high +--- + +## Description + + +Introduce the foundation for local multi-profile use so users can keep separate setups for different workflows without hand-editing or swapping config files manually. Keep the first slice intentionally narrow: named local profiles, explicit selection, separate config/data paths, and safe migration from the current single-profile setup. Do not couple this task to cloud sync or remote profile sharing. + + +## Acceptance Criteria + + +- [ ] #1 Users can create/select a named local profile and launch SubMiner against that profile explicitly. +- [ ] #2 Each profile uses separate config and data storage paths for settings and profile-scoped runtime state that should not bleed across workflows. 
+- [ ] #3 Existing single-profile users migrate safely to a default profile without losing settings. +- [ ] #4 The active profile is visible in the launcher/app surface where it materially affects user behavior. +- [ ] #5 Tests cover profile resolution, migration/defaulting behavior, and at least one end-to-end selection path. + + +## Implementation Plan + + +1. Design a minimal profile storage layout and resolution strategy that works for launcher and desktop runtime entrypoints. +2. Add profile selection plumbing before changing feature behavior inside individual services. +3. Migrate config/data-path resolution to be profile-aware while preserving a safe default-profile fallback. +4. Verify with config/launcher tests plus targeted runtime coverage. + diff --git a/backlog/tasks/task-239.2 - Add-a-searchable-command-palette-for-desktop-actions.md b/backlog/tasks/task-239.2 - Add-a-searchable-command-palette-for-desktop-actions.md new file mode 100644 index 0000000..b33a6f3 --- /dev/null +++ b/backlog/tasks/task-239.2 - Add-a-searchable-command-palette-for-desktop-actions.md @@ -0,0 +1,46 @@ +--- +id: TASK-239.2 +title: Add a searchable command palette for desktop actions +status: To Do +assignee: [] +created_date: '2026-03-26 20:49' +labels: + - feature + - ux + - desktop + - shortcuts +milestone: m-2 +dependencies: [] +references: + - src/renderer + - src/shared/ipc/contracts.ts + - src/main/runtime/overlay-runtime-options.ts + - src/main.ts +parent_task_id: TASK-239 +priority: medium +--- + +## Description + + +SubMiner already exposes many actions through scattered shortcuts, menus, and modal flows. Add a searchable command palette so users can discover and execute high-value desktop actions from one keyboard-first surface. Build on the existing runtime-options/modal infrastructure where practical instead of creating a completely separate interaction model. 
+ + +## Acceptance Criteria + + +- [ ] #1 A keyboard-accessible command palette opens from the desktop app and lists supported actions with searchable labels. +- [ ] #2 Commands are backed by an explicit registry so action availability and labels are not hard-coded in one renderer component. +- [ ] #3 Users can navigate and execute commands entirely from the keyboard. +- [ ] #4 The first slice includes the highest-value existing actions rather than trying to cover every possible command on day one. +- [ ] #5 Tests cover command filtering, execution dispatch, and at least one disabled/unavailable command state. + + +## Implementation Plan + + +1. Define a small command-registry contract shared across renderer and main-process dispatch. +2. Reuse existing modal/runtime plumbing where it fits so the palette is a thin discoverability layer over current actions. +3. Ship a narrow but useful initial command set, then expand later based on usage. +4. Verify with renderer tests plus targeted IPC/runtime tests. + diff --git a/backlog/tasks/task-239.3 - Add-live-Anki-template-preview-for-card-output.md b/backlog/tasks/task-239.3 - Add-live-Anki-template-preview-for-card-output.md new file mode 100644 index 0000000..214ffb8 --- /dev/null +++ b/backlog/tasks/task-239.3 - Add-live-Anki-template-preview-for-card-output.md @@ -0,0 +1,45 @@ +--- +id: TASK-239.3 +title: Add live Anki template preview for card output +status: To Do +assignee: [] +created_date: '2026-03-26 20:49' +labels: + - feature + - anki + - ux +milestone: m-2 +dependencies: [] +references: + - src/anki-integration.ts + - src/anki-integration/card-creation.ts + - src/config/resolve/anki-connect.ts + - src/renderer +parent_task_id: TASK-239 +priority: high +--- + +## Description + + +Users currently have to infer what card output will look like from config fields and post-mine results. 
Add a live preview surface that shows the resolved card template output before mining so users can catch broken field mappings, missing media, or undesirable formatting earlier. + + +## Acceptance Criteria + + +- [ ] #1 Users can open a preview that renders the resolved front/back field output for the current note/card template configuration. +- [ ] #2 The preview clearly surfaces missing or unmapped fields instead of silently showing blank content. +- [ ] #3 Preview generation uses the same transformation logic as the live card-creation path so it stays trustworthy. +- [ ] #4 The first slice works with representative sample mining payloads and handles missing optional media gracefully. +- [ ] #5 Tests cover preview rendering for at least one valid and one invalid/missing-field configuration. + + +## Implementation Plan + + +1. Identify the current card-creation data path and extract any logic needed to render a preview without duplicating transformation rules. +2. Add a focused preview UI in the most relevant existing configuration/setup surface. +3. Surface validation/warning states for empty mappings, missing fields, and media-dependent outputs. +4. Verify with Anki integration tests plus renderer coverage for preview states. 
+ diff --git a/backlog/tasks/task-239.4 - Add-sentence-clipping-from-arbitrary-subtitle-ranges.md b/backlog/tasks/task-239.4 - Add-sentence-clipping-from-arbitrary-subtitle-ranges.md new file mode 100644 index 0000000..aa86097 --- /dev/null +++ b/backlog/tasks/task-239.4 - Add-sentence-clipping-from-arbitrary-subtitle-ranges.md @@ -0,0 +1,46 @@ +--- +id: TASK-239.4 +title: Add sentence clipping from arbitrary subtitle ranges +status: To Do +assignee: [] +created_date: '2026-03-26 20:49' +labels: + - feature + - subtitle + - anki + - ux +milestone: m-2 +dependencies: [] +references: + - src/renderer/modals/subtitle-sidebar.ts + - src/main/runtime/subtitle-position.ts + - src/anki-integration/card-creation.ts + - src/main/runtime/mpv-main-event-actions.ts +parent_task_id: TASK-239 +priority: medium +--- + +## Description + + +Current mining flows are optimized around the active subtitle line. Add a sentence-clipping workflow that lets users select an arbitrary contiguous subtitle range, preview the combined text/timing, and mine from that selection. This should improve multi-line dialogue capture without forcing manual copy/paste or separate post-processing. + + +## Acceptance Criteria + + +- [ ] #1 Users can select a contiguous subtitle range from the existing subtitle UI instead of being limited to the active cue. +- [ ] #2 The workflow previews the combined text and resulting timing range before mining. +- [ ] #3 Mining from a clipped range uses the combined subtitle payload in card generation while preserving existing single-line behavior. +- [ ] #4 The feature handles overlapping/edge timing cases predictably and does not corrupt the normal active-cue flow. +- [ ] #5 Tests cover range selection, combined payload generation, and at least one card-creation path using a clipped selection. + + +## Implementation Plan + + +1. Define a selection model that fits the existing subtitle sidebar/runtime data flow. +2. 
Add preview + confirmation UI before routing the clipped payload into mining. +3. Keep the existing single-line path intact and treat clipping as an additive workflow. +4. Verify with subtitle-sidebar, runtime, and Anki/card-creation tests. + diff --git a/backlog/tasks/task-240 - Migrate-SubMiner-agent-skills-into-a-repo-local-plugin-workflow.md b/backlog/tasks/task-240 - Migrate-SubMiner-agent-skills-into-a-repo-local-plugin-workflow.md new file mode 100644 index 0000000..cf2c44c --- /dev/null +++ b/backlog/tasks/task-240 - Migrate-SubMiner-agent-skills-into-a-repo-local-plugin-workflow.md @@ -0,0 +1,81 @@ +--- +id: TASK-240 +title: Migrate SubMiner agent skills into a repo-local plugin workflow +status: Done +assignee: + - codex +created_date: '2026-03-26 00:00' +updated_date: '2026-03-26 23:23' +labels: + - skills + - plugin + - workflow + - backlog + - tooling +dependencies: + - TASK-159 + - TASK-160 +priority: high +ordinal: 24000 +--- + +## Description + + + +Turn the current SubMiner-specific repo skills into a reproducible repo-local plugin workflow. The plugin should become the canonical source of truth for the SubMiner scrum-master and change-verification skills, bundle the scripts and metadata needed to test and validate changes, and preserve compatibility for existing repo references through thin `.agents/skills/` shims while the migration settles. + + + +## Acceptance Criteria + + + +- [x] #1 A repo-local plugin scaffold exists for the SubMiner workflow, with manifest and marketplace metadata wired according to the repo-local plugin layout. +- [x] #2 `subminer-scrum-master` and `subminer-change-verification` live under the plugin as the canonical skill sources, along with any helper scripts or supporting files needed for reproducible use. +- [x] #3 Existing repo-level `.agents/skills/` entrypoints are reduced to compatibility shims or redirects instead of remaining as duplicate sources of truth. 
+- [x] #4 The plugin-owned workflow explicitly documents backlog-first orchestration and change verification expectations, including how the skills work together. +- [x] #5 The migration is validated with the cheapest sufficient repo-native verification lane and the task records the exact commands and any skips/blockers. + + +## Implementation Plan + + + +1. Inspect the plugin-creator contract and current repo skill/script layout, then choose the plugin name, directory structure, and migration boundaries. +2. Scaffold a repo-local plugin plus marketplace entry, keeping the plugin payload under `plugins//` and the catalog entry under `.agents/plugins/marketplace.json`. +3. Move the two SubMiner-specific skills and their helper scripts into the plugin as the canonical source, adding any plugin docs or supporting metadata needed for reproducible testing/validation. +4. Replace the existing `.agents/skills/subminer-*` surfaces with minimal compatibility shims that point agents at the plugin-owned sources without duplicating logic. +5. Update internal docs or references that should now describe the plugin-first workflow. +6. Run the cheapest sufficient verification lane for plugin/internal-doc changes and record the results in this task. + + +## Implementation Notes + + + +2026-03-26: User approved the migration shape where the plugin becomes the canonical source of truth and `.agents/skills/` stays only as compatibility shims. Repo-local plugin chosen over home-local plugin. + +2026-03-26: Backlog MCP resources/tools are not available in this Codex session (`MCP startup failed`), so this task is being initialized directly in the repo-local `backlog/` files instead of through the live Backlog MCP interface. 
+ +2026-03-26: Scaffolded `plugins/subminer-workflow/` plus `.agents/plugins/marketplace.json`, moved the scrum-master and change-verification skill definitions into the plugin as the canonical sources, and converted the old `.agents/skills/` surfaces into compatibility shims. Preserved the old verifier script entrypoints as wrappers because backlog/docs history already calls them directly. + +2026-03-26: Verification passed. + +- `bash -n plugins/subminer-workflow/skills/subminer-change-verification/scripts/classify_subminer_diff.sh` +- `bash -n plugins/subminer-workflow/skills/subminer-change-verification/scripts/verify_subminer_change.sh` +- `bash -n .agents/skills/subminer-change-verification/scripts/classify_subminer_diff.sh` +- `bash -n .agents/skills/subminer-change-verification/scripts/verify_subminer_change.sh` +- `bash .agents/skills/subminer-change-verification/scripts/classify_subminer_diff.sh plugins/subminer-workflow/.codex-plugin/plugin.json docs/workflow/agent-plugins.md .agents/skills/subminer-change-verification/scripts/verify_subminer_change.sh` +- `bash .agents/skills/subminer-change-verification/scripts/verify_subminer_change.sh --lane docs plugins/subminer-workflow .agents/skills/subminer-scrum-master/SKILL.md .agents/skills/subminer-change-verification/SKILL.md .agents/skills/subminer-change-verification/scripts/classify_subminer_diff.sh .agents/skills/subminer-change-verification/scripts/verify_subminer_change.sh .agents/plugins/marketplace.json docs/workflow/README.md docs/workflow/agent-plugins.md 'backlog/tasks/task-240 - Migrate-SubMiner-agent-skills-into-a-repo-local-plugin-workflow.md'` +- Verifier artifacts: `.tmp/skill-verification/subminer-verify-20260326-232300-E2NQVX/` + + + +## Final Summary + + + +Created a repo-local `subminer-workflow` plugin as the canonical packaging for the SubMiner scrum-master and change-verification workflow. 
The plugin now owns both skills, the verifier helper scripts, plugin metadata, and workflow docs. The old `.agents/skills/` surfaces remain only as compatibility shims, and the old verifier script paths now forward to the plugin-owned scripts so existing docs and backlog commands continue to work. Targeted plugin/docs verification passed, including wrapper-script syntax checks and a real verifier run through the legacy entrypoint. + + diff --git a/backlog/tasks/task-241 - Add-optional-setup-action-to-seed-subminer-mpv-profile.md b/backlog/tasks/task-241 - Add-optional-setup-action-to-seed-subminer-mpv-profile.md new file mode 100644 index 0000000..71f1e1a --- /dev/null +++ b/backlog/tasks/task-241 - Add-optional-setup-action-to-seed-subminer-mpv-profile.md @@ -0,0 +1,38 @@ +--- +id: TASK-241 +title: Add optional setup action to seed SubMiner mpv profile +type: feature +status: Open +assignee: [] +created_date: '2026-03-27 11:22' +updated_date: '2026-03-27 11:22' +labels: + - setup + - mpv + - docs + - ux +dependencies: [] +references: [] +documentation: + - /home/sudacode/projects/japanese/SubMiner/docs-site/usage.md + - /home/sudacode/projects/japanese/SubMiner/docs-site/launcher-script.md +ordinal: 24100 +--- + +## Description + + +Add an optional control in the first-run / setup flow to write or update the user’s mpv configuration with SubMiner-recommended defaults (especially the `subminer` profile), so users can recover from a missing profile without manual config editing. + +The docs for launcher usage must explicitly state that SubMiner’s Windows mpv launcher path runs mpv with `--profile=subminer` by default. + + +## Acceptance Criteria + + +- [ ] #1 Add an optional setup UI action/button to generate or overwrite a user-confirmed mpv config that includes a `subminer` profile. +- [ ] #2 The action should be non-destructive by default, show diff/contents before write, and support append/update mode when other mpv settings already exist. 
+- [ ] #3 Document how to resolve the missing-profile scenario and clearly state that the SubMiner mpv launcher runs with `--profile=subminer` by default (`--launch-mpv` / Windows mpv shortcut path). +- [ ] #4 Add/adjust setup validation messaging so users are not blocked if `subminer` profile is initially missing, but can opt into one-click setup recovery. +- [ ] #5 Include a short verification path for both Windows and non-Windows flows (for example dry-run + write path). + diff --git a/backlog/tasks/task-242 - Fix-stats-server-Bun-fallback-in-coverage-lane.md b/backlog/tasks/task-242 - Fix-stats-server-Bun-fallback-in-coverage-lane.md new file mode 100644 index 0000000..346cbff --- /dev/null +++ b/backlog/tasks/task-242 - Fix-stats-server-Bun-fallback-in-coverage-lane.md @@ -0,0 +1,35 @@ +--- +id: TASK-242 +title: Fix stats server Bun fallback in coverage lane +status: Done +assignee: [] +created_date: '2026-03-29 07:31' +updated_date: '2026-03-29 07:37' +labels: + - ci + - bug +milestone: cleanup +dependencies: [] +references: + - 'PR #36' +priority: high +--- + +## Description + + +Coverage CI fails when `startStatsServer` reaches the Bun server seam under the maintained source lane. Add a runtime fallback that works when `Bun.serve` is unavailable and keep the stats-server startup path testable. + + +## Acceptance Criteria + +- [x] #1 `bun run test:coverage:src` passes in GitHub CI +- [x] #2 `startStatsServer` uses `Bun.serve` when present and a Node server fallback otherwise +- [x] #3 Regression coverage exists for the fallback startup path + + +## Final Summary + + +Fixed the CI failure in the coverage lane by replacing the Bun-only stats server path with a Bun-or-node/http startup fallback and by normalizing setup window options so undefined BrowserWindow fields are omitted. Verified the exact coverage lane under Bun 1.3.5 and confirmed the GitHub Actions run for PR #36 completed successfully. 
+ diff --git a/backlog/tasks/task-245 - Cut-minor-release-v0.10.0-for-docs-and-release-prep.md b/backlog/tasks/task-245 - Cut-minor-release-v0.10.0-for-docs-and-release-prep.md new file mode 100644 index 0000000..3807634 --- /dev/null +++ b/backlog/tasks/task-245 - Cut-minor-release-v0.10.0-for-docs-and-release-prep.md @@ -0,0 +1,68 @@ +--- +id: TASK-245 +title: Cut minor release v0.10.0 for docs and release prep +status: Done +assignee: + - '@codex' +created_date: '2026-03-29 08:10' +updated_date: '2026-03-29 08:13' +labels: + - release + - docs + - minor +dependencies: [] +references: + - /home/sudacode/projects/japanese/SubMiner/package.json + - /home/sudacode/projects/japanese/SubMiner/README.md + - /home/sudacode/projects/japanese/SubMiner/docs/RELEASING.md + - /home/sudacode/projects/japanese/SubMiner/docs/README.md + - /home/sudacode/projects/japanese/SubMiner/docs-site/changelog.md + - /home/sudacode/projects/japanese/SubMiner/CHANGELOG.md + - /home/sudacode/projects/japanese/SubMiner/release/release-notes.md +priority: high +ordinal: 54850 +--- + +## Description + + +Prepare the next 0-ver minor release cut as `v0.10.0`, keeping release-facing docs, backlog, and changelog artifacts aligned, then run the release-prep verification gate. + + +## Acceptance Criteria + +- [x] #1 Repository version metadata is updated to `0.10.0`. +- [x] #2 Release-facing docs and public changelog surfaces are aligned for the `v0.10.0` cut. +- [x] #3 `CHANGELOG.md` and `release/release-notes.md` contain the committed `v0.10.0` section and any consumed fragments are removed. +- [x] #4 Release-prep verification passes for changelog, config example, typecheck, tests, and build. + + +## Implementation Notes + + +Completed: +- Bumped `package.json` from `0.9.3` to `0.10.0`. +- Ran `bun run changelog:build --version 0.10.0 --date 2026-03-29`, which generated `CHANGELOG.md` and `release/release-notes.md` and removed the queued `changes/*.md` fragments. 
+- Updated `docs-site/changelog.md` with the public-facing `v0.10.0` summary. + +Verification: +- `bun run changelog:lint` +- `bun run changelog:check --version 0.10.0` +- `bun run verify:config-example` +- `bun run typecheck` +- `bunx bun@1.3.5 run test:fast` +- `bunx bun@1.3.5 run test:env` +- `bunx bun@1.3.5 run build` +- `bunx bun@1.3.5 run docs:test` +- `bunx bun@1.3.5 run docs:build` + +Notes: +- The local `bun` binary is `1.3.11`, which tripped Bun's nested `node:test` handling in `test:fast`; rerunning with the repo-pinned `bun@1.3.5` cleared the issue. +- No README content change was necessary for this cut. + + +## Final Summary + + +Prepared the `v0.10.0` release cut locally. Bumped `package.json`, generated committed root changelog and release notes, updated the public docs changelog summary, and verified the release gate with the repo-pinned Bun `1.3.5` runtime. The release prep is green and ready for tagging/publishing when desired. + diff --git a/backlog/tasks/task-246 - Migrate-Discord-Rich-Presence-to-maintained-RPC-wrapper.md b/backlog/tasks/task-246 - Migrate-Discord-Rich-Presence-to-maintained-RPC-wrapper.md new file mode 100644 index 0000000..54e4ba3 --- /dev/null +++ b/backlog/tasks/task-246 - Migrate-Discord-Rich-Presence-to-maintained-RPC-wrapper.md @@ -0,0 +1,55 @@ +--- +id: TASK-246 +title: Migrate Discord Rich Presence to maintained RPC wrapper +status: Done +assignee: [] +created_date: '2026-03-29 08:17' +updated_date: '2026-03-29 08:22' +labels: + - dependency + - discord + - presence +dependencies: [] +priority: medium +--- + +## Description + + +Replace the deprecated Discord Rich Presence wrapper with a maintained JavaScript alternative while preserving the current IPC-based presence behavior in the Electron main process. 
+ + +## Acceptance Criteria + +- [x] #1 The app no longer depends on `discord-rpc` +- [x] #2 Discord Rich Presence still logs in and publishes activity updates from the main process +- [x] #3 Existing Discord presence tests continue to pass or are updated to cover the new client API +- [x] #4 The change is documented in the release notes or changelog fragment + + +## Implementation Notes + + +Completed: +- Swapped the app's Discord RPC dependency from `discord-rpc` to `@xhayper/discord-rpc`. +- Extracted the client adapter into `src/main/runtime/discord-rpc-client.ts` so the main process can keep using a small wrapper around the maintained library. +- Added `src/main/runtime/discord-rpc-client.test.ts` to verify the adapter forwards login/activity/clear/destroy calls through `client.user`. +- Documented the dependency swap in `CHANGELOG.md`, `release/release-notes.md`, and `docs-site/changelog.md`. + +Verification: +- `bunx bun@1.3.5 test src/main/runtime/discord-rpc-client.test.ts src/core/services/discord-presence.test.ts` +- `bunx bun@1.3.5 run changelog:lint` +- `bunx bun@1.3.5 run changelog:check --version 0.10.0` +- `bunx bun@1.3.5 run docs:test` +- `bunx bun@1.3.5 run docs:build` + +Notes: +- The existing release prep artifacts for v0.10.0 were kept intact and updated in place. +- No README change was needed for this dependency swap. + + +## Final Summary + + +Replaced the deprecated `discord-rpc` dependency with the maintained `@xhayper/discord-rpc` wrapper while preserving the main-process rich presence flow. Added a focused runtime wrapper test, kept the existing Discord presence service tests green, and documented the dependency swap in the release notes and changelog. 
+ diff --git a/backlog/tasks/task-247 - Strip-inline-subtitle-markup-from-subtitle-sidebar-cues.md b/backlog/tasks/task-247 - Strip-inline-subtitle-markup-from-subtitle-sidebar-cues.md new file mode 100644 index 0000000..6b40532 --- /dev/null +++ b/backlog/tasks/task-247 - Strip-inline-subtitle-markup-from-subtitle-sidebar-cues.md @@ -0,0 +1,60 @@ +--- +id: TASK-247 +title: Strip inline subtitle markup from subtitle sidebar cues +status: Done +assignee: + - codex +created_date: '2026-03-29 10:01' +updated_date: '2026-03-29 10:10' +labels: [] +dependencies: [] +references: + - src/core/services/subtitle-cue-parser.ts + - src/renderer/modals/subtitle-sidebar.ts + - src/core/services/subtitle-cue-parser.test.ts +--- + +## Description + + +Subtitle sidebar should display readable subtitle text when loaded subtitle files include inline markup such as HTML-like font tags. Parsed cue text currently preserves markup, causing raw tags to appear in the sidebar instead of clean subtitle content. + + +## Acceptance Criteria + +- [x] #1 Subtitle sidebar cue text omits inline subtitle markup such as HTML-like font tags while preserving visible subtitle content. +- [x] #2 Parsed subtitle cues used by the sidebar keep timing order and expected line-break behavior after markup sanitization. +- [x] #3 Regression tests cover markup-bearing subtitle cue parsing so raw tags do not reappear in the sidebar. + + +## Implementation Plan + + +1. Add regression tests in src/core/services/subtitle-cue-parser.test.ts for subtitle cues containing HTML-like font tags, including multi-line content. +2. Verify the new parser test fails against current behavior to confirm the bug is covered. +3. Update src/core/services/subtitle-cue-parser.ts to sanitize inline subtitle markup while preserving visible text and expected newline handling. +4. Re-run focused parser tests, then run broader verification commands required for handoff as practical. +5. 
Update task notes/acceptance criteria based on verified results and finalize the task record. + + +## Implementation Notes + + +User approved implementation on 2026-03-29. + +Implemented parser-level subtitle cue sanitization for HTML-like tags so loaded sidebar cues render readable text while preserving cue line breaks. + +Added regression coverage for SRT and ASS cue parsing with markup. + +Verification: bun test src/core/services/subtitle-cue-parser.test.ts; bun run typecheck; bun run test:fast; bun run test:env; bun run build; bun run test:smoke:dist. + + +## Final Summary + + +Sanitized parsed subtitle cue text in src/core/services/subtitle-cue-parser.ts so HTML-like inline markup such as `<font>` tags is removed before cues reach the subtitle sidebar. The sanitizer is shared across SRT/VTT-style parsing and ASS parsing, while existing cue timing and line-break semantics remain intact. + +Added regression tests in src/core/services/subtitle-cue-parser.test.ts covering markup-bearing SRT lines and ASS dialogue lines with \N breaks, and verified the original failure before implementing the fix. + +Tests run: bun test src/core/services/subtitle-cue-parser.test.ts; bun run typecheck; bun run test:fast; bun run test:env; bun run build; bun run test:smoke:dist. 
+ diff --git a/backlog/tasks/task-248 - Fix-macOS-visible-overlay-toggle-getting-immediately-restored.md b/backlog/tasks/task-248 - Fix-macOS-visible-overlay-toggle-getting-immediately-restored.md new file mode 100644 index 0000000..1430d7a --- /dev/null +++ b/backlog/tasks/task-248 - Fix-macOS-visible-overlay-toggle-getting-immediately-restored.md @@ -0,0 +1,69 @@ +--- +id: TASK-248 +title: Fix macOS visible overlay toggle getting immediately restored +status: Done +assignee: [] +created_date: '2026-03-29 10:03' +updated_date: '2026-03-29 22:14' +labels: [] +dependencies: [] +references: + - /Users/sudacode/projects/japanese/SubMiner/plugin/subminer/process.lua + - /Users/sudacode/projects/japanese/SubMiner/plugin/subminer/ui.lua + - /Users/sudacode/projects/japanese/SubMiner/src/core/services/cli-command.ts + - >- + /Users/sudacode/projects/japanese/SubMiner/src/main/overlay-visibility-runtime.ts +--- + +## Description + + +Investigate and fix the visible overlay toggle path on macOS so the user can reliably hide the overlay after it has been shown. The current behavior can ignore the toggle or hide the overlay briefly before it is restored immediately. + + +## Acceptance Criteria + +- [x] #1 Pressing the visible-overlay toggle hides the overlay when it is currently shown on macOS. +- [x] #2 A manual hide is not immediately undone by startup or readiness flows. +- [x] #3 The mpv/plugin toggle path matches the intended visible-overlay toggle behavior. +- [x] #4 Regression tests cover the failing toggle path. + + +## Implementation Plan + + +1. Reproduce the toggle/re-show logic from code paths around mpv plugin control commands and auto-play readiness. +2. Add regression coverage for manual toggle-off staying hidden through readiness completion. +3. Patch the plugin/control path so manual visible-overlay toggles are not undone by readiness auto-show. +4. Run targeted tests, then the relevant verification lane. 
+ + +## Implementation Notes + + +Root cause: the mpv plugin readiness callback (`subminer-autoplay-ready`) could re-issue `--show-visible-overlay` after a manual toggle/hide. Initial fix only suppressed the next readiness restore, but repeated readiness callbacks in the same media session could still re-show the overlay. The plugin toggle path also still used legacy `--toggle` instead of the explicit visible-overlay command. + +Implemented a session-scoped suppression flag in the Lua plugin so a manual hide/toggle during the pause-until-ready window blocks readiness auto-show for the rest of the current auto-start session, then resets on the next auto-start session. + +Added Lua regression coverage for both behaviors: manual toggle-off stays hidden through readiness completion, repeated readiness callbacks in the same session stay suppressed, and `subminer-toggle` emits `--toggle-visible-overlay` rather than legacy `--toggle`. + +Follow-up investigation found a second issue in `src/core/services/cli-command.ts`: pure visible-overlay toggle commands still ran the MPV connect/start path (`connectMpvClient`) because `--toggle` and `--toggle-visible-overlay` were classified as start-like commands. That side effect could retrigger startup visibility work even after the plugin-side fix. + +Updated CLI command handling so only `--start` reconnects MPV. Pure toggle/show/hide overlay commands still initialize overlay runtime when needed, but they no longer restart/reconnect the MPV control path. + +Renderer/modal follow-ups: restored focused-overlay mpv y-chord proxy in `src/renderer/handlers/keyboard.ts`, added a modal-close guard in `src/main/overlay-runtime.ts` so modal teardown does not re-show a manually hidden overlay, and added a duplicate-toggle debounce in `src/main/runtime/overlay-visibility-actions.ts` to ignore near-simultaneous toggle requests inside the main process. 
+ +2026-03-29: added regression for repeated subminer-autoplay-ready signals after manual y-t hide. Root cause: Lua plugin suppression only blocked the first ready-time restore, so later ready callbacks in the same media session could re-show the visible overlay. Updated plugin suppression to remain active for the full current auto-start session and reset on the next auto-start trigger. + +2026-03-29: live mpv log showed repeated `subminer-autoplay-ready` script messages from Electron during paused startup, each triggering plugin `--show-visible-overlay` and immediate re-show. Fixed `src/main/runtime/autoplay-ready-gate.ts` so plugin readiness is signaled once per media while paused retry loops only re-issue `pause=false` instead of re-signaling readiness. + +2026-03-29: Added window-level guard for stray visible-overlay re-show on macOS. `src/core/services/overlay-window.ts` now immediately re-hides the visible overlay window on `show` if overlay state is false, covering native/Electron re-show paths that bypass normal visibility actions. Regression: `src/core/services/overlay-window.test.ts`. Verified with full gate and rebuilt unsigned mac bundle. + +2026-03-29: added a blur-path guard for the visible overlay window. `src/core/services/overlay-window.ts` now skips topmost restacking when a visible-overlay blur fires after overlay state already flipped off, covering a macOS hide-in-flight path that could immediately reassert the window. Regression coverage added in `src/core/services/overlay-window.test.ts`; verified with targeted overlay tests, full gate, and rebuilt unsigned mac bundle. + + +## Final Summary + + +Confirmed with user that macOS `y-t` now works. Cleaned the patch set down to the remaining justified fixes: explicit visible-overlay plugin toggle/suppression, pure-toggle CLI no longer reconnects MPV, autoplay-ready signaling only fires once per media, and the final visible-overlay blur guard that stops macOS restacking after a manual hide. 
Full gate passed again before commit `c939c580` (`fix: stabilize macOS visible overlay toggle`). + diff --git a/backlog/tasks/task-249 - Fix-AniList-token-persistence-on-setup-login.md b/backlog/tasks/task-249 - Fix-AniList-token-persistence-on-setup-login.md new file mode 100644 index 0000000..abf7501 --- /dev/null +++ b/backlog/tasks/task-249 - Fix-AniList-token-persistence-on-setup-login.md @@ -0,0 +1,37 @@ +--- +id: TASK-249 +title: Fix AniList token persistence on setup login +status: Done +assignee: [] +created_date: '2026-03-29 10:08' +updated_date: '2026-03-29 19:42' +labels: + - anilist + - bug +dependencies: [] +documentation: + - src/main/runtime/anilist-setup.ts + - src/core/services/anilist/anilist-token-store.ts + - src/main/runtime/anilist-token-refresh.ts + - docs-site/anilist-integration.md +priority: high +--- + +## Description + + +AniList setup can appear successful but the token is not persisted across restarts. Investigate the setup callback and token store path so the app either saves the token reliably or surfaces persistence failure instead of reopening setup on every launch. + + +## Acceptance Criteria + +- [ ] #1 AniList setup login persists a usable token across app restarts when safeStorage works +- [ ] #2 If token persistence fails the setup flow reports the failure instead of pretending login succeeded +- [ ] #3 Regression coverage exists for the callback/save path and the refresh path that reopens setup when no token is available + + +## Final Summary + + +Pinned installed mpv plugin configs to the current SubMiner binary so standalone mpv launches reuse the same app identity that saved AniList tokens. Added startup self-heal for existing blank binary_path configs, install-time binary_path writes for fresh plugin installs, regression tests for both paths, and docs updates describing the new behavior. 
+ diff --git a/backlog/tasks/task-250 - Restore-macOS-mpv-passthrough-while-overlay-subtitle-sidebar-is-open.md b/backlog/tasks/task-250 - Restore-macOS-mpv-passthrough-while-overlay-subtitle-sidebar-is-open.md new file mode 100644 index 0000000..7977bdd --- /dev/null +++ b/backlog/tasks/task-250 - Restore-macOS-mpv-passthrough-while-overlay-subtitle-sidebar-is-open.md @@ -0,0 +1,72 @@ +--- +id: TASK-250 +title: Restore macOS mpv passthrough while overlay subtitle sidebar is open +status: Done +assignee: + - '@codex' +created_date: '2026-03-29 10:10' +updated_date: '2026-03-29 10:23' +labels: + - bug + - macos + - subtitle-sidebar + - overlay + - mpv +dependencies: [] +references: + - >- + /Users/sudacode/projects/japanese/SubMiner/src/renderer/overlay-mouse-ignore.ts + - >- + /Users/sudacode/projects/japanese/SubMiner/src/renderer/modals/subtitle-sidebar.ts + - /Users/sudacode/projects/japanese/SubMiner/src/renderer/handlers/keyboard.ts + - >- + /Users/sudacode/projects/japanese/SubMiner/src/renderer/modals/subtitle-sidebar.test.ts + - >- + /Users/sudacode/projects/japanese/SubMiner/src/renderer/overlay-mouse-ignore.test.ts +priority: high +--- + +## Description + + +When the overlay-layout subtitle sidebar is open on macOS, users should still be able to click through outside the sidebar and return keyboard focus to mpv so native mpv keybindings continue to work. The sidebar should stay interactive when hovered or focused, but it must not make the whole visible overlay behave like a blocking modal. + + +## Acceptance Criteria + +- [x] #1 Opening the overlay-layout subtitle sidebar does not keep the entire visible overlay mouse-interactive outside sidebar hover or focus. +- [x] #2 With the subtitle sidebar open, clicking outside the sidebar can refocus mpv so native mpv keybindings continue to work. +- [x] #3 Focused regression coverage exists for overlay-layout sidebar passthrough behavior on mouse-ignore state changes. + + +## Implementation Plan + + +1. 
Add renderer regression coverage for overlay-layout subtitle sidebar passthrough so open-but-unhovered sidebar no longer holds global mouse interaction. +2. Update overlay mouse-ignore gating to keep the subtitle sidebar interactive only while hovered or otherwise actively interacting, instead of treating overlay layout as a blocking modal. +3. Run focused renderer tests for subtitle sidebar and mouse-ignore behavior, then update task notes/criteria with the verified outcome. + + +## Implementation Notes + + +Confirmed the regression only affects the default overlay-layout subtitle sidebar: open sidebar state was treated as a blocking overlay modal, which prevented click-through outside the sidebar and stranded native mpv keybindings until focus was manually recovered. + +Added a failing regression in src/renderer/modals/subtitle-sidebar.test.ts for overlay-layout passthrough before changing the gate. + +Verification: bun test src/renderer/modals/subtitle-sidebar.test.ts src/renderer/overlay-mouse-ignore.test.ts; bun run typecheck + +User reported the first renderer-only fix did not resolve the macOS issue in practice. Reopening investigation to trace visible-overlay window focus and hit-testing outside the renderer mouse-ignore gate. + +Follow-up root cause: sidebar hover handlers were attached to the full-screen `.subtitle-sidebar-modal` shell instead of the actual sidebar panel. On the transparent visible overlay that shell spans the viewport, so sidebar-active state could persist outside the panel and keep the overlay interactive longer than intended. + +Updated the sidebar modal to track hover/focus on `subtitleSidebarContent` and derive sidebar interaction state from panel hover or focus-within before recomputing mouse passthrough. + +Verification refresh: bun test src/renderer/modals/subtitle-sidebar.test.ts src/renderer/overlay-mouse-ignore.test.ts; bun run typecheck + + +## Final Summary + + +Restored overlay subtitle sidebar passthrough in two layers. 
First, the visible overlay mouse-ignore gate no longer treats the subtitle sidebar as a global blocking modal. Second, the sidebar panel now tracks interaction on the real sidebar content instead of the full-screen modal shell, and keeps itself active only while the panel is hovered or focused. Added regressions for overlay-layout passthrough and focus-within behavior. Verification: `bun test src/renderer/modals/subtitle-sidebar.test.ts src/renderer/overlay-mouse-ignore.test.ts` and `bun run typecheck`. + diff --git a/backlog/tasks/task-251 - Docs-add-subtitle-sidebar-and-Jimaku-integration-pages.md b/backlog/tasks/task-251 - Docs-add-subtitle-sidebar-and-Jimaku-integration-pages.md new file mode 100644 index 0000000..8ec8e00 --- /dev/null +++ b/backlog/tasks/task-251 - Docs-add-subtitle-sidebar-and-Jimaku-integration-pages.md @@ -0,0 +1,32 @@ +--- +id: TASK-251 +title: 'Docs: add subtitle sidebar and Jimaku integration pages' +status: Done +assignee: [] +created_date: '2026-03-29 22:36' +updated_date: '2026-03-29 22:38' +labels: + - docs +dependencies: [] +priority: medium +--- + +## Description + + +Track the docs-site update that adds a dedicated subtitle sidebar page, links Jimaku integration from the homepage/config docs, and refreshes the docs-site theme styling used by those pages. + + +## Acceptance Criteria + +- [x] #1 docs-site nav includes a Subtitle Sidebar entry +- [x] #2 Subtitle Sidebar page documents layout, shortcut, and config options +- [x] #3 Jimaku integration page and configuration docs link to the new docs page +- [x] #4 Changelog fragment exists for the user-visible docs release note + + +## Final Summary + + +Added the subtitle sidebar docs page and nav entry, linked Jimaku integration from the homepage/config docs, refreshed docs-site styling tokens, and recorded the release note fragment. Verified with `bun run changelog:lint`, `bun run docs:test`, `bun run docs:build`, and `bun run build`. 
Full repo test gate still has pre-existing failures in `bun run test:fast` and `bun run test:env` unrelated to these docs changes. + diff --git a/bun.lock b/bun.lock index ec313d7..b14c76c 100644 --- a/bun.lock +++ b/bun.lock @@ -7,10 +7,9 @@ "dependencies": { "@fontsource-variable/geist": "^5.2.8", "@fontsource-variable/geist-mono": "^5.2.7", - "@hono/node-server": "^1.19.11", + "@xhayper/discord-rpc": "^1.3.3", "axios": "^1.13.5", "commander": "^14.0.3", - "discord-rpc": "^4.0.1", "hono": "^4.12.7", "jsonc-parser": "^3.3.1", "libsql": "^0.5.22", @@ -38,6 +37,12 @@ "@develar/schema-utils": ["@develar/schema-utils@2.6.5", "", { "dependencies": { "ajv": "^6.12.0", "ajv-keywords": "^3.4.1" } }, "sha512-0cp4PsWQ/9avqTVMCtZ+GirikIA36ikvjtHweU4/j8yLtgObI0+JUPhYFScgwlteveGB1rt3Cm8UhN04XayDig=="], + "@discordjs/collection": ["@discordjs/collection@2.1.1", "", {}, "sha512-LiSusze9Tc7qF03sLCujF5iZp7K+vRNEDBZ86FT9aQAv3vxMLihUvKvpsCWiQ2DJq1tVckopKm1rxomgNUc9hg=="], + + "@discordjs/rest": ["@discordjs/rest@2.6.1", "", { "dependencies": { "@discordjs/collection": "^2.1.1", "@discordjs/util": "^1.2.0", "@sapphire/async-queue": "^1.5.3", "@sapphire/snowflake": "^3.5.5", "@vladfrangu/async_event_emitter": "^2.4.6", "discord-api-types": "^0.38.40", "magic-bytes.js": "^1.13.0", "tslib": "^2.6.3", "undici": "6.24.1" } }, "sha512-wwQdgjeaoYFiaG+atbqx6aJDpqW7JHAo0HrQkBTbYzM3/PJ3GweQIpgElNcGZ26DCUOXMyawYd0YF7vtr+fZXg=="], + + "@discordjs/util": ["@discordjs/util@1.2.0", "", { "dependencies": { "discord-api-types": "^0.38.33" } }, "sha512-3LKP7F2+atl9vJFhaBjn4nOaSWahZ/yWjOvA4e5pnXkt2qyXRCHLxoBQy81GFtLGCq7K9lPm9R517M1U+/90Qg=="], + "@electron/asar": ["@electron/asar@3.4.1", "", { "dependencies": { "commander": "^5.0.0", "glob": "^7.1.6", "minimatch": "^3.0.4" }, "bin": { "asar": "bin/asar.js" } }, "sha512-i4/rNPRS84t0vSRa2HorerGRXWyF4vThfHesw0dmcWHp+cspK743UanA0suA5Q5y8kzY2y6YKrvbIUn69BCAiA=="], "@electron/fuses": ["@electron/fuses@1.8.0", "", { "dependencies": { "chalk": "^4.1.1", 
"fs-extra": "^9.0.1", "minimist": "^1.2.5" }, "bin": { "electron-fuses": "dist/bin.js" } }, "sha512-zx0EIq78WlY/lBb1uXlziZmDZI4ubcCXIMJ4uGjXzZW0nS19TjSPeXPAjzzTmKQlJUZm0SbmZhPKP7tuQ1SsEw=="], @@ -110,8 +115,6 @@ "@fontsource-variable/geist-mono": ["@fontsource-variable/geist-mono@5.2.7", "", {}, "sha512-ZKlZ5sjtalb2TwXKs400mAGDlt/+2ENLNySPx0wTz3bP3mWARCsUW+rpxzZc7e05d2qGch70pItt3K4qttbIYA=="], - "@hono/node-server": ["@hono/node-server@1.19.11", "", { "peerDependencies": { "hono": "^4" } }, "sha512-dr8/3zEaB+p0D2n/IUrlPF1HZm586qgJNXK1a9fhg/PzdtkK7Ksd5l312tJX2yBuALqDYBlG20QEbayqPyxn+g=="], - "@isaacs/cliui": ["@isaacs/cliui@8.0.2", "", { "dependencies": { "string-width": "^5.1.2", "string-width-cjs": "npm:string-width@^4.2.0", "strip-ansi": "^7.0.1", "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", "wrap-ansi": "^8.1.0", "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" } }, "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA=="], "@isaacs/fs-minipass": ["@isaacs/fs-minipass@4.0.1", "", { "dependencies": { "minipass": "^7.0.4" } }, "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w=="], @@ -146,6 +149,10 @@ "@pkgjs/parseargs": ["@pkgjs/parseargs@0.11.0", "", {}, "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg=="], + "@sapphire/async-queue": ["@sapphire/async-queue@1.5.5", "", {}, "sha512-cvGzxbba6sav2zZkH8GPf2oGk9yYoD5qrNWdu9fRehifgnFZJMV+nuy2nON2roRO4yQQ+v7MK/Pktl/HgfsUXg=="], + + "@sapphire/snowflake": ["@sapphire/snowflake@3.5.5", "", {}, "sha512-xzvBr1Q1c4lCe7i6sRnrofxeO1QTP/LKQ6A6qy0iB4x5yfiSfARMEQEghojzTNALDTcv8En04qYNIco9/K9eZQ=="], + "@sindresorhus/is": ["@sindresorhus/is@4.6.0", "", {}, "sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw=="], "@szmarczak/http-timer": ["@szmarczak/http-timer@4.0.6", "", { "dependencies": { "defer-to-connect": "^2.0.0" } }, 
"sha512-4BAffykYOgO+5nzBWYwE3W90sBgLJoUPRWWcL8wlyiM8IB8ipJz3UMJ9KXQd1RKQXpKp8Tutn80HZtWsu2u76w=="], @@ -174,6 +181,10 @@ "@types/yauzl": ["@types/yauzl@2.10.3", "", { "dependencies": { "@types/node": "*" } }, "sha512-oJoftv0LSuaDZE3Le4DbKX+KS9G36NzOeSap90UIK0yMA/NhKJhqlSGtNDORNRaIbQfzjXDrQa0ytJ6mNRGz/Q=="], + "@vladfrangu/async_event_emitter": ["@vladfrangu/async_event_emitter@2.4.7", "", {}, "sha512-Xfe6rpCTxSxfbswi/W/Pz7zp1WWSNn4A0eW4mLkQUewCrXXtMj31lCg+iQyTkh/CkusZSq9eDflu7tjEDXUY6g=="], + + "@xhayper/discord-rpc": ["@xhayper/discord-rpc@1.3.3", "", { "dependencies": { "@discordjs/rest": "^2.6.1", "@vladfrangu/async_event_emitter": "^2.4.7", "discord-api-types": "^0.38.42", "ws": "^8.20.0" } }, "sha512-Ih48GHiua7TtZgKO+f0uZPhCeQqb84fY2qUys/oMh8UbUfiUkUJLVCmd/v2AK0/pV33euh0aqSXo7+9LiPSwGw=="], + "@xmldom/xmldom": ["@xmldom/xmldom@0.8.11", "", {}, "sha512-cQzWCtO6C8TQiYl1ruKNn2U6Ao4o4WBBcbL61yJl84x+j5sOWWFU9X7DpND8XZG3daDppSsigMdfAIl2upQBRw=="], "abbrev": ["abbrev@3.0.1", "", {}, "sha512-AO2ac6pjRB3SJmGJo+v5/aK6Omggp6fsLrs6wN9bd35ulu4cCwaAU9+7ZhXjeqHVkaHThLuzH0nZr0YpCDhygg=="], @@ -212,8 +223,6 @@ "base64-js": ["base64-js@1.5.1", "", {}, "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA=="], - "bindings": ["bindings@1.5.0", "", { "dependencies": { "file-uri-to-path": "1.0.0" } }, "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ=="], - "bl": ["bl@4.1.0", "", { "dependencies": { "buffer": "^5.5.0", "inherits": "^2.0.4", "readable-stream": "^3.4.0" } }, "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w=="], "boolean": ["boolean@3.2.0", "", {}, "sha512-d0II/GO9uf9lfUHH2BQsjxzRJZBdsjgsBiW4BvhWk/3qoKwQFjIDVN19PfX8F2D/r9PCMTtLWjYVCFrpeYUzsw=="], @@ -296,7 +305,7 @@ "dir-compare": ["dir-compare@4.2.0", "", { "dependencies": { "minimatch": "^3.0.5", "p-limit": "^3.1.0 " } }, 
"sha512-2xMCmOoMrdQIPHdsTawECdNPwlVFB9zGcz3kuhmBO6U3oU+UQjsue0i8ayLKpgBcm+hcXPMVSGUN9d+pvJ6+VQ=="], - "discord-rpc": ["discord-rpc@4.0.1", "", { "dependencies": { "node-fetch": "^2.6.1", "ws": "^7.3.1" }, "optionalDependencies": { "register-scheme": "github:devsnek/node-register-scheme" } }, "sha512-HOvHpbq5STRZJjQIBzwoKnQ0jHplbEWFWlPDwXXKm/bILh4nzjcg7mNqll0UY7RsjFoaXA7e/oYb/4lvpda2zA=="], + "discord-api-types": ["discord-api-types@0.38.43", "", {}, "sha512-sSoBf/nK6m7BGtw65mi+QBuvEWaHE8MMziFLqWL+gT6ME/BLg34dRSVKS3Husx40uU06bvxUc3/X+D9Y6/zAbw=="], "dmg-builder": ["dmg-builder@26.8.2", "", { "dependencies": { "app-builder-lib": "26.8.2", "builder-util": "26.8.1", "fs-extra": "^10.1.0", "iconv-lite": "^0.6.2", "js-yaml": "^4.1.0" }, "optionalDependencies": { "dmg-license": "^1.0.11" } }, "sha512-DaWI+p4DOqiFVZFMovdGYammBOyJAiHHFWUTQ0Z7gNc0twfdIN0LvyJ+vFsgZEDR1fjgbpCj690IVtbYIsZObQ=="], @@ -362,8 +371,6 @@ "fdir": ["fdir@6.5.0", "", { "peerDependencies": { "picomatch": "^3 || ^4" }, "optionalPeers": ["picomatch"] }, "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg=="], - "file-uri-to-path": ["file-uri-to-path@1.0.0", "", {}, "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw=="], - "filelist": ["filelist@1.0.6", "", { "dependencies": { "minimatch": "^5.0.1" } }, "sha512-5giy2PkLYY1cP39p17Ech+2xlpTRL9HLspOfEgm0L6CwBXBTgsK5ou0JtzYuepxkaQ/tvhCFIJ5uXo0OrM2DxA=="], "follow-redirects": ["follow-redirects@1.15.11", "", {}, "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ=="], @@ -480,6 +487,8 @@ "lru-cache": ["lru-cache@6.0.0", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA=="], + "magic-bytes.js": ["magic-bytes.js@1.13.0", "", {}, "sha512-afO2mnxW7GDTXMm5/AoN1WuOcdoKhtgXjIvHmobqTD1grNplhGdv3PFOyjCVmrnOZBIT/gD/koDKpYG+0mvHcg=="], + 
"make-fetch-happen": ["make-fetch-happen@14.0.3", "", { "dependencies": { "@npmcli/agent": "^3.0.0", "cacache": "^19.0.1", "http-cache-semantics": "^4.1.1", "minipass": "^7.0.2", "minipass-fetch": "^4.0.0", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "negotiator": "^1.0.0", "proc-log": "^5.0.0", "promise-retry": "^2.0.1", "ssri": "^12.0.0" } }, "sha512-QMjGbFTP0blj97EeidG5hk/QhKQ3T4ICckQGLgz38QF7Vgbk6e6FTARN8KhKxyBbWn8R0HU+bnw8aSoFPD4qtQ=="], "matcher": ["matcher@3.0.0", "", { "dependencies": { "escape-string-regexp": "^4.0.0" } }, "sha512-OkeDaAZ/bQCxeFAozM55PKcKU0yJMPGifLwV4Qgjitu+5MoAfSQN4lsLJeXZ1b8w0x+/Emda6MZgXS1jvsapng=="], @@ -526,8 +535,6 @@ "node-api-version": ["node-api-version@0.2.1", "", { "dependencies": { "semver": "^7.3.5" } }, "sha512-2xP/IGGMmmSQpI1+O/k72jF/ykvZ89JeuKX3TLJAYPDVLUalrshrLHkeVcCCZqG/eEa635cr8IBYzgnDvM2O8Q=="], - "node-fetch": ["node-fetch@2.7.0", "", { "dependencies": { "whatwg-url": "^5.0.0" }, "peerDependencies": { "encoding": "^0.1.0" }, "optionalPeers": ["encoding"] }, "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A=="], - "node-gyp": ["node-gyp@11.5.0", "", { "dependencies": { "env-paths": "^2.2.0", "exponential-backoff": "^3.1.1", "graceful-fs": "^4.2.6", "make-fetch-happen": "^14.0.3", "nopt": "^8.0.0", "proc-log": "^5.0.0", "semver": "^7.3.5", "tar": "^7.4.3", "tinyglobby": "^0.2.12", "which": "^5.0.0" }, "bin": { "node-gyp": "bin/node-gyp.js" } }, "sha512-ra7Kvlhxn5V9Slyus0ygMa2h+UqExPqUIkfk7Pc8QTLT956JLSy51uWFwHtIYy0vI8cB4BDhc/S03+880My/LQ=="], "nopt": ["nopt@8.1.0", "", { "dependencies": { "abbrev": "^3.0.0" }, "bin": { "nopt": "bin/nopt.js" } }, "sha512-ieGu42u/Qsa4TFktmaKEwM6MQH0pOWnaB3htzh0JRtx84+Mebc0cbZYN5bC+6WTZ4+77xrL9Pn5m7CV6VIkV7A=="], @@ -590,8 +597,6 @@ "readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, 
"sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="], - "register-scheme": ["register-scheme@github:devsnek/node-register-scheme#e7cc9a6", { "dependencies": { "bindings": "^1.3.0", "node-addon-api": "^1.3.0" } }, "devsnek-node-register-scheme-e7cc9a6", "sha512-VwUWN3aKIg/yn7T8axW20Y1+4wGALIQectBmkmwSJfLrCycpVepGP/+KHjXSL/Ga8N1SmewL49kESgIhW7HbWg=="], - "require-directory": ["require-directory@2.1.1", "", {}, "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q=="], "resedit": ["resedit@1.7.2", "", { "dependencies": { "pe-library": "^0.4.1" } }, "sha512-vHjcY2MlAITJhC0eRD/Vv8Vlgmu9Sd3LX9zZvtGzU5ZImdTN3+d6e/4mnTyV8vEbyf1sgNIrWxhWlrys52OkEA=="], @@ -676,14 +681,16 @@ "tmp-promise": ["tmp-promise@3.0.3", "", { "dependencies": { "tmp": "^0.2.0" } }, "sha512-RwM7MoPojPxsOBYnyd2hy0bxtIlVrihNs9pj5SUvY8Zz1sQcQG2tG1hSr8PDxfgEB8RNKDhqbIlroIarSNDNsQ=="], - "tr46": ["tr46@0.0.3", "", {}, "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="], - "truncate-utf8-bytes": ["truncate-utf8-bytes@1.0.2", "", { "dependencies": { "utf8-byte-length": "^1.0.1" } }, "sha512-95Pu1QXQvruGEhv62XCMO3Mm90GscOCClvrIUwCM0PYOXK3kaF3l3sIHxx71ThJfcbM2O5Au6SO3AWCSEfW4mQ=="], + "tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], + "type-fest": ["type-fest@0.13.1", "", {}, "sha512-34R7HTnG0XIJcBSn5XhDd7nNFPRcXYRZrBB2O2jdKqYODldSzBAqzsWoZYYvduky73toYS/ESqxPvkDf/F0XMg=="], "typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="], + "undici": ["undici@6.24.1", "", {}, "sha512-sC+b0tB1whOCzbtlx20fx3WgCXwkW627p4EA9uM+/tNNPkSS+eSEld6pAs9nDv7WbY1UUljBMYPtu9BCOrCWKA=="], + "undici-types": ["undici-types@7.18.2", "", {}, 
"sha512-AsuCzffGHJybSaRrmr5eHr81mwJU3kjw6M+uprWvCXiNeN9SOGwQ3Jn8jb8m3Z6izVgknn1R0FTCEAP2QrLY/w=="], "unique-filename": ["unique-filename@4.0.0", "", { "dependencies": { "unique-slug": "^5.0.0" } }, "sha512-XSnEewXmQ+veP7xX2dS5Q4yZAvO40cBN2MWkJ7D/6sW4Dg6wYBNwM1Vrnz1FhH5AdeLIlUXRI9e28z1YZi71NQ=="], @@ -702,10 +709,6 @@ "wcwidth": ["wcwidth@1.0.1", "", { "dependencies": { "defaults": "^1.0.3" } }, "sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg=="], - "webidl-conversions": ["webidl-conversions@3.0.1", "", {}, "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="], - - "whatwg-url": ["whatwg-url@5.0.0", "", { "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" } }, "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw=="], - "which": ["which@5.0.0", "", { "dependencies": { "isexe": "^3.1.1" }, "bin": { "node-which": "bin/which.js" } }, "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ=="], "wrap-ansi": ["wrap-ansi@7.0.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="], @@ -772,8 +775,6 @@ "cross-spawn/which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="], - "discord-rpc/ws": ["ws@7.5.10", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": "^5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ=="], - "electron/@types/node": ["@types/node@22.19.15", "", { "dependencies": { "undici-types": "~6.21.0" } }, 
"sha512-F0R/h2+dsy5wJAUe3tAU6oqa2qbWY5TpNfL/RGmo1y38hiyO1w3x2jPtt76wmuaJI4DQnOBu21cNXQ2STIUUWg=="], "electron-winstaller/fs-extra": ["fs-extra@7.0.1", "", { "dependencies": { "graceful-fs": "^4.1.2", "jsonfile": "^4.0.0", "universalify": "^0.1.0" } }, "sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw=="], diff --git a/changes/251-docs-site-sidebar.md b/changes/251-docs-site-sidebar.md new file mode 100644 index 0000000..ef61782 --- /dev/null +++ b/changes/251-docs-site-sidebar.md @@ -0,0 +1,6 @@ +type: docs +area: docs-site + +- Added a dedicated Subtitle Sidebar guide and linked it from the homepage and configuration docs. +- Linked Jimaku integration from the homepage to its dedicated docs page. +- Refreshed docs-site theme tokens and hover/selection styling for the updated pages. diff --git a/changes/252-youtube-playback-socket-path.md b/changes/252-youtube-playback-socket-path.md new file mode 100644 index 0000000..71e099b --- /dev/null +++ b/changes/252-youtube-playback-socket-path.md @@ -0,0 +1,5 @@ +type: fixed +area: main + +- Resolve the YouTube playback socket path lazily so startup honors CLI and config overrides. +- Add regression coverage for the lazy socket-path lookup during Windows mpv startup. diff --git a/config.example.jsonc b/config.example.jsonc index 2f5c223..de4e143 100644 --- a/config.example.jsonc +++ b/config.example.jsonc @@ -498,6 +498,7 @@ // ========================================== "discordPresence": { "enabled": false, // Enable optional Discord Rich Presence updates. Values: true | false + "presenceStyle": "default", // Presence card text preset: "default" (clean bilingual), "meme" (Mining and crafting), "japanese" (fully JP), or "minimal". "updateIntervalMs": 3000, // Minimum interval between presence payload updates. "debounceMs": 750 // Debounce delay used to collapse bursty presence updates. 
}, // Optional Discord Rich Presence activity card updates for current playback/study session. diff --git a/docs-site/.vitepress/config.ts b/docs-site/.vitepress/config.ts index 62e74b0..f3d282b 100644 --- a/docs-site/.vitepress/config.ts +++ b/docs-site/.vitepress/config.ts @@ -74,7 +74,9 @@ export default { { text: 'Configuration', link: '/configuration' }, { text: 'Keyboard Shortcuts', link: '/shortcuts' }, { text: 'Subtitle Annotations', link: '/subtitle-annotations' }, + { text: 'Subtitle Sidebar', link: '/subtitle-sidebar' }, { text: 'Immersion Tracking', link: '/immersion-tracking' }, + { text: 'JLPT Vocabulary Bundle', link: '/jlpt-vocab-bundle' }, { text: 'Troubleshooting', link: '/troubleshooting' }, ], }, diff --git a/docs-site/.vitepress/theme/index.ts b/docs-site/.vitepress/theme/index.ts index 1b21d17..bfd6185 100644 --- a/docs-site/.vitepress/theme/index.ts +++ b/docs-site/.vitepress/theme/index.ts @@ -10,13 +10,18 @@ let mermaidLoader: Promise | null = null; let plausibleTrackerInitialized = false; const MERMAID_MODAL_ID = 'mermaid-diagram-modal'; const PLAUSIBLE_DOMAIN = 'subminer.moe'; -const PLAUSIBLE_ENDPOINT = 'https://worker.subminer.moe/api/event'; +const PLAUSIBLE_ENABLED_HOSTNAMES = new Set(['docs.subminer.moe']); +const PLAUSIBLE_ENDPOINT = 'https://worker.subminer.moe/api/capture'; async function initPlausibleTracker() { if (typeof window === 'undefined' || plausibleTrackerInitialized) { return; } + if (!PLAUSIBLE_ENABLED_HOSTNAMES.has(window.location.hostname)) { + return; + } + const { init } = await import('@plausible-analytics/tracker'); init({ domain: PLAUSIBLE_DOMAIN, diff --git a/docs-site/.vitepress/theme/tui-theme.css b/docs-site/.vitepress/theme/tui-theme.css index a98a239..1ff3b2f 100644 --- a/docs-site/.vitepress/theme/tui-theme.css +++ b/docs-site/.vitepress/theme/tui-theme.css @@ -34,6 +34,25 @@ system-ui, sans-serif; --tui-transition: 180ms ease; + + /* Theme-specific values — overridden in .dark below */ + --tui-nav-bg: 
color-mix(in srgb, var(--vp-c-bg-alt) 88%, transparent); + --tui-table-hover-bg: color-mix(in srgb, var(--vp-c-bg-soft) 80%, transparent); + --tui-link-underline: color-mix(in srgb, var(--vp-c-brand-1) 40%, transparent); + --tui-selection-bg: hsla(267, 83%, 45%, 0.14); + --tui-hero-glow: hsla(267, 83%, 45%, 0.05); + --tui-step-hover-bg: var(--vp-c-bg-alt); + --tui-step-hover-glow: color-mix(in srgb, var(--vp-c-brand-1) 30%, transparent); +} + +.dark { + --tui-nav-bg: hsla(232, 23%, 18%, 0.82); + --tui-table-hover-bg: hsla(232, 23%, 18%, 0.4); + --tui-link-underline: hsla(267, 83%, 80%, 0.3); + --tui-selection-bg: hsla(267, 83%, 80%, 0.22); + --tui-hero-glow: hsla(267, 83%, 80%, 0.06); + --tui-step-hover-bg: hsla(232, 23%, 18%, 0.6); + --tui-step-hover-glow: hsla(267, 83%, 80%, 0.3); } :root { @@ -48,7 +67,7 @@ /* === Selection === */ ::selection { - background: hsla(267, 83%, 80%, 0.22); + background: var(--tui-selection-bg); color: var(--vp-c-text-1); } @@ -102,7 +121,7 @@ button, } .VPNav .VPNavBar:not(.has-sidebar) { - background: hsla(232, 23%, 18%, 0.82); + background: var(--tui-nav-bg); } .VPNav .VPNavBar.has-sidebar .content { @@ -245,13 +264,13 @@ button, } .vp-doc table tr:hover td { - background: hsla(232, 23%, 18%, 0.4); + background: var(--tui-table-hover-bg); } /* === Links === */ .vp-doc a { text-decoration: none; - border-bottom: 1px solid hsla(267, 83%, 80%, 0.3); + border-bottom: 1px solid var(--tui-link-underline); transition: border-color var(--tui-transition), color var(--tui-transition); } @@ -653,7 +672,7 @@ body { height: 400px; background: radial-gradient( ellipse at center, - hsla(267, 83%, 80%, 0.06) 0%, + var(--tui-hero-glow) 0%, transparent 70% ); pointer-events: none; diff --git a/docs-site/architecture.md b/docs-site/architecture.md index 97a01ac..9708608 100644 --- a/docs-site/architecture.md +++ b/docs-site/architecture.md @@ -39,6 +39,7 @@ src/ types.ts # Shared type definitions main/ # Main-process composition/runtime adapters 
app-lifecycle.ts # App lifecycle + app-ready runtime runner factories + character-dictionary-runtime.ts # Character-dictionary orchestration/public runtime API cli-runtime.ts # CLI command runtime service adapters config-validation.ts # Startup/hot-reload config error formatting and fail-fast helpers dependencies.ts # Shared dependency builders for IPC/runtime services @@ -53,6 +54,7 @@ src/ startup-lifecycle.ts # Lifecycle runtime runner adapter state.ts # Application runtime state container + reducer transitions subsync-runtime.ts # Subsync command runtime adapter + character-dictionary-runtime/ # Character-dictionary fetch/build/cache modules + focused tests runtime/ composers/ # High-level composition clusters used by main.ts domains/ # Domain barrel exports (startup/overlay/mpv/jellyfin/...) diff --git a/docs-site/changelog.md b/docs-site/changelog.md index b5412d7..750d53f 100644 --- a/docs-site/changelog.md +++ b/docs-site/changelog.md @@ -1,5 +1,19 @@ # Changelog +## v0.10.0 (2026-03-29) +- Fixed stats startup so the immersion tracker can run when `Bun.serve` is unavailable. +- Added a Node `http` fallback for Electron/runtime paths that do not expose Bun, so stats keeps working there too. +- Updated Discord Rich Presence to the maintained `@xhayper/discord-rpc` wrapper. +- Fixed the macOS visible-overlay toggle path so manual hides stay hidden and the plugin uses the explicit visible-overlay toggle command. +- Restored macOS mpv passthrough while the overlay subtitle sidebar is open so clicks outside the sidebar can refocus mpv and keep native keybindings working. + +## v0.9.3 (2026-03-25) +- Moved YouTube primary subtitle language defaults to `youtube.primarySubLanguages`. +- Removed the placeholder YouTube subtitle retime step; downloaded primary subtitle tracks are now used directly. +- Removed the old internal YouTube retime helper and its tests. +- Clarified optional `alass` / `ffsubsync` subtitle-sync setup and fallback behavior in the docs. 
+- Removed the legacy `youtubeSubgen.primarySubLanguages` config path from generated config and docs. + ## v0.9.2 (2026-03-25) - Fixed overlay pointer tracking so Windows click-through toggles immediately when the cursor enters or leaves subtitle regions. - Fixed Windows overlay window tracking on scaled displays by converting native tracked window bounds to Electron DIP coordinates. diff --git a/docs-site/configuration.md b/docs-site/configuration.md index d06654d..0b6e2b6 100644 --- a/docs-site/configuration.md +++ b/docs-site/configuration.md @@ -390,6 +390,8 @@ The sidebar is only available when the active subtitle source has been parsed in `embedded` layout is intended to act like a split-pane view: it reserves player space with a right-side video margin and keeps interaction in both the player area and sidebar. If you see unexpected offset behavior in your environment, switch back to `overlay` to isolate sidebar placement. +For full details on layout modes, behavior, and the keyboard shortcut, see the [Subtitle Sidebar](/subtitle-sidebar) page. + `jlptColors` keys are: | Key | Default | Description | @@ -1197,30 +1199,38 @@ Discord Rich Presence is optional and disabled by default. 
When enabled, SubMine { "discordPresence": { "enabled": true, + "presenceStyle": "default", "updateIntervalMs": 3000, "debounceMs": 750 } } ``` -| Option | Values | Description | -| ------------------ | --------------- | ---------------------------------------------------------- | -| `enabled` | `true`, `false` | Enable Discord Rich Presence updates (default: `false`) | -| `updateIntervalMs` | number | Minimum interval between activity updates in milliseconds | -| `debounceMs` | number | Debounce window for bursty playback events in milliseconds | +| Option | Values | Description | +| ------------------ | ------------------------------------------------- | ---------------------------------------------------------- | +| `enabled` | `true`, `false` | Enable Discord Rich Presence updates (default: `false`) | +| `presenceStyle` | `"default"`, `"meme"`, `"japanese"`, `"minimal"` | Card text preset (default: `"default"`) | +| `updateIntervalMs` | number | Minimum interval between activity updates in milliseconds | +| `debounceMs` | number | Debounce window for bursty playback events in milliseconds | Setup steps: 1. Set `discordPresence.enabled` to `true`. -2. Restart SubMiner. +2. Optionally set `discordPresence.presenceStyle` to choose a card text preset. +3. Restart SubMiner. -SubMiner uses a fixed official activity card style for all users: +#### Presence style presets -- Details: current media title while playing (fallback: `Mining and crafting (Anki cards)` when idle/disconnected) -- State: `Playing mm:ss / mm:ss` or `Paused mm:ss / mm:ss` (fallback: `Idle`) -- Large image key/text: `subminer-logo` / `SubMiner` -- Small image key/text: `study` / `Sentence Mining` -- No activity button by default +While playing media, the **Details** line always shows the current media title and **State** shows `Playing mm:ss / mm:ss` or `Paused mm:ss / mm:ss`. The preset controls what appears when idle and the tooltip text on images. 
+ +| Preset | Idle details | Small image text | Vibe | +| ------------ | ----------------------------------- | ------------------ | --------------------------------------- | +| **`default`**| `Sentence Mining` | `日本語学習中` | Clean, bilingual flair | +| `meme` | `Mining and crafting (Anki cards)` | `Sentence Mining` | Minecraft-inspired joke | +| `japanese` | `文の採掘中` | `イマージョン学習` | Fully Japanese | +| `minimal` | `SubMiner` | *(none)* | Bare essentials, no small image overlay | + +All presets use the `subminer-logo` large image with `SubMiner` tooltip. No activity button is shown by default. Troubleshooting: diff --git a/docs-site/index.md b/docs-site/index.md index 62c98e2..f11b8d1 100644 --- a/docs-site/index.md +++ b/docs-site/index.md @@ -67,7 +67,7 @@ features: alt: Subtitle download icon title: Subtitle Download & Sync details: Search and pull subtitles from Jimaku, then auto-sync timing with alass or ffsubsync — all from the overlay. - link: /configuration#jimaku + link: /jimaku-integration linkText: Jimaku integration - icon: src: /assets/tokenization.svg @@ -223,12 +223,12 @@ const demoAssetVersion = '20260223-2'; } .workflow-step:hover { - background: hsla(232, 23%, 18%, 0.6); + background: var(--tui-step-hover-bg); } .workflow-step:hover .step-number { color: var(--vp-c-brand-1); - text-shadow: 0 0 12px hsla(267, 83%, 80%, 0.3); + text-shadow: 0 0 12px var(--tui-step-hover-glow); } .workflow-connector { diff --git a/docs-site/installation.md b/docs-site/installation.md index a2aff07..bac4bcb 100644 --- a/docs-site/installation.md +++ b/docs-site/installation.md @@ -172,7 +172,7 @@ Install `mpv` separately and ensure `mpv.exe` is on `PATH`. `ffmpeg` is still re ### Windows Usage Notes - Launch `SubMiner.exe` once to let the first-run setup flow seed `%APPDATA%\\SubMiner\\config.jsonc`, offer mpv plugin installation, open bundled Yomitan settings, and optionally create `SubMiner mpv` Start Menu/Desktop shortcuts. 
-- If you use the mpv plugin, leave `binary_path` empty unless SubMiner is installed in a non-standard location. +- First-run mpv plugin installs pin `binary_path` to the current `SubMiner.exe` automatically. Manual plugin configs can leave `binary_path` empty unless SubMiner is installed in a non-standard location. - Windows plugin installs rewrite `socket_path` to `\\.\pipe\subminer-socket`; do not keep `/tmp/subminer-socket` on Windows. - Native window tracking is built in on Windows; no `xdotool`, `xwininfo`, or compositor-specific helper is required. @@ -201,6 +201,7 @@ mpv must be launched with `--input-ipc-server=/tmp/subminer-socket` for SubMiner ::: On Windows, the packaged plugin config is rewritten to `socket_path=\\.\pipe\subminer-socket`. +First-run setup also pins `binary_path` to the current app binary so mpv launches the same SubMiner build that installed the plugin. ```bash # Option 1: install from release assets bundle diff --git a/docs-site/jimaku-integration.md b/docs-site/jimaku-integration.md index 4d74bd6..2c6e9b0 100644 --- a/docs-site/jimaku-integration.md +++ b/docs-site/jimaku-integration.md @@ -131,6 +131,6 @@ Verify mpv is running and connected via IPC. 
SubMiner loads the subtitle by issu ## Related -- [Configuration Reference](/configuration#jimaku) — full config section +- [Configuration Reference](/configuration#jimaku) — full config options - [Mining Workflow](/mining-workflow#jimaku-subtitle-search) — how Jimaku fits into the sentence mining loop - [Troubleshooting](/troubleshooting#jimaku) — additional error guidance diff --git a/docs-site/plausible.test.ts b/docs-site/plausible.test.ts index df6707f..59dba88 100644 --- a/docs-site/plausible.test.ts +++ b/docs-site/plausible.test.ts @@ -6,14 +6,17 @@ const docsThemePath = new URL('./.vitepress/theme/index.ts', import.meta.url); const docsConfigContents = readFileSync(docsConfigPath, 'utf8'); const docsThemeContents = readFileSync(docsThemePath, 'utf8'); -test('docs site keeps docs hostname while sending plausible events to subminer.moe via worker.subminer.moe', () => { +test('docs site keeps docs hostname while sending plausible events to subminer.moe via worker.subminer.moe capture endpoint', () => { expect(docsConfigContents).toContain("hostname: 'https://docs.subminer.moe'"); expect(docsThemeContents).toContain("const PLAUSIBLE_DOMAIN = 'subminer.moe'"); + expect(docsThemeContents).toContain('const PLAUSIBLE_ENABLED_HOSTNAMES = new Set(['); + expect(docsThemeContents).toContain("'docs.subminer.moe'"); expect(docsThemeContents).toContain( - "const PLAUSIBLE_ENDPOINT = 'https://worker.subminer.moe/api/event'", + "const PLAUSIBLE_ENDPOINT = 'https://worker.subminer.moe/api/capture'", ); expect(docsThemeContents).toContain('@plausible-analytics/tracker'); expect(docsThemeContents).toContain('const { init } = await import'); + expect(docsThemeContents).toContain('!PLAUSIBLE_ENABLED_HOSTNAMES.has(window.location.hostname)'); expect(docsThemeContents).toContain('domain: PLAUSIBLE_DOMAIN'); expect(docsThemeContents).toContain('endpoint: PLAUSIBLE_ENDPOINT'); expect(docsThemeContents).toContain('outboundLinks: true'); diff --git 
a/docs-site/public/config.example.jsonc b/docs-site/public/config.example.jsonc index 2f5c223..de4e143 100644 --- a/docs-site/public/config.example.jsonc +++ b/docs-site/public/config.example.jsonc @@ -498,6 +498,7 @@ // ========================================== "discordPresence": { "enabled": false, // Enable optional Discord Rich Presence updates. Values: true | false + "presenceStyle": "default", // Presence card text preset: "default" (clean bilingual), "meme" (Mining and crafting), "japanese" (fully JP), or "minimal". "updateIntervalMs": 3000, // Minimum interval between presence payload updates. "debounceMs": 750 // Debounce delay used to collapse bursty presence updates. }, // Optional Discord Rich Presence activity card updates for current playback/study session. diff --git a/docs-site/subtitle-sidebar.md b/docs-site/subtitle-sidebar.md new file mode 100644 index 0000000..42be19a --- /dev/null +++ b/docs-site/subtitle-sidebar.md @@ -0,0 +1,71 @@ +# Subtitle Sidebar + +The subtitle sidebar displays the full parsed cue list for the active subtitle file as a scrollable panel alongside mpv. It lets you review past and upcoming lines, click any cue to seek directly to that moment, and follow along without depending on the transient overlay subtitles. + +The sidebar is opt-in and disabled by default. Enable it under `subtitleSidebar.enabled` in your config. + +## How It Works + +When SubMiner parses the active subtitle source into a cue list, the sidebar becomes available. Toggle it with the `\` key (configurable via `subtitleSidebar.toggleKey`). While open: + +- The active cue is highlighted and kept in view as playback advances (when `autoScroll` is `true`). +- Clicking any cue seeks mpv to that timestamp. +- The sidebar stays synchronized with the overlay — media transitions and subtitle source changes update both simultaneously. + +The sidebar only appears when a parsed cue list is available. 
External subtitle sources that SubMiner cannot parse (for example, embedded ASS tracks rendered directly by mpv) will not populate the sidebar. + +## Layout Modes + +Two layout modes are available via `subtitleSidebar.layout`: + +**`overlay`** (default) — The sidebar floats over mpv as a panel. It does not affect the player window size or position. + +**`embedded`** — Reserves space on the right side of the player and shifts the video area to mimic a split-pane layout. Useful if you want the cue list visible without it covering the video. If you see unexpected positioning in your environment, switch back to `overlay` to isolate the issue. + +## Configuration + +Enable and configure the sidebar under `subtitleSidebar` in your config file: + +```json +{ + "subtitleSidebar": { + "enabled": false, + "autoOpen": false, + "layout": "overlay", + "toggleKey": "Backslash", + "pauseVideoOnHover": false, + "autoScroll": true, + "fontFamily": "\"M PLUS 1\", \"Noto Sans CJK JP\", sans-serif", + "fontSize": 16 + } +} +``` + +| Option | Type | Default | Description | +| --------------------------- | ------- | ------------ | -------------------------------------------------------------------------------------------------- | +| `enabled` | boolean | `false` | Enable subtitle sidebar support | +| `autoOpen` | boolean | `false` | Open the sidebar automatically on overlay startup | +| `layout` | string | `"overlay"` | `"overlay"` floats over mpv; `"embedded"` reserves right-side player space | +| `toggleKey` | string | `"Backslash"` | `KeyboardEvent.code` for the toggle shortcut | +| `pauseVideoOnHover` | boolean | `false` | Pause playback while hovering the cue list | +| `autoScroll` | boolean | `true` | Keep the active cue in view during playback | +| `maxWidth` | number | `420` | Maximum sidebar width in CSS pixels | +| `opacity` | number | `0.95` | Sidebar opacity between `0` and `1` | +| `backgroundColor` | string | — | Sidebar shell background color | +| `textColor` | string | — 
| Default cue text color | +| `fontFamily` | string | — | CSS `font-family` applied to cue text | +| `fontSize` | number | `16` | Base cue font size in CSS pixels | +| `timestampColor` | string | — | Cue timestamp color | +| `activeLineColor` | string | — | Active cue text color | +| `activeLineBackgroundColor` | string | — | Active cue background color | +| `hoverLineBackgroundColor` | string | — | Hovered cue background color | + +Default colors use Catppuccin Macchiato with a semi-transparent shell so the panel stays readable without feeling like a solid overlay. + +## Keyboard Shortcut + +| Key | Action | Config key | +| --- | ----------------------- | ------------------------------ | +| `\` | Toggle subtitle sidebar | `subtitleSidebar.toggleKey` | + +The toggle is overlay-local and only opens when SubMiner has a parsed cue list for the active subtitle source. See [Keyboard Shortcuts](/shortcuts) for the full shortcut reference. diff --git a/docs/README.md b/docs/README.md index 229f826..75ed9d6 100644 --- a/docs/README.md +++ b/docs/README.md @@ -21,6 +21,7 @@ Read when: you need internal architecture, workflow, verification, or release gu - New feature or refactor: [Workflow](./workflow/README.md), then [Architecture](./architecture/README.md) - Test/build/release work: [Verification](./workflow/verification.md), then [Release Guide](./RELEASING.md) +- Coverage lane selection or LCOV artifact path: [Verification](./workflow/verification.md) - “What owns this behavior?”: [Domains](./architecture/domains.md) - “Can these modules depend on each other?”: [Layering](./architecture/layering.md) - “What doc should exist for this?”: [Catalog](./knowledge-base/catalog.md) diff --git a/docs/architecture/README.md b/docs/architecture/README.md index ea23380..c384176 100644 --- a/docs/architecture/README.md +++ b/docs/architecture/README.md @@ -3,7 +3,7 @@ # Architecture Map Status: active -Last verified: 2026-03-13 +Last verified: 2026-03-26 Owner: Kyle Yasuda Read 
when: runtime ownership, composition boundaries, or layering questions @@ -24,9 +24,11 @@ The desktop app keeps `src/main.ts` as composition root and pushes behavior into ## Current Shape - `src/main/` owns composition, runtime setup, IPC wiring, and app lifecycle adapters. +- `src/main/boot/` owns boot-phase assembly seams so `src/main.ts` can stay focused on lifecycle coordination and startup-path selection. - `src/core/services/` owns focused runtime services plus pure or side-effect-bounded logic. - `src/renderer/` owns overlay rendering and input behavior. - `src/config/` owns config definitions, defaults, loading, and resolution. +- `src/types/` owns shared cross-runtime contracts via domain entrypoints; `src/types.ts` stays a compatibility barrel. - `src/main/runtime/composers/` owns larger domain compositions. ## Architecture Intent diff --git a/docs/architecture/domains.md b/docs/architecture/domains.md index 0a910ac..9f4a7e6 100644 --- a/docs/architecture/domains.md +++ b/docs/architecture/domains.md @@ -3,7 +3,7 @@ # Domain Ownership Status: active -Last verified: 2026-03-13 +Last verified: 2026-03-26 Owner: Kyle Yasuda Read when: you need to find the owner module for a behavior or test surface @@ -23,17 +23,28 @@ Read when: you need to find the owner module for a behavior or test surface - Anki workflow: `src/anki-integration/`, `src/core/services/anki-jimaku*.ts` - Immersion tracking: `src/core/services/immersion-tracker/` Includes stats storage/query schema such as `imm_videos`, `imm_media_art`, and `imm_youtube_videos` for per-video and YouTube-specific library metadata. 
-- AniList tracking: `src/core/services/anilist/`, `src/main/runtime/composers/anilist-*` +- AniList tracking + character dictionary: `src/core/services/anilist/`, `src/main/runtime/composers/anilist-*`, `src/main/character-dictionary-runtime.ts`, `src/main/character-dictionary-runtime/` - Jellyfin integration: `src/core/services/jellyfin*.ts`, `src/main/runtime/composers/jellyfin-*` - Window trackers: `src/window-trackers/` - Stats app: `stats/` - Public docs site: `docs-site/` +## Shared Contract Entry Points + +- Config + app-state contracts: `src/types/config.ts` +- Subtitle/token/media annotation contracts: `src/types/subtitle.ts` +- Runtime/window/controller/Electron bridge contracts: `src/types/runtime.ts` +- Anki-specific contracts: `src/types/anki.ts` +- External integration contracts: `src/types/integrations.ts` +- Runtime-option contracts: `src/types/runtime-options.ts` +- Compatibility-only barrel: `src/types.ts` + ## Ownership Heuristics - Runtime wiring or dependency setup: start in `src/main/` - Business logic or service behavior: start in `src/core/services/` - UI interaction or overlay DOM behavior: start in `src/renderer/` - Command parsing or mpv launch flow: start in `launcher/` +- Shared contract changes: add or edit the narrowest `src/types/<domain>.ts` entrypoint; only touch `src/types.ts` for compatibility exports. - User-facing docs: `docs-site/` - Internal process/docs: `docs/` diff --git a/docs/workflow/README.md b/docs/workflow/README.md index df4e327..4143402 100644 --- a/docs/workflow/README.md +++ b/docs/workflow/README.md @@ -13,6 +13,7 @@ This section is the internal workflow map for contributors and agents. 
- [Planning](./planning.md) - when to write a lightweight plan vs a full execution plan - [Verification](./verification.md) - maintained test/build lanes and handoff gate +- [Agent Plugins](./agent-plugins.md) - repo-local plugin ownership for agent workflow skills - [Release Guide](../RELEASING.md) - tagged release workflow ## Default Flow diff --git a/docs/workflow/agent-plugins.md b/docs/workflow/agent-plugins.md new file mode 100644 index 0000000..44d6238 --- /dev/null +++ b/docs/workflow/agent-plugins.md @@ -0,0 +1,32 @@ + + +# Agent Plugins + +Status: active +Last verified: 2026-03-26 +Owner: Kyle Yasuda +Read when: packaging or migrating repo-local agent workflow skills into plugins + +## SubMiner Workflow Plugin + +- Canonical plugin path: `plugins/subminer-workflow/` +- Marketplace catalog: `.agents/plugins/marketplace.json` +- Canonical skill sources: + - `plugins/subminer-workflow/skills/subminer-scrum-master/` + - `plugins/subminer-workflow/skills/subminer-change-verification/` + +## Migration Rule + +- Plugin-owned skills are the source of truth. +- `.agents/skills/subminer-*` remain only as compatibility shims. +- Existing script entrypoints under `.agents/skills/subminer-change-verification/scripts/` stay as wrappers so historical commands do not break. + +## Backlog + +- Prefer Backlog.md MCP when the host session exposes it. +- If MCP is unavailable, use repo-local `backlog/` files and record that fallback. + +## Verification + +- For plugin/docs-only changes, start with `bun run test:docs:kb`. +- Use the plugin-owned verifier when the change crosses from docs into scripts or workflow logic. 
diff --git a/docs/workflow/verification.md b/docs/workflow/verification.md index a3212d5..27c8860 100644 --- a/docs/workflow/verification.md +++ b/docs/workflow/verification.md @@ -31,8 +31,15 @@ bun run docs:build - Config/schema/defaults: `bun run test:config`, then `bun run generate:config-example` if template/defaults changed - Launcher/plugin: `bun run test:launcher` or `bun run test:env` - Runtime-compat / compiled behavior: `bun run test:runtime:compat` +- Coverage for the maintained source lane: `bun run test:coverage:src` - Deep/local full gate: default handoff gate above +## Coverage Reporting + +- `bun run test:coverage:src` runs the maintained `test:src` lane through a sharded coverage runner: one Bun coverage process per test file, then merged LCOV output. +- Machine-readable output lands at `coverage/test-src/lcov.info`. +- CI and release quality-gate runs upload that LCOV file as the `coverage-test-src` artifact. + ## Rules - Capture exact failing command and error when verification breaks. 
diff --git a/launcher/commands/command-modules.test.ts b/launcher/commands/command-modules.test.ts index 751f1ac..4a912e8 100644 --- a/launcher/commands/command-modules.test.ts +++ b/launcher/commands/command-modules.test.ts @@ -227,11 +227,7 @@ test('stats background command launches attached daemon control command with res assert.equal(handled, true); assert.deepEqual(harness.forwarded, [ - [ - '--stats-daemon-start', - '--stats-response-path', - '/tmp/subminer-stats-test/response.json', - ], + ['--stats-daemon-start', '--stats-response-path', '/tmp/subminer-stats-test/response.json'], ]); assert.equal(harness.removedPaths.length, 1); }); @@ -257,11 +253,7 @@ test('stats command waits for attached app exit after startup response', async ( const final = await statsCommand; assert.equal(final, true); assert.deepEqual(harness.forwarded, [ - [ - '--stats', - '--stats-response-path', - '/tmp/subminer-stats-test/response.json', - ], + ['--stats', '--stats-response-path', '/tmp/subminer-stats-test/response.json'], ]); assert.equal(harness.removedPaths.length, 1); }); @@ -317,11 +309,7 @@ test('stats stop command forwards stop flag to the app', async () => { assert.equal(handled, true); assert.deepEqual(harness.forwarded, [ - [ - '--stats-daemon-stop', - '--stats-response-path', - '/tmp/subminer-stats-test/response.json', - ], + ['--stats-daemon-stop', '--stats-response-path', '/tmp/subminer-stats-test/response.json'], ]); assert.equal(harness.removedPaths.length, 1); }); diff --git a/launcher/commands/playback-command.ts b/launcher/commands/playback-command.ts index 81840a3..f0c8da9 100644 --- a/launcher/commands/playback-command.ts +++ b/launcher/commands/playback-command.ts @@ -14,6 +14,7 @@ import { waitForUnixSocketReady, } from '../mpv.js'; import type { Args } from '../types.js'; +import { nowMs } from '../time.js'; import type { LauncherCommandContext } from './context.js'; import { ensureLauncherSetupReady } from '../setup-gate.js'; import { @@ -116,7 +117,7 @@ 
async function ensurePlaybackSetupReady(context: LauncherCommandContext): Promis child.unref(); }, sleep: (ms) => new Promise((resolve) => setTimeout(resolve, ms)), - now: () => Date.now(), + now: () => nowMs(), timeoutMs: SETUP_WAIT_TIMEOUT_MS, pollIntervalMs: SETUP_POLL_INTERVAL_MS, }); @@ -209,7 +210,11 @@ export async function runPlaybackCommandWithDeps( pluginRuntimeConfig.autoStartPauseUntilReady; if (shouldPauseUntilOverlayReady) { - deps.log('info', args.logLevel, 'Configured to pause mpv until overlay and tokenization are ready'); + deps.log( + 'info', + args.logLevel, + 'Configured to pause mpv until overlay and tokenization are ready', + ); } await deps.startMpv( @@ -250,7 +255,11 @@ export async function runPlaybackCommandWithDeps( if (ready) { deps.log('info', args.logLevel, 'MPV IPC socket ready, relying on mpv plugin auto-start'); } else { - deps.log('info', args.logLevel, 'MPV IPC socket not ready yet, relying on mpv plugin auto-start'); + deps.log( + 'info', + args.logLevel, + 'MPV IPC socket not ready yet, relying on mpv plugin auto-start', + ); } } else if (ready) { deps.log( diff --git a/launcher/commands/stats-command.ts b/launcher/commands/stats-command.ts index b8e98a4..9751d6f 100644 --- a/launcher/commands/stats-command.ts +++ b/launcher/commands/stats-command.ts @@ -2,6 +2,7 @@ import fs from 'node:fs'; import os from 'node:os'; import path from 'node:path'; import { runAppCommandAttached } from '../mpv.js'; +import { nowMs } from '../time.js'; import { sleep } from '../util.js'; import type { LauncherCommandContext } from './context.js'; @@ -45,8 +46,8 @@ const defaultDeps: StatsCommandDeps = { runAppCommandAttached: (appPath, appArgs, logLevel, label) => runAppCommandAttached(appPath, appArgs, logLevel, label), waitForStatsResponse: async (responsePath, signal) => { - const deadline = Date.now() + STATS_STARTUP_RESPONSE_TIMEOUT_MS; - while (Date.now() < deadline) { + const deadline = nowMs() + STATS_STARTUP_RESPONSE_TIMEOUT_MS; + while 
(nowMs() < deadline) { if (signal?.aborted) { return { ok: false, diff --git a/launcher/config/args-normalizer.test.ts b/launcher/config/args-normalizer.test.ts new file mode 100644 index 0000000..579e273 --- /dev/null +++ b/launcher/config/args-normalizer.test.ts @@ -0,0 +1,155 @@ +import assert from 'node:assert/strict'; +import fs from 'node:fs'; +import os from 'node:os'; +import path from 'node:path'; +import test from 'node:test'; +import { + applyInvocationsToArgs, + applyRootOptionsToArgs, + createDefaultArgs, +} from './args-normalizer.js'; + +class ExitSignal extends Error { + code: number; + + constructor(code: number) { + super(`exit:${code}`); + this.code = code; + } +} + +function withProcessExitIntercept(callback: () => void): ExitSignal { + const originalExit = process.exit; + try { + process.exit = ((code?: number) => { + throw new ExitSignal(code ?? 0); + }) as typeof process.exit; + callback(); + } catch (error) { + if (error instanceof ExitSignal) { + return error; + } + throw error; + } finally { + process.exit = originalExit; + } + + throw new Error('expected process.exit'); +} + +function withTempDir(fn: (dir: string) => T): T { + const dir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-launcher-args-')); + try { + return fn(dir); + } finally { + fs.rmSync(dir, { recursive: true, force: true }); + } +} + +test('createDefaultArgs normalizes configured language codes and env thread override', () => { + const originalThreads = process.env.SUBMINER_WHISPER_THREADS; + process.env.SUBMINER_WHISPER_THREADS = '7'; + + try { + const parsed = createDefaultArgs({ + primarySubLanguages: [' JA ', 'jpn', 'ja'], + secondarySubLanguages: ['en', 'ENG', ''], + whisperThreads: 2, + }); + + assert.deepEqual(parsed.youtubePrimarySubLangs, ['ja', 'jpn']); + assert.deepEqual(parsed.youtubeSecondarySubLangs, ['en', 'eng']); + assert.deepEqual(parsed.youtubeAudioLangs, ['ja', 'jpn', 'en', 'eng']); + assert.equal(parsed.whisperThreads, 7); + 
assert.equal(parsed.youtubeWhisperSourceLanguage, 'ja'); + } finally { + if (originalThreads === undefined) { + delete process.env.SUBMINER_WHISPER_THREADS; + } else { + process.env.SUBMINER_WHISPER_THREADS = originalThreads; + } + } +}); + +test('applyRootOptionsToArgs maps file, directory, and url targets', () => { + withTempDir((dir) => { + const filePath = path.join(dir, 'movie.mkv'); + const folderPath = path.join(dir, 'anime'); + fs.writeFileSync(filePath, 'x'); + fs.mkdirSync(folderPath); + + const fileParsed = createDefaultArgs({}); + applyRootOptionsToArgs(fileParsed, {}, filePath); + assert.equal(fileParsed.targetKind, 'file'); + assert.equal(fileParsed.target, filePath); + + const dirParsed = createDefaultArgs({}); + applyRootOptionsToArgs(dirParsed, {}, folderPath); + assert.equal(dirParsed.directory, folderPath); + assert.equal(dirParsed.target, ''); + assert.equal(dirParsed.targetKind, ''); + + const urlParsed = createDefaultArgs({}); + applyRootOptionsToArgs(urlParsed, {}, 'https://example.test/video'); + assert.equal(urlParsed.targetKind, 'url'); + assert.equal(urlParsed.target, 'https://example.test/video'); + }); +}); + +test('applyRootOptionsToArgs rejects unsupported targets', () => { + const parsed = createDefaultArgs({}); + + const error = withProcessExitIntercept(() => { + applyRootOptionsToArgs(parsed, {}, '/definitely/missing/subminer-target'); + }); + + assert.equal(error.code, 1); + assert.match(error.message, /exit:1/); +}); + +test('applyInvocationsToArgs maps config and jellyfin invocation state', () => { + const parsed = createDefaultArgs({}); + + applyInvocationsToArgs(parsed, { + jellyfinInvocation: { + action: 'play', + play: true, + server: 'https://jf.example', + username: 'alice', + password: 'secret', + logLevel: 'debug', + }, + configInvocation: { + action: 'show', + logLevel: 'warn', + }, + mpvInvocation: null, + appInvocation: null, + dictionaryTriggered: false, + dictionaryTarget: null, + dictionaryLogLevel: null, + 
statsTriggered: false, + statsBackground: false, + statsStop: false, + statsCleanup: false, + statsCleanupVocab: false, + statsCleanupLifetime: false, + statsLogLevel: null, + doctorTriggered: false, + doctorLogLevel: null, + doctorRefreshKnownWords: false, + texthookerTriggered: false, + texthookerLogLevel: null, + }); + + assert.equal(parsed.jellyfin, false); + assert.equal(parsed.jellyfinPlay, true); + assert.equal(parsed.jellyfinDiscovery, false); + assert.equal(parsed.jellyfinLogin, false); + assert.equal(parsed.jellyfinLogout, false); + assert.equal(parsed.jellyfinServer, 'https://jf.example'); + assert.equal(parsed.jellyfinUsername, 'alice'); + assert.equal(parsed.jellyfinPassword, 'secret'); + assert.equal(parsed.configShow, true); + assert.equal(parsed.logLevel, 'warn'); +}); diff --git a/launcher/config/cli-parser-builder.test.ts b/launcher/config/cli-parser-builder.test.ts new file mode 100644 index 0000000..110b9ad --- /dev/null +++ b/launcher/config/cli-parser-builder.test.ts @@ -0,0 +1,37 @@ +import assert from 'node:assert/strict'; +import test from 'node:test'; +import { parseCliPrograms, resolveTopLevelCommand } from './cli-parser-builder.js'; + +test('resolveTopLevelCommand skips root options and finds the first command', () => { + assert.deepEqual(resolveTopLevelCommand(['--backend', 'macos', 'config', 'show']), { + name: 'config', + index: 2, + }); +}); + +test('resolveTopLevelCommand respects the app alias after root options', () => { + assert.deepEqual(resolveTopLevelCommand(['--log-level', 'debug', 'bin', '--foo']), { + name: 'bin', + index: 2, + }); +}); + +test('parseCliPrograms keeps root options and target when no command is present', () => { + const result = parseCliPrograms(['--backend', 'x11', '/tmp/movie.mkv'], 'subminer'); + + assert.equal(result.options.backend, 'x11'); + assert.equal(result.rootTarget, '/tmp/movie.mkv'); + assert.equal(result.invocations.appInvocation, null); +}); + +test('parseCliPrograms routes app alias 
arguments through passthrough mode', () => { + const result = parseCliPrograms( + ['--backend', 'macos', 'bin', '--anilist', '--log-level', 'debug'], + 'subminer', + ); + + assert.equal(result.options.backend, 'macos'); + assert.deepEqual(result.invocations.appInvocation, { + appArgs: ['--anilist', '--log-level', 'debug'], + }); +}); diff --git a/launcher/config/cli-parser-builder.ts b/launcher/config/cli-parser-builder.ts index 75221f7..d871c11 100644 --- a/launcher/config/cli-parser-builder.ts +++ b/launcher/config/cli-parser-builder.ts @@ -236,17 +236,12 @@ export function parseCliPrograms( normalizedAction !== 'rebuild' && normalizedAction !== 'backfill' ) { - throw new Error( - 'Invalid stats action. Valid values are cleanup, rebuild, or backfill.', - ); + throw new Error('Invalid stats action. Valid values are cleanup, rebuild, or backfill.'); } if (normalizedAction && (statsBackground || statsStop)) { throw new Error('Stats background and stop flags cannot be combined with stats actions.'); } - if ( - normalizedAction !== 'cleanup' && - (options.vocab === true || options.lifetime === true) - ) { + if (normalizedAction !== 'cleanup' && (options.vocab === true || options.lifetime === true)) { throw new Error('Stats --vocab and --lifetime flags require the cleanup action.'); } if (normalizedAction === 'cleanup') { diff --git a/launcher/jellyfin.ts b/launcher/jellyfin.ts index fccf88c..c3f91db 100644 --- a/launcher/jellyfin.ts +++ b/launcher/jellyfin.ts @@ -10,6 +10,7 @@ import type { JellyfinGroupEntry, } from './types.js'; import { log, fail, getMpvLogPath } from './log.js'; +import { nowMs } from './time.js'; import { commandExists, resolvePathMaybe, sleep } from './util.js'; import { pickLibrary, @@ -453,9 +454,9 @@ async function runAppJellyfinCommand( } return retriedAfterStart ? 
12000 : 4000; })(); - const settleDeadline = Date.now() + settleWindowMs; + const settleDeadline = nowMs() + settleWindowMs; const settleOffset = attempt.logOffset; - while (Date.now() < settleDeadline) { + while (nowMs() < settleDeadline) { await sleep(100); const settledOutput = readLogAppendedSince(settleOffset); if (!settledOutput.trim()) { @@ -489,8 +490,8 @@ async function requestJellyfinPreviewAuthFromApp( return null; } - const deadline = Date.now() + 4000; - while (Date.now() < deadline) { + const deadline = nowMs() + 4000; + while (nowMs() < deadline) { try { if (fs.existsSync(responsePath)) { const raw = fs.readFileSync(responsePath, 'utf8'); diff --git a/launcher/log.test.ts b/launcher/log.test.ts index 615934b..fa898d1 100644 --- a/launcher/log.test.ts +++ b/launcher/log.test.ts @@ -14,12 +14,7 @@ test('getDefaultMpvLogFile uses APPDATA on windows', () => { assert.equal( path.normalize(resolved), path.normalize( - path.join( - 'C:\\Users\\tester\\AppData\\Roaming', - 'SubMiner', - 'logs', - `mpv-${today}.log`, - ), + path.join('C:\\Users\\tester\\AppData\\Roaming', 'SubMiner', 'logs', `mpv-${today}.log`), ), ); }); @@ -33,12 +28,6 @@ test('getDefaultLauncherLogFile uses launcher prefix', () => { assert.equal( resolved, - path.join( - '/home/tester', - '.config', - 'SubMiner', - 'logs', - `launcher-${today}.log`, - ), + path.join('/home/tester', '.config', 'SubMiner', 'logs', `launcher-${today}.log`), ); }); diff --git a/launcher/main.test.ts b/launcher/main.test.ts index 7644544..cf7b1f1 100644 --- a/launcher/main.test.ts +++ b/launcher/main.test.ts @@ -36,6 +36,8 @@ function withTempDir(fn: (dir: string) => T): T { } } +const LAUNCHER_RUN_TIMEOUT_MS = 30000; + function runLauncher(argv: string[], env: NodeJS.ProcessEnv): RunResult { const result = spawnSync( process.execPath, @@ -43,6 +45,7 @@ function runLauncher(argv: string[], env: NodeJS.ProcessEnv): RunResult { { env, encoding: 'utf8', + timeout: LAUNCHER_RUN_TIMEOUT_MS, }, ); return { @@ -269,10 
+272,7 @@ ${bunBinary} -e "const net=require('node:net'); const fs=require('node:fs'); con SUBMINER_APPIMAGE_PATH: appPath, SUBMINER_TEST_MPV_ARGS: mpvArgsPath, }; - const result = runLauncher( - ['--args', '--pause=yes --title="movie night"', videoPath], - env, - ); + const result = runLauncher(['--args', '--pause=yes --title="movie night"', videoPath], env); assert.equal(result.status, 0, `stdout:\n${result.stdout}\nstderr:\n${result.stderr}`); const argsFile = fs.readFileSync(mpvArgsPath, 'utf8'); @@ -355,10 +355,7 @@ ${bunBinary} -e "const net=require('node:net'); const fs=require('node:fs'); con const result = runLauncher(['--log-level', 'debug', videoPath], env); assert.equal(result.status, 0, `stdout:\n${result.stdout}\nstderr:\n${result.stderr}`); - assert.match( - fs.readFileSync(mpvArgsPath, 'utf8'), - /--script-opts=.*subminer-log_level=debug/, - ); + assert.match(fs.readFileSync(mpvArgsPath, 'utf8'), /--script-opts=.*subminer-log_level=debug/); }); }); diff --git a/launcher/mpv.test.ts b/launcher/mpv.test.ts index be848a3..0f89416 100644 --- a/launcher/mpv.test.ts +++ b/launcher/mpv.test.ts @@ -427,7 +427,10 @@ function withFindAppBinaryEnvSandbox(run: () => void): void { } } -function withAccessSyncStub(isExecutablePath: (filePath: string) => boolean, run: () => void): void { +function withAccessSyncStub( + isExecutablePath: (filePath: string) => boolean, + run: () => void, +): void { const originalAccessSync = fs.accessSync; try { // eslint-disable-next-line @typescript-eslint/no-explicit-any @@ -468,10 +471,13 @@ test('findAppBinary resolves /opt/SubMiner/SubMiner.AppImage when ~/.local/bin c try { os.homedir = () => baseDir; withFindAppBinaryEnvSandbox(() => { - withAccessSyncStub((filePath) => filePath === '/opt/SubMiner/SubMiner.AppImage', () => { - const result = findAppBinary('/some/other/path/subminer'); - assert.equal(result, '/opt/SubMiner/SubMiner.AppImage'); - }); + withAccessSyncStub( + (filePath) => filePath === 
'/opt/SubMiner/SubMiner.AppImage', + () => { + const result = findAppBinary('/some/other/path/subminer'); + assert.equal(result, '/opt/SubMiner/SubMiner.AppImage'); + }, + ); }); } finally { os.homedir = originalHomedir; @@ -492,11 +498,14 @@ test('findAppBinary finds subminer on PATH when AppImage candidates do not exist process.env.PATH = `${binDir}${path.delimiter}${originalPath ?? ''}`; withFindAppBinaryEnvSandbox(() => { - withAccessSyncStub((filePath) => filePath === wrapperPath, () => { - // selfPath must differ from wrapperPath so the self-check does not exclude it - const result = findAppBinary(path.join(baseDir, 'launcher', 'subminer')); - assert.equal(result, wrapperPath); - }); + withAccessSyncStub( + (filePath) => filePath === wrapperPath, + () => { + // selfPath must differ from wrapperPath so the self-check does not exclude it + const result = findAppBinary(path.join(baseDir, 'launcher', 'subminer')); + assert.equal(result, wrapperPath); + }, + ); }); } finally { os.homedir = originalHomedir; diff --git a/launcher/mpv.ts b/launcher/mpv.ts index 6305351..bf7c7cd 100644 --- a/launcher/mpv.ts +++ b/launcher/mpv.ts @@ -7,6 +7,7 @@ import type { LogLevel, Backend, Args, MpvTrack } from './types.js'; import { DEFAULT_MPV_SUBMINER_ARGS, DEFAULT_YOUTUBE_YTDL_FORMAT } from './types.js'; import { appendToAppLog, getAppLogPath, log, fail, getMpvLogPath } from './log.js'; import { buildSubminerScriptOpts, resolveAniSkipMetadataForFile } from './aniskip-metadata.js'; +import { nowMs } from './time.js'; import { commandExists, getPathEnv, @@ -47,7 +48,11 @@ export function parseMpvArgString(input: string): string[] { let inDoubleQuote = false; let escaping = false; const canEscape = (nextChar: string | undefined): boolean => - nextChar === undefined || nextChar === '"' || nextChar === "'" || nextChar === '\\' || /\s/.test(nextChar); + nextChar === undefined || + nextChar === '"' || + nextChar === "'" || + nextChar === '\\' || + /\s/.test(nextChar); for (let i = 0; 
i < chars.length; i += 1) { const ch = chars[i] || ''; @@ -196,8 +201,8 @@ async function terminateTrackedDetachedMpv(logLevel: LogLevel): Promise { return; } - const deadline = Date.now() + 1500; - while (Date.now() < deadline) { + const deadline = nowMs() + 1500; + while (nowMs() < deadline) { if (!isProcessAlive(pid)) { clearTrackedDetachedMpvPid(); return; @@ -340,7 +345,7 @@ export function sendMpvCommandWithResponse( timeoutMs = 5000, ): Promise { return new Promise((resolve, reject) => { - const requestId = Date.now() + Math.floor(Math.random() * 1000); + const requestId = nowMs() + Math.floor(Math.random() * 1000); const socket = net.createConnection(socketPath); let buffer = ''; @@ -598,7 +603,9 @@ export async function startMpv( ? await resolveAniSkipMetadataForFile(target) : null; const extraScriptOpts = - targetKind === 'url' && isYoutubeTarget(target) && options?.disableYoutubeSubtitleAutoLoad === true + targetKind === 'url' && + isYoutubeTarget(target) && + options?.disableYoutubeSubtitleAutoLoad === true ? 
['subminer-auto_start_pause_until_ready=no'] : []; const scriptOpts = buildSubminerScriptOpts( @@ -1064,7 +1071,9 @@ export function launchMpvIdleDetached( mpvArgs.push(...parseMpvArgString(args.mpvArgs)); } mpvArgs.push('--idle=yes'); - mpvArgs.push(`--script-opts=${buildSubminerScriptOpts(appPath, socketPath, null, args.logLevel)}`); + mpvArgs.push( + `--script-opts=${buildSubminerScriptOpts(appPath, socketPath, null, args.logLevel)}`, + ); mpvArgs.push(`--log-file=${getMpvLogPath()}`); mpvArgs.push(`--input-ipc-server=${socketPath}`); const mpvTarget = resolveCommandInvocation('mpv', mpvArgs); @@ -1109,8 +1118,8 @@ export async function waitForUnixSocketReady( socketPath: string, timeoutMs: number, ): Promise { - const deadline = Date.now() + timeoutMs; - while (Date.now() < deadline) { + const deadline = nowMs() + timeoutMs; + while (nowMs() < deadline) { try { if (fs.existsSync(socketPath)) { const ready = await canConnectUnixSocket(socketPath); diff --git a/launcher/time.ts b/launcher/time.ts new file mode 100644 index 0000000..46cac5c --- /dev/null +++ b/launcher/time.ts @@ -0,0 +1,8 @@ +export function nowMs(): number { + const perf = globalThis.performance; + if (perf) { + return Math.floor(perf.timeOrigin + perf.now()); + } + + return Number(process.hrtime.bigint() / 1000000n); +} diff --git a/launcher/util.ts b/launcher/util.ts index 3dc160f..1cc0c46 100644 --- a/launcher/util.ts +++ b/launcher/util.ts @@ -4,6 +4,7 @@ import os from 'node:os'; import { spawn } from 'node:child_process'; import type { LogLevel, CommandExecOptions, CommandExecResult } from './types.js'; import { log } from './log.js'; +import { nowMs } from './time.js'; export function sleep(ms: number): Promise { return new Promise((resolve) => setTimeout(resolve, ms)); @@ -198,7 +199,7 @@ export function normalizeBasename(value: string, fallback: string): string { if (safe) return safe; const fallbackSafe = sanitizeToken(fallback); if (fallbackSafe) return fallbackSafe; - return 
`${Date.now()}`; + return `${nowMs()}`; } export function normalizeLangCode(value: string): string { diff --git a/package.json b/package.json index 8820663..3624458 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "subminer", - "version": "0.9.3", + "version": "0.10.0", "description": "All-in-one sentence mining overlay with AnkiConnect and dictionary integration", "packageManager": "bun@1.3.5", "main": "dist/main-entry.js", @@ -42,9 +42,9 @@ "test:config:smoke:dist": "bun test dist/config/path-resolution.test.js", "test:plugin:src": "lua scripts/test-plugin-start-gate.lua && lua scripts/test-plugin-binary-windows.lua", "test:launcher:smoke:src": "bun test launcher/smoke.e2e.test.ts", - "test:launcher:src": "bun test launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/mpv.test.ts launcher/picker.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/smoke.e2e.test.ts && bun run test:plugin:src", - "test:core:src": "bun test src/cli/args.test.ts src/cli/help.test.ts src/shared/setup-state.test.ts src/core/services/cli-command.test.ts src/core/services/field-grouping-overlay.test.ts src/core/services/numeric-shortcut-session.test.ts src/core/services/secondary-subtitle.test.ts src/core/services/mpv-render-metrics.test.ts src/core/services/overlay-content-measurement.test.ts src/core/services/mpv-control.test.ts src/core/services/mpv.test.ts src/core/services/runtime-options-ipc.test.ts src/core/services/runtime-config.test.ts src/core/services/yomitan-extension-paths.test.ts src/core/services/config-hot-reload.test.ts src/core/services/discord-presence.test.ts src/core/services/tokenizer.test.ts src/core/services/tokenizer/annotation-stage.test.ts src/core/services/tokenizer/parser-selection-stage.test.ts src/core/services/tokenizer/parser-enrichment-stage.test.ts src/core/services/subsync.test.ts src/core/services/overlay-bridge.test.ts 
src/core/services/overlay-shortcut-handler.test.ts src/core/services/stats-window.test.ts src/core/services/mining.test.ts src/core/services/anki-jimaku.test.ts src/core/services/jimaku-download-path.test.ts src/core/services/jellyfin.test.ts src/core/services/jellyfin-remote.test.ts src/core/services/immersion-tracker-service.test.ts src/core/services/overlay-runtime-init.test.ts src/core/services/app-ready.test.ts src/core/services/startup-bootstrap.test.ts src/core/services/subtitle-processing-controller.test.ts src/core/services/anilist/anilist-update-queue.test.ts src/core/utils/shortcut-config.test.ts src/main/runtime/first-run-setup-plugin.test.ts src/main/runtime/first-run-setup-service.test.ts src/main/runtime/first-run-setup-window.test.ts src/main/runtime/tray-runtime.test.ts src/main/runtime/tray-main-actions.test.ts src/main/runtime/tray-main-deps.test.ts src/main/runtime/tray-runtime-handlers.test.ts src/main/runtime/cli-command-context-main-deps.test.ts src/main/runtime/app-ready-main-deps.test.ts src/renderer/error-recovery.test.ts src/renderer/subtitle-render.test.ts src/renderer/handlers/mouse.test.ts src/renderer/handlers/keyboard.test.ts src/renderer/modals/jimaku.test.ts src/subsync/utils.test.ts src/main/anilist-url-guard.test.ts src/window-trackers/hyprland-tracker.test.ts src/window-trackers/x11-tracker.test.ts src/window-trackers/windows-helper.test.ts src/window-trackers/windows-tracker.test.ts launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/setup-gate.test.ts stats/src/lib/api-client.test.ts", - "test:core:dist": "bun test dist/cli/args.test.js dist/cli/help.test.js dist/core/services/cli-command.test.js dist/core/services/ipc.test.js dist/core/services/anki-jimaku-ipc.test.js dist/core/services/field-grouping-overlay.test.js dist/core/services/numeric-shortcut-session.test.js dist/core/services/secondary-subtitle.test.js 
dist/core/services/mpv-render-metrics.test.js dist/core/services/overlay-content-measurement.test.js dist/core/services/mpv-control.test.js dist/core/services/mpv.test.js dist/core/services/runtime-options-ipc.test.js dist/core/services/runtime-config.test.js dist/core/services/yomitan-extension-paths.test.js dist/core/services/config-hot-reload.test.js dist/core/services/discord-presence.test.js dist/core/services/tokenizer.test.js dist/core/services/tokenizer/annotation-stage.test.js dist/core/services/tokenizer/parser-selection-stage.test.js dist/core/services/tokenizer/parser-enrichment-stage.test.js dist/core/services/subsync.test.js dist/core/services/overlay-bridge.test.js dist/core/services/overlay-manager.test.js dist/core/services/overlay-shortcut-handler.test.js dist/core/services/mining.test.js dist/core/services/anki-jimaku.test.js dist/core/services/jimaku-download-path.test.js dist/core/services/jellyfin.test.js dist/core/services/jellyfin-remote.test.js dist/core/services/immersion-tracker-service.test.js dist/core/services/overlay-runtime-init.test.js dist/core/services/app-ready.test.js dist/core/services/startup-bootstrap.test.js dist/core/services/subtitle-processing-controller.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/anilist/anilist-update-queue.test.js dist/renderer/error-recovery.test.js dist/renderer/subtitle-render.test.js dist/renderer/handlers/mouse.test.js dist/renderer/handlers/keyboard.test.js dist/renderer/modals/jimaku.test.js dist/subsync/utils.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/hyprland-tracker.test.js dist/window-trackers/x11-tracker.test.js dist/window-trackers/windows-helper.test.js dist/window-trackers/windows-tracker.test.js", + "test:launcher:src": "bun test launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/config/cli-parser-builder.test.ts launcher/config/args-normalizer.test.ts launcher/mpv.test.ts launcher/picker.test.ts 
launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/smoke.e2e.test.ts && bun run test:plugin:src", + "test:core:src": "bun test src/cli/args.test.ts src/cli/help.test.ts src/shared/setup-state.test.ts src/core/services/cli-command.test.ts src/core/services/field-grouping-overlay.test.ts src/core/services/numeric-shortcut-session.test.ts src/core/services/secondary-subtitle.test.ts src/core/services/mpv-render-metrics.test.ts src/core/services/overlay-content-measurement.test.ts src/core/services/mpv-control.test.ts src/core/services/mpv.test.ts src/core/services/runtime-options-ipc.test.ts src/core/services/runtime-config.test.ts src/core/services/yomitan-extension-paths.test.ts src/core/services/config-hot-reload.test.ts src/core/services/discord-presence.test.ts src/core/services/tokenizer.test.ts src/core/services/tokenizer/annotation-stage.test.ts src/core/services/tokenizer/parser-selection-stage.test.ts src/core/services/tokenizer/parser-enrichment-stage.test.ts src/core/services/subsync.test.ts src/core/services/overlay-bridge.test.ts src/core/services/overlay-shortcut-handler.test.ts src/core/services/stats-window.test.ts src/core/services/mining.test.ts src/core/services/anki-jimaku.test.ts src/core/services/jimaku-download-path.test.ts src/core/services/jellyfin.test.ts src/core/services/jellyfin-remote.test.ts src/core/services/immersion-tracker-service.test.ts src/core/services/overlay-runtime-init.test.ts src/core/services/app-ready.test.ts src/core/services/startup-bootstrap.test.ts src/core/services/subtitle-processing-controller.test.ts src/core/services/anilist/anilist-update-queue.test.ts src/core/services/anilist/rate-limiter.test.ts src/core/services/jlpt-token-filter.test.ts src/core/services/subtitle-position.test.ts src/core/utils/shortcut-config.test.ts src/main/runtime/first-run-setup-plugin.test.ts src/main/runtime/first-run-setup-service.test.ts 
src/main/runtime/first-run-setup-window.test.ts src/main/runtime/tray-runtime.test.ts src/main/runtime/tray-main-actions.test.ts src/main/runtime/tray-main-deps.test.ts src/main/runtime/tray-runtime-handlers.test.ts src/main/runtime/cli-command-context-main-deps.test.ts src/main/runtime/app-ready-main-deps.test.ts src/renderer/error-recovery.test.ts src/renderer/subtitle-render.test.ts src/renderer/handlers/mouse.test.ts src/renderer/handlers/keyboard.test.ts src/renderer/modals/jimaku.test.ts src/subsync/utils.test.ts src/main/anilist-url-guard.test.ts src/window-trackers/hyprland-tracker.test.ts src/window-trackers/x11-tracker.test.ts src/window-trackers/windows-helper.test.ts src/window-trackers/windows-tracker.test.ts launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/config/cli-parser-builder.test.ts launcher/config/args-normalizer.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/setup-gate.test.ts stats/src/lib/api-client.test.ts", + "test:core:dist": "bun test dist/cli/args.test.js dist/cli/help.test.js dist/core/services/cli-command.test.js dist/core/services/ipc.test.js dist/core/services/anki-jimaku-ipc.test.js dist/core/services/field-grouping-overlay.test.js dist/core/services/numeric-shortcut-session.test.js dist/core/services/secondary-subtitle.test.js dist/core/services/mpv-render-metrics.test.js dist/core/services/overlay-content-measurement.test.js dist/core/services/mpv-control.test.js dist/core/services/mpv.test.js dist/core/services/runtime-options-ipc.test.js dist/core/services/runtime-config.test.js dist/core/services/yomitan-extension-paths.test.js dist/core/services/config-hot-reload.test.js dist/core/services/discord-presence.test.js dist/core/services/tokenizer.test.js dist/core/services/tokenizer/annotation-stage.test.js dist/core/services/tokenizer/parser-selection-stage.test.js dist/core/services/tokenizer/parser-enrichment-stage.test.js 
dist/core/services/subsync.test.js dist/core/services/overlay-bridge.test.js dist/core/services/overlay-manager.test.js dist/core/services/overlay-shortcut-handler.test.js dist/core/services/mining.test.js dist/core/services/anki-jimaku.test.js dist/core/services/jimaku-download-path.test.js dist/core/services/jellyfin.test.js dist/core/services/jellyfin-remote.test.js dist/core/services/immersion-tracker-service.test.js dist/core/services/overlay-runtime-init.test.js dist/core/services/app-ready.test.js dist/core/services/startup-bootstrap.test.js dist/core/services/subtitle-processing-controller.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/anilist/anilist-update-queue.test.js dist/core/services/anilist/rate-limiter.test.js dist/core/services/jlpt-token-filter.test.js dist/core/services/subtitle-position.test.js dist/renderer/error-recovery.test.js dist/renderer/subtitle-render.test.js dist/renderer/handlers/mouse.test.js dist/renderer/handlers/keyboard.test.js dist/renderer/modals/jimaku.test.js dist/subsync/utils.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/hyprland-tracker.test.js dist/window-trackers/x11-tracker.test.js dist/window-trackers/windows-helper.test.js dist/window-trackers/windows-tracker.test.js", "test:core:smoke:dist": "bun test dist/cli/help.test.js dist/core/services/runtime-config.test.js dist/core/services/ipc.test.js dist/core/services/overlay-manager.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/startup-bootstrap.test.js dist/renderer/error-recovery.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/x11-tracker.test.js", "test:smoke:dist": "bun run test:config:smoke:dist && bun run test:core:smoke:dist", "test:subtitle:src": "bun test src/core/services/subsync.test.ts src/subsync/utils.test.ts", @@ -52,6 +52,8 @@ "test:immersion:sqlite:dist": "bun test dist/core/services/immersion-tracker-service.test.js 
dist/core/services/immersion-tracker/storage-session.test.js", "test:immersion:sqlite": "bun run tsc && bun run test:immersion:sqlite:dist", "test:src": "bun scripts/run-test-lane.mjs bun-src-full", + "test:coverage:src": "bun run build:yomitan && bun run scripts/run-coverage-lane.ts bun-src-full --coverage-dir coverage/test-src", + "test:coverage:subtitle:src": "bun test --coverage --coverage-reporter=text --coverage-reporter=lcov --coverage-dir coverage/test-subtitle src/core/services/subsync.test.ts src/subsync/utils.test.ts", "test:launcher:unit:src": "bun scripts/run-test-lane.mjs bun-launcher-unit", "test:launcher:env:src": "bun run test:launcher:smoke:src && bun run test:plugin:src", "test:env": "bun run test:launcher:env:src && bun run test:immersion:sqlite:src", @@ -63,7 +65,7 @@ "test:launcher": "bun run test:launcher:src", "test:core": "bun run test:core:src", "test:subtitle": "bun run test:subtitle:src", - "test:fast": "bun run test:config:src && bun run test:core:src && bun run test:docs:kb && bun test src/main-entry-runtime.test.ts src/anki-integration/anki-connect-proxy.test.ts src/release-workflow.test.ts src/ci-workflow.test.ts scripts/build-changelog.test.ts scripts/mkv-to-readme-video.test.ts scripts/update-aur-package.test.ts && bun run tsc && bun test dist/main/runtime/registry.test.js", + "test:fast": "bun run test:config:src && bun run test:core:src && bun run test:docs:kb && bun test src/main-entry-runtime.test.ts src/anki-integration.test.ts src/anki-integration/anki-connect-proxy.test.ts src/anki-integration/field-grouping-workflow.test.ts src/anki-integration/field-grouping.test.ts src/anki-integration/field-grouping-merge.test.ts src/release-workflow.test.ts src/ci-workflow.test.ts scripts/build-changelog.test.ts scripts/mkv-to-readme-video.test.ts scripts/run-coverage-lane.test.ts scripts/update-aur-package.test.ts && bun test src/core/services/immersion-tracker/__tests__/query.test.ts 
src/core/services/immersion-tracker/__tests__/query-split-modules.test.ts && bun run tsc && bun test dist/main/runtime/registry.test.js", "generate:config-example": "bun run src/generate-config-example.ts", "verify:config-example": "bun run src/verify-config-example.ts", "start": "bun run build && electron . --start", @@ -98,10 +100,9 @@ "dependencies": { "@fontsource-variable/geist": "^5.2.8", "@fontsource-variable/geist-mono": "^5.2.7", - "@hono/node-server": "^1.19.11", + "@xhayper/discord-rpc": "^1.3.3", "axios": "^1.13.5", "commander": "^14.0.3", - "discord-rpc": "^4.0.1", "hono": "^4.12.7", "jsonc-parser": "^3.3.1", "libsql": "^0.5.22", diff --git a/plugin/subminer/process.lua b/plugin/subminer/process.lua index 532f65f..a51f53b 100644 --- a/plugin/subminer/process.lua +++ b/plugin/subminer/process.lua @@ -153,6 +153,9 @@ function M.create(ctx) local function notify_auto_play_ready() release_auto_play_ready_gate("tokenization-ready") + if state.suppress_ready_overlay_restore then + return + end if state.overlay_running and resolve_visible_overlay_startup() then run_control_command_async("show-visible-overlay", { socket_path = opts.socket_path, @@ -287,6 +290,9 @@ function M.create(ctx) local function start_overlay(overrides) overrides = overrides or {} + if overrides.auto_start_trigger == true then + state.suppress_ready_overlay_restore = false + end if not binary.ensure_binary_available() then subminer_log("error", "binary", "SubMiner binary not found") @@ -433,6 +439,7 @@ function M.create(ctx) subminer_log("error", "binary", "SubMiner binary not found") return end + state.suppress_ready_overlay_restore = true run_control_command_async("hide-visible-overlay", nil, function(ok, result) if ok then @@ -456,8 +463,9 @@ function M.create(ctx) show_osd("Error: binary not found") return end + state.suppress_ready_overlay_restore = true - run_control_command_async("toggle", nil, function(ok) + run_control_command_async("toggle-visible-overlay", nil, function(ok) if 
not ok then subminer_log("warn", "process", "Toggle command failed") show_osd("Toggle failed") diff --git a/plugin/subminer/state.lua b/plugin/subminer/state.lua index 732624e..8814b0e 100644 --- a/plugin/subminer/state.lua +++ b/plugin/subminer/state.lua @@ -32,6 +32,7 @@ function M.new() auto_play_ready_gate_armed = false, auto_play_ready_timeout = nil, auto_play_ready_osd_timer = nil, + suppress_ready_overlay_restore = false, } end diff --git a/plugins/subminer-workflow/.codex-plugin/plugin.json b/plugins/subminer-workflow/.codex-plugin/plugin.json new file mode 100644 index 0000000..9b5db65 --- /dev/null +++ b/plugins/subminer-workflow/.codex-plugin/plugin.json @@ -0,0 +1,30 @@ +{ + "name": "subminer-workflow", + "version": "0.1.0", + "description": "Repo-local SubMiner agent workflow plugin for backlog-first orchestration and change verification.", + "author": { + "name": "Kyle Yasuda", + "email": "suda@sudacode.com", + "url": "https://github.com/sudacode" + }, + "homepage": "https://github.com/sudacode/SubMiner/tree/main/plugins/subminer-workflow", + "repository": "https://github.com/sudacode/SubMiner", + "license": "GPL-3.0-or-later", + "keywords": ["subminer", "workflow", "backlog", "verification", "skills"], + "skills": "./skills/", + "interface": { + "displayName": "SubMiner Workflow", + "shortDescription": "Backlog-first SubMiner orchestration and verification.", + "longDescription": "Canonical repo-local plugin for SubMiner agent workflow packaging. Owns the scrum-master and change-verification skills plus helper scripts used to plan, verify, and validate changes reproducibly inside this repo.", + "developerName": "Kyle Yasuda", + "category": "Productivity", + "capabilities": ["Interactive", "Write"], + "websiteURL": "https://github.com/sudacode/SubMiner", + "defaultPrompt": [ + "Use SubMiner workflow to plan and ship a feature.", + "Verify a SubMiner change with the plugin-owned verifier.", + "Run backlog-first intake for this SubMiner task." 
+ ], + "brandColor": "#2F6B4F" + } +} diff --git a/plugins/subminer-workflow/README.md b/plugins/subminer-workflow/README.md new file mode 100644 index 0000000..5eea07f --- /dev/null +++ b/plugins/subminer-workflow/README.md @@ -0,0 +1,49 @@ + + +# SubMiner Workflow Plugin + +Status: active +Last verified: 2026-03-26 +Owner: Kyle Yasuda +Read when: using or updating the repo-local plugin that owns SubMiner agent workflow skills + +This plugin is the canonical source of truth for the SubMiner agent workflow packaging. + +## Contents + +- `skills/subminer-scrum-master/` + - backlog-first intake, planning, dispatch, and handoff workflow +- `skills/subminer-change-verification/` + - cheap-first verification workflow plus helper scripts + +## Backlog MCP + +- This plugin assumes Backlog.md MCP is available in the host environment when the client exposes it. +- Canonical backlog behavior remains: + - read `backlog://workflow/overview` when resources are available + - otherwise use the matching backlog tool overview +- If backlog MCP is unavailable in the current session, fall back to direct repo-local `backlog/` edits and record that blocker in the task or handoff. + +## Compatibility + +- `.agents/skills/subminer-scrum-master/` is a compatibility shim that redirects to the plugin-owned skill. +- `.agents/skills/subminer-change-verification/` is a compatibility shim. +- `.agents/skills/subminer-change-verification/scripts/*.sh` remain as wrapper entrypoints so existing docs, backlog tasks, and shell history keep working. 
+ +## Verification + +For plugin/doc/shim changes, prefer: + +```bash +bun run test:docs:kb +bash plugins/subminer-workflow/skills/subminer-change-verification/scripts/verify_subminer_change.sh --lane docs --lane core \ + plugins/subminer-workflow \ + .agents/skills/subminer-scrum-master/SKILL.md \ + .agents/skills/subminer-change-verification/SKILL.md \ + .agents/skills/subminer-change-verification/scripts/classify_subminer_diff.sh \ + .agents/skills/subminer-change-verification/scripts/verify_subminer_change.sh \ + .agents/plugins/marketplace.json \ + docs/workflow/README.md \ + docs/workflow/agent-plugins.md \ + backlog/tasks/task-240\ -\ Migrate-SubMiner-agent-skills-into-a-repo-local-plugin-workflow.md +``` diff --git a/plugins/subminer-workflow/skills/subminer-change-verification/SKILL.md b/plugins/subminer-workflow/skills/subminer-change-verification/SKILL.md new file mode 100644 index 0000000..585a764 --- /dev/null +++ b/plugins/subminer-workflow/skills/subminer-change-verification/SKILL.md @@ -0,0 +1,141 @@ +--- +name: 'subminer-change-verification' +description: 'Use when working in the SubMiner repo and you need to verify code changes actually work. Covers targeted regression checks during debugging and pre-handoff verification, with cheap-first lane selection for config, docs, launcher/plugin, runtime-compat, and optional real-runtime escalation.' +--- + +# SubMiner Change Verification + +Canonical source: this plugin path. + +Use this skill for SubMiner code changes. Default to cheap, repo-native verification first. Escalate only when the changed behavior actually depends on Electron, mpv, overlay/window tracking, or other GUI-sensitive runtime behavior. + +## Scripts + +- `scripts/classify_subminer_diff.sh` + - Emits suggested lanes and flags from explicit paths or current git changes. +- `scripts/verify_subminer_change.sh` + - Runs selected lanes, captures artifacts, and writes a compact summary. 
+ +If you need an explicit installed path, use the directory that contains this `SKILL.md`. The helper scripts live under: + +```bash +export SUBMINER_VERIFY_SKILL="" +``` + +## Default workflow + +1. Inspect the changed files or user-requested area. +2. Run the classifier unless you already know the right lane. +3. Run the verifier with the cheapest sufficient lane set. +4. If the classifier emits `flag:real-runtime-candidate`, do not jump straight to runtime verification. First run the non-runtime lanes. +5. Escalate to explicit `--lane real-runtime --allow-real-runtime` only when cheaper lanes cannot validate the behavior claim. +6. Return: + - verification summary + - exact commands run + - artifact paths + - skipped lanes and blockers + +## Quick start + +Plugin-source quick start: + +```bash +bash plugins/subminer-workflow/skills/subminer-change-verification/scripts/classify_subminer_diff.sh +``` + +Installed-skill quick start: + +```bash +bash "$SUBMINER_VERIFY_SKILL/scripts/classify_subminer_diff.sh" +``` + +Compatibility entrypoint: + +```bash +bash .agents/skills/subminer-change-verification/scripts/classify_subminer_diff.sh +``` + +Classify explicit files: + +```bash +bash plugins/subminer-workflow/skills/subminer-change-verification/scripts/classify_subminer_diff.sh \ + launcher/main.ts \ + plugin/subminer/lifecycle.lua \ + src/main/runtime/mpv-client-runtime-service.ts +``` + +Run automatic lane selection: + +```bash +bash plugins/subminer-workflow/skills/subminer-change-verification/scripts/verify_subminer_change.sh +``` + +Installed-skill form: + +```bash +bash "$SUBMINER_VERIFY_SKILL/scripts/verify_subminer_change.sh" +``` + +Compatibility entrypoint: + +```bash +bash .agents/skills/subminer-change-verification/scripts/verify_subminer_change.sh +``` + +Run targeted lanes: + +```bash +bash plugins/subminer-workflow/skills/subminer-change-verification/scripts/verify_subminer_change.sh \ + --lane launcher-plugin \ + --lane runtime-compat +``` + 
+Dry-run to inspect planned commands and artifact layout: + +```bash +bash plugins/subminer-workflow/skills/subminer-change-verification/scripts/verify_subminer_change.sh \ + --dry-run \ + launcher/main.ts \ + src/main.ts +``` + +## Lane guidance + +- `docs` + - For `docs-site/`, `docs/`, and doc-only edits. +- `config` + - For `src/config/` and config-template-sensitive edits. +- `core` + - For general source changes where `typecheck` + `test:fast` is the best cheap signal. +- `launcher-plugin` + - For `launcher/`, `plugin/subminer/`, plugin gating scripts, and wrapper/mpv routing work. +- `runtime-compat` + - For `src/main*`, runtime/composer wiring, mpv/overlay services, window trackers, and dist-sensitive behavior. +- `real-runtime` + - Only after deliberate escalation. + +## Real Runtime Escalation + +Escalate only when the change claim depends on actual runtime behavior, for example: + +- overlay appears, hides, or tracks a real mpv window +- mpv launch flags or pause-until-ready behavior +- plugin/socket/auto-start handshake under a real player +- macOS/window-tracker/focus-sensitive behavior + +If the environment cannot support authoritative runtime verification, report the blocker explicitly. Do not silently downgrade a runtime-required claim to a pass. + +## Artifact contract + +The verifier writes under `.tmp/skill-verification//`: + +- `summary.json` +- `summary.txt` +- `classification.txt` +- `env.txt` +- `lanes.txt` +- `steps.tsv` +- `steps/*.stdout.log` +- `steps/*.stderr.log` + +On failure, quote the exact failing command and point at the artifact directory. 
diff --git a/plugins/subminer-workflow/skills/subminer-change-verification/scripts/classify_subminer_diff.sh b/plugins/subminer-workflow/skills/subminer-change-verification/scripts/classify_subminer_diff.sh new file mode 100755 index 0000000..a983ff3 --- /dev/null +++ b/plugins/subminer-workflow/skills/subminer-change-verification/scripts/classify_subminer_diff.sh @@ -0,0 +1,163 @@ +#!/usr/bin/env bash +set -euo pipefail + +usage() { + cat <<'EOF' +Usage: classify_subminer_diff.sh [path ...] + +Emit suggested verification lanes for explicit paths or current local git changes. + +Output format: + lane: + flag: + reason: +EOF +} + +has_item() { + local needle=$1 + shift || true + local item + for item in "$@"; do + if [[ "$item" == "$needle" ]]; then + return 0 + fi + done + return 1 +} + +add_lane() { + local lane=$1 + if ! has_item "$lane" "${LANES[@]:-}"; then + LANES+=("$lane") + fi +} + +add_flag() { + local flag=$1 + if ! has_item "$flag" "${FLAGS[@]:-}"; then + FLAGS+=("$flag") + fi +} + +add_reason() { + REASONS+=("$1") +} + +collect_git_paths() { + local top_level + if ! 
top_level=$(git rev-parse --show-toplevel 2>/dev/null); then + return 0 + fi + + ( + cd "$top_level" + if git rev-parse --verify HEAD >/dev/null 2>&1; then + git diff --name-only --relative HEAD -- + git diff --name-only --relative --cached -- + else + git diff --name-only --relative -- + git diff --name-only --relative --cached -- + fi + git ls-files --others --exclude-standard + ) | awk 'NF' | sort -u +} + +if [[ "${1:-}" == "--help" || "${1:-}" == "-h" ]]; then + usage + exit 0 +fi + +declare -a PATHS=() +declare -a LANES=() +declare -a FLAGS=() +declare -a REASONS=() + +if [[ $# -gt 0 ]]; then + while [[ $# -gt 0 ]]; do + PATHS+=("$1") + shift + done +else + while IFS= read -r line; do + [[ -n "$line" ]] && PATHS+=("$line") + done < <(collect_git_paths) +fi + +if [[ ${#PATHS[@]} -eq 0 ]]; then + add_lane "core" + add_reason "no changed paths detected -> default to core" +fi + +for path in "${PATHS[@]}"; do + specialized=0 + + case "$path" in + docs-site/*|docs/*|changes/*|README.md) + add_lane "docs" + add_reason "$path -> docs" + specialized=1 + ;; + esac + + case "$path" in + src/config/*|src/generate-config-example.ts|src/verify-config-example.ts|docs-site/public/config.example.jsonc|config.example.jsonc) + add_lane "config" + add_reason "$path -> config" + specialized=1 + ;; + esac + + case "$path" in + launcher/*|plugin/subminer/*|plugin/subminer.conf|scripts/test-plugin-*|scripts/get-mpv-window-*|scripts/configure-plugin-binary-path.mjs) + add_lane "launcher-plugin" + add_reason "$path -> launcher-plugin" + add_flag "real-runtime-candidate" + add_reason "$path -> real-runtime-candidate" + specialized=1 + ;; + esac + + case "$path" in + src/main.ts|src/main-entry.ts|src/preload.ts|src/main/*|src/core/services/mpv*|src/core/services/overlay*|src/renderer/*|src/window-trackers/*|scripts/prepare-build-assets.mjs) + add_lane "runtime-compat" + add_reason "$path -> runtime-compat" + add_flag "real-runtime-candidate" + add_reason "$path -> 
real-runtime-candidate" + specialized=1 + ;; + esac + + if [[ "$specialized" == "0" ]]; then + case "$path" in + src/*|package.json|tsconfig*.json|scripts/*|Makefile) + add_lane "core" + add_reason "$path -> core" + ;; + esac + fi + + case "$path" in + package.json|src/main.ts|src/main-entry.ts|src/preload.ts) + add_flag "broad-impact" + add_reason "$path -> broad-impact" + ;; + esac +done + +if [[ ${#LANES[@]} -eq 0 ]]; then + add_lane "core" + add_reason "no lane-specific matches -> default to core" +fi + +for lane in "${LANES[@]}"; do + printf 'lane:%s\n' "$lane" +done + +for flag in "${FLAGS[@]}"; do + printf 'flag:%s\n' "$flag" +done + +for reason in "${REASONS[@]}"; do + printf 'reason:%s\n' "$reason" +done diff --git a/plugins/subminer-workflow/skills/subminer-change-verification/scripts/verify_subminer_change.sh b/plugins/subminer-workflow/skills/subminer-change-verification/scripts/verify_subminer_change.sh new file mode 100755 index 0000000..db63797 --- /dev/null +++ b/plugins/subminer-workflow/skills/subminer-change-verification/scripts/verify_subminer_change.sh @@ -0,0 +1,524 @@ +#!/usr/bin/env bash +set -euo pipefail + +usage() { + cat <<'EOF' +Usage: verify_subminer_change.sh [options] [path ...] + +Options: + --lane Force a verification lane. Repeatable. + --artifact-dir Use an explicit artifact directory. + --allow-real-runtime Allow explicit real-runtime execution. + --allow-real-gui Deprecated alias for --allow-real-runtime. + --dry-run Record planned steps without executing commands. + --help Show this help text. + +If no lanes are supplied, the script classifies the provided paths. If no paths are +provided, it classifies the current local git changes. + +Authoritative real-runtime verification should be requested with explicit path +arguments instead of relying on inferred local git changes. 
+EOF +} + +timestamp() { + date +%Y%m%d-%H%M%S +} + +timestamp_iso() { + date -u +%Y-%m-%dT%H:%M:%SZ +} + +generate_session_id() { + local tmp_dir + tmp_dir=$(mktemp -d "${TMPDIR:-/tmp}/subminer-verify-$(timestamp)-XXXXXX") + basename "$tmp_dir" + rmdir "$tmp_dir" +} + +has_item() { + local needle=$1 + shift || true + local item + for item in "$@"; do + if [[ "$item" == "$needle" ]]; then + return 0 + fi + done + return 1 +} + +normalize_lane_name() { + case "$1" in + real-gui) + printf '%s' "real-runtime" + ;; + *) + printf '%s' "$1" + ;; + esac +} + +add_lane() { + local lane + lane=$(normalize_lane_name "$1") + if ! has_item "$lane" "${SELECTED_LANES[@]:-}"; then + SELECTED_LANES+=("$lane") + fi +} + +add_blocker() { + BLOCKERS+=("$1") + BLOCKED=1 +} + +validate_artifact_dir() { + local candidate=$1 + if [[ ! "$candidate" =~ ^[A-Za-z0-9._/@:+-]+$ ]]; then + echo "Invalid characters in --artifact-dir path" >&2 + exit 2 + fi +} + +append_step_record() { + printf '%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n' \ + "$1" "$2" "$3" "$4" "$5" "$6" "$7" "$8" >>"$STEPS_TSV" +} + +record_env() { + { + printf 'repo_root=%s\n' "$REPO_ROOT" + printf 'session_id=%s\n' "$SESSION_ID" + printf 'artifact_dir=%s\n' "$ARTIFACT_DIR" + printf 'path_selection_mode=%s\n' "$PATH_SELECTION_MODE" + printf 'dry_run=%s\n' "$DRY_RUN" + printf 'allow_real_runtime=%s\n' "$ALLOW_REAL_RUNTIME" + printf 'session_home=%s\n' "$SESSION_HOME" + printf 'session_xdg_config_home=%s\n' "$SESSION_XDG_CONFIG_HOME" + printf 'session_mpv_dir=%s\n' "$SESSION_MPV_DIR" + printf 'session_logs_dir=%s\n' "$SESSION_LOGS_DIR" + printf 'session_mpv_log=%s\n' "$SESSION_MPV_LOG" + printf 'pwd=%s\n' "$(pwd)" + git rev-parse --short HEAD 2>/dev/null | sed 's/^/git_head=/' || true + git status --short 2>/dev/null || true + if [[ ${#PATH_ARGS[@]} -gt 0 ]]; then + printf 'requested_paths=\n' + printf ' %s\n' "${PATH_ARGS[@]}" + fi + } >"$ARTIFACT_DIR/env.txt" +} + +run_step() { + local lane=$1 + local name=$2 + local command=$3 + local 
note=${4:-} + local lane_slug=${lane//[^a-zA-Z0-9_-]/-} + local slug=${name//[^a-zA-Z0-9_-]/-} + local step_slug="${lane_slug}--${slug}" + local stdout_rel="steps/${step_slug}.stdout.log" + local stderr_rel="steps/${step_slug}.stderr.log" + local stdout_path="$ARTIFACT_DIR/$stdout_rel" + local stderr_path="$ARTIFACT_DIR/$stderr_rel" + local status exit_code + + COMMANDS_RUN+=("$command") + printf '%s\n' "$command" >"$ARTIFACT_DIR/steps/${step_slug}.command.txt" + + if [[ "$DRY_RUN" == "1" ]]; then + printf '[dry-run] %s\n' "$command" >"$stdout_path" + : >"$stderr_path" + status="dry-run" + exit_code=0 + else + if HOME="$SESSION_HOME" \ + XDG_CONFIG_HOME="$SESSION_XDG_CONFIG_HOME" \ + SUBMINER_SESSION_LOGS_DIR="$SESSION_LOGS_DIR" \ + SUBMINER_SESSION_MPV_LOG="$SESSION_MPV_LOG" \ + bash -c "cd \"$REPO_ROOT\" && $command" >"$stdout_path" 2>"$stderr_path"; then + status="passed" + exit_code=0 + EXECUTED_REAL_STEPS=1 + else + exit_code=$? + status="failed" + FAILED=1 + fi + fi + + append_step_record "$lane" "$name" "$status" "$exit_code" "$command" "$stdout_rel" "$stderr_rel" "$note" + printf '%s\t%s\t%s\n' "$lane" "$name" "$status" + + if [[ "$status" == "failed" ]]; then + FAILURE_STEP="$name" + FAILURE_COMMAND="$command" + FAILURE_STDOUT="$stdout_rel" + FAILURE_STDERR="$stderr_rel" + return "$exit_code" + fi +} + +record_nonpassing_step() { + local lane=$1 + local name=$2 + local status=$3 + local note=$4 + local lane_slug=${lane//[^a-zA-Z0-9_-]/-} + local slug=${name//[^a-zA-Z0-9_-]/-} + local step_slug="${lane_slug}--${slug}" + local stdout_rel="steps/${step_slug}.stdout.log" + local stderr_rel="steps/${step_slug}.stderr.log" + printf '%s\n' "$note" >"$ARTIFACT_DIR/$stdout_rel" + : >"$ARTIFACT_DIR/$stderr_rel" + append_step_record "$lane" "$name" "$status" "0" "" "$stdout_rel" "$stderr_rel" "$note" + printf '%s\t%s\t%s\n' "$lane" "$name" "$status" +} + +record_skipped_step() { + record_nonpassing_step "$1" "$2" "skipped" "$3" +} + +record_blocked_step() { + 
add_blocker "$3" + record_nonpassing_step "$1" "$2" "blocked" "$3" +} + +record_failed_step() { + FAILED=1 + FAILURE_STEP=$2 + FAILURE_COMMAND=${FAILURE_COMMAND:-"(validation)"} + local lane_slug=${1//[^a-zA-Z0-9_-]/-} + local step_slug=${2//[^a-zA-Z0-9_-]/-} + FAILURE_STDOUT="steps/${lane_slug}--${step_slug}.stdout.log" + FAILURE_STDERR="steps/${lane_slug}--${step_slug}.stderr.log" + add_blocker "$3" + record_nonpassing_step "$1" "$2" "failed" "$3" +} + +find_real_runtime_helper() { + local candidate + for candidate in \ + "$SCRIPT_DIR/run_real_runtime_smoke.sh" \ + "$SCRIPT_DIR/run_real_mpv_smoke.sh"; do + if [[ -x "$candidate" ]]; then + printf '%s' "$candidate" + return 0 + fi + done + return 1 +} + +acquire_real_runtime_lease() { + local lease_root="$REPO_ROOT/.tmp/skill-verification/locks" + local lease_dir="$lease_root/exclusive-real-runtime" + mkdir -p "$lease_root" + if mkdir "$lease_dir" 2>/dev/null; then + REAL_RUNTIME_LEASE_DIR="$lease_dir" + printf '%s\n' "$SESSION_ID" >"$lease_dir/session_id" + return 0 + fi + + local owner="" + if [[ -f "$lease_dir/session_id" ]]; then + owner=$(cat "$lease_dir/session_id") + fi + REAL_RUNTIME_LEASE_ERROR="real-runtime lease already held${owner:+ by $owner}" + return 1 +} + +release_real_runtime_lease() { + if [[ -n "$REAL_RUNTIME_LEASE_DIR" && -d "$REAL_RUNTIME_LEASE_DIR" ]]; then + if [[ -f "$REAL_RUNTIME_LEASE_DIR/session_id" ]]; then + local owner + owner=$(cat "$REAL_RUNTIME_LEASE_DIR/session_id") + if [[ "$owner" != "$SESSION_ID" ]]; then + return 0 + fi + fi + rm -rf "$REAL_RUNTIME_LEASE_DIR" + fi +} + +compute_final_status() { + if [[ "$FAILED" == "1" ]]; then + FINAL_STATUS="failed" + elif [[ "$BLOCKED" == "1" ]]; then + FINAL_STATUS="blocked" + elif [[ "$EXECUTED_REAL_STEPS" == "1" ]]; then + FINAL_STATUS="passed" + else + FINAL_STATUS="skipped" + fi +} + +write_summary_files() { + local lane_lines + lane_lines=$(printf '%s\n' "${SELECTED_LANES[@]}") + printf '%s\n' "$lane_lines" >"$ARTIFACT_DIR/lanes.txt" 
+ printf '%s\n' "${BLOCKERS[@]}" >"$ARTIFACT_DIR/blockers.txt" + printf '%s\n' "${PATH_ARGS[@]}" >"$ARTIFACT_DIR/requested-paths.txt" + + ARTIFACT_DIR_ENV="$ARTIFACT_DIR" \ + SESSION_ID_ENV="$SESSION_ID" \ + FINAL_STATUS_ENV="$FINAL_STATUS" \ + PATH_SELECTION_MODE_ENV="$PATH_SELECTION_MODE" \ + ALLOW_REAL_RUNTIME_ENV="$ALLOW_REAL_RUNTIME" \ + SESSION_HOME_ENV="$SESSION_HOME" \ + SESSION_XDG_CONFIG_HOME_ENV="$SESSION_XDG_CONFIG_HOME" \ + SESSION_MPV_DIR_ENV="$SESSION_MPV_DIR" \ + SESSION_LOGS_DIR_ENV="$SESSION_LOGS_DIR" \ + SESSION_MPV_LOG_ENV="$SESSION_MPV_LOG" \ + STARTED_AT_ENV="$STARTED_AT" \ + FINISHED_AT_ENV="$FINISHED_AT" \ + FAILED_ENV="$FAILED" \ + FAILURE_COMMAND_ENV="${FAILURE_COMMAND:-}" \ + FAILURE_STDOUT_ENV="${FAILURE_STDOUT:-}" \ + FAILURE_STDERR_ENV="${FAILURE_STDERR:-}" \ + bun -e ' + const fs = require("fs"); + const path = require("path"); + + const lines = fs + .readFileSync(path.join(process.env.ARTIFACT_DIR_ENV, "steps.tsv"), "utf8") + .trim() + .split("\n") + .filter(Boolean) + .slice(1) + .map((line) => { + const [lane, name, status, exitCode, command, stdout, stderr, note] = line.split("\t"); + return { lane, name, status, exitCode: Number(exitCode), command, stdout, stderr, note }; + }); + + const payload = { + sessionId: process.env.SESSION_ID_ENV, + startedAt: process.env.STARTED_AT_ENV, + finishedAt: process.env.FINISHED_AT_ENV, + status: process.env.FINAL_STATUS_ENV, + pathSelectionMode: process.env.PATH_SELECTION_MODE_ENV, + allowRealRuntime: process.env.ALLOW_REAL_RUNTIME_ENV === "1", + sessionHome: process.env.SESSION_HOME_ENV, + sessionXdgConfigHome: process.env.SESSION_XDG_CONFIG_HOME_ENV, + sessionMpvDir: process.env.SESSION_MPV_DIR_ENV, + sessionLogsDir: process.env.SESSION_LOGS_DIR_ENV, + sessionMpvLog: process.env.SESSION_MPV_LOG_ENV, + failed: process.env.FAILED_ENV === "1", + failure: process.env.FAILURE_COMMAND_ENV + ? 
{ + command: process.env.FAILURE_COMMAND_ENV, + stdout: process.env.FAILURE_STDOUT_ENV, + stderr: process.env.FAILURE_STDERR_ENV, + } + : null, + blockers: fs + .readFileSync(path.join(process.env.ARTIFACT_DIR_ENV, "blockers.txt"), "utf8") + .split("\n") + .filter(Boolean), + lanes: fs + .readFileSync(path.join(process.env.ARTIFACT_DIR_ENV, "lanes.txt"), "utf8") + .split("\n") + .filter(Boolean), + requestedPaths: fs + .readFileSync(path.join(process.env.ARTIFACT_DIR_ENV, "requested-paths.txt"), "utf8") + .split("\n") + .filter(Boolean), + steps: lines, + }; + + fs.writeFileSync( + path.join(process.env.ARTIFACT_DIR_ENV, "summary.json"), + JSON.stringify(payload, null, 2) + "\n", + ); + + const summaryLines = [ + `status: ${payload.status}`, + `session: ${payload.sessionId}`, + `artifacts: ${process.env.ARTIFACT_DIR_ENV}`, + `lanes: ${payload.lanes.join(", ") || "(none)"}`, + ]; + + if (payload.requestedPaths.length > 0) { + summaryLines.push("requested paths:"); + for (const entry of payload.requestedPaths) { + summaryLines.push(`- ${entry}`); + } + } + + if (payload.failure) { + summaryLines.push(`failure command: ${payload.failure.command}`); + summaryLines.push(`failure stdout: ${payload.failure.stdout}`); + summaryLines.push(`failure stderr: ${payload.failure.stderr}`); + } + + if (payload.blockers.length > 0) { + summaryLines.push("blockers:"); + for (const blocker of payload.blockers) { + summaryLines.push(`- ${blocker}`); + } + } + + summaryLines.push("steps:"); + for (const step of payload.steps) { + summaryLines.push(`- ${step.lane}/${step.name}: ${step.status}`); + } + + fs.writeFileSync( + path.join(process.env.ARTIFACT_DIR_ENV, "summary.txt"), + summaryLines.join("\n") + "\n", + ); + ' +} + +SCRIPT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) +SKILL_DIR=$(cd "$SCRIPT_DIR/.." 
&& pwd) +REPO_ROOT=$(git rev-parse --show-toplevel 2>/dev/null || pwd) + +declare -a PATH_ARGS=() +declare -a SELECTED_LANES=() +declare -a COMMANDS_RUN=() +declare -a BLOCKERS=() + +ALLOW_REAL_RUNTIME=0 +DRY_RUN=0 +FAILED=0 +BLOCKED=0 +EXECUTED_REAL_STEPS=0 +FAILURE_STEP="" +FAILURE_COMMAND="" +FAILURE_STDOUT="" +FAILURE_STDERR="" +REAL_RUNTIME_LEASE_DIR="" +REAL_RUNTIME_LEASE_ERROR="" +PATH_SELECTION_MODE="auto" + +trap 'release_real_runtime_lease' EXIT + +while [[ $# -gt 0 ]]; do + case "$1" in + --lane) + shift + [[ $# -gt 0 ]] || { + echo "Missing value for --lane" >&2 + exit 2 + } + add_lane "$1" + PATH_SELECTION_MODE="explicit-lanes" + ;; + --artifact-dir) + shift + [[ $# -gt 0 ]] || { + echo "Missing value for --artifact-dir" >&2 + exit 2 + } + ARTIFACT_DIR=$1 + ;; + --allow-real-runtime|--allow-real-gui) + ALLOW_REAL_RUNTIME=1 + ;; + --dry-run) + DRY_RUN=1 + ;; + --help|-h) + usage + exit 0 + ;; + *) + PATH_ARGS+=("$1") + ;; + esac + shift || true +done + +if [[ -z "${ARTIFACT_DIR:-}" ]]; then + SESSION_ID=$(generate_session_id) + ARTIFACT_DIR="$REPO_ROOT/.tmp/skill-verification/$SESSION_ID" +else + validate_artifact_dir "$ARTIFACT_DIR" + SESSION_ID=$(basename "$ARTIFACT_DIR") +fi + +mkdir -p "$ARTIFACT_DIR/steps" +STEPS_TSV="$ARTIFACT_DIR/steps.tsv" +printf 'lane\tstep\tstatus\texit_code\tcommand\tstdout\tstderr\tnote\n' >"$STEPS_TSV" + +STARTED_AT=$(timestamp_iso) +SESSION_HOME="$REPO_ROOT/.tmp/skill-verification/runtime/$SESSION_ID/home" +SESSION_XDG_CONFIG_HOME="$REPO_ROOT/.tmp/skill-verification/runtime/$SESSION_ID/xdg-config" +SESSION_MPV_DIR="$SESSION_XDG_CONFIG_HOME/mpv" +SESSION_LOGS_DIR="$REPO_ROOT/.tmp/skill-verification/runtime/$SESSION_ID/logs" +SESSION_MPV_LOG="$SESSION_LOGS_DIR/mpv.log" +mkdir -p "$SESSION_HOME" "$SESSION_MPV_DIR" "$SESSION_LOGS_DIR" + +CLASSIFIER_OUTPUT="$ARTIFACT_DIR/classification.txt" +if [[ ${#SELECTED_LANES[@]} -eq 0 ]]; then + if [[ ${#PATH_ARGS[@]} -gt 0 ]]; then + PATH_SELECTION_MODE="explicit-paths" + fi + if 
"$SCRIPT_DIR/classify_subminer_diff.sh" "${PATH_ARGS[@]}" >"$CLASSIFIER_OUTPUT"; then + while IFS= read -r line; do + case "$line" in + lane:*) + add_lane "${line#lane:}" + ;; + esac + done <"$CLASSIFIER_OUTPUT" + else + record_failed_step "meta" "classify" "classification failed" + fi +else + : >"$CLASSIFIER_OUTPUT" +fi + +record_env + +if [[ ${#SELECTED_LANES[@]} -eq 0 ]]; then + add_lane "core" +fi + +for lane in "${SELECTED_LANES[@]}"; do + case "$lane" in + docs) + run_step "$lane" "docs-kb" "bun run test:docs:kb" || break + ;; + config) + run_step "$lane" "config" "bun run test:config" || break + ;; + core) + run_step "$lane" "typecheck" "bun run typecheck" || break + run_step "$lane" "fast-tests" "bun run test:fast" || break + ;; + launcher-plugin) + run_step "$lane" "launcher" "bun run test:launcher" || break + run_step "$lane" "plugin-src" "bun run test:plugin:src" || break + ;; + runtime-compat) + run_step "$lane" "runtime-compat" "bun run test:runtime:compat" || break + ;; + real-runtime) + if [[ "$ALLOW_REAL_RUNTIME" != "1" ]]; then + record_blocked_step "$lane" "real-runtime" "real-runtime requested without --allow-real-runtime" + continue + fi + if ! 
acquire_real_runtime_lease; then + record_blocked_step "$lane" "real-runtime-lease" "$REAL_RUNTIME_LEASE_ERROR" + continue + fi + helper=$(find_real_runtime_helper || true) + if [[ -z "${helper:-}" ]]; then + record_blocked_step "$lane" "real-runtime-helper" "no real-runtime helper script available in $SCRIPT_DIR" + continue + fi + run_step "$lane" "real-runtime" "\"$helper\" \"$SESSION_ID\" \"$ARTIFACT_DIR\"" || break + ;; + *) + record_blocked_step "$lane" "unknown-lane" "unknown lane: $lane" + ;; + esac +done + +release_real_runtime_lease +FINISHED_AT=$(timestamp_iso) +compute_final_status +write_summary_files + +printf 'summary:%s\n' "$ARTIFACT_DIR/summary.txt" +cat "$ARTIFACT_DIR/summary.txt" diff --git a/plugins/subminer-workflow/skills/subminer-scrum-master/SKILL.md b/plugins/subminer-workflow/skills/subminer-scrum-master/SKILL.md new file mode 100644 index 0000000..7e76e42 --- /dev/null +++ b/plugins/subminer-workflow/skills/subminer-scrum-master/SKILL.md @@ -0,0 +1,162 @@ +--- +name: 'subminer-scrum-master' +description: 'Use in the SubMiner repo when a request should be turned into planned work and driven through execution. Assesses whether backlog tracking is warranted, creates or updates tasks when needed, records a plan, dispatches one or more subagents, and requires verification before handoff.' +--- + +# SubMiner Scrum Master + +Canonical source: this plugin path. + +Own workflow, not code by default. + +Use this skill when the user gives a feature request, bug report, issue, refactor, or implementation ask and the agent should manage intake, planning, backlog hygiene, worker dispatch, and verification through completion. + +## Core Rules + +1. Decide first whether backlog tracking is warranted. +2. If backlog is needed, search first. Update existing work when it clearly matches. +3. If backlog is not needed, keep the process light. Do not invent ticket ceremony. +4. Record a plan before dispatching coding work. +5. 
Use parent + subtasks for multi-part work when backlog is used. +6. Dispatch conservatively. Parallelize only disjoint write scopes. +7. Require verification before handoff, typically via `subminer-change-verification`. +8. Report backlog actions, dispatched workers, verification, blockers, and remaining risks. + +## Backlog Workflow + +Preferred order: + +1. Read `backlog://workflow/overview` when MCP resources are available. +2. If resources are unavailable, use the corresponding backlog tool overview. +3. If backlog MCP is unavailable in the session, work directly in repo-local `backlog/` files and record that constraint explicitly. + +## Backlog Decision + +Skip backlog when the request is: + +- question only +- obvious mechanical edit +- tiny isolated change with no real planning + +Use backlog when the work: + +- needs planning or scope decisions +- spans multiple phases or subsystems +- is likely to need subagent dispatch +- should remain traceable for handoff/resume + +If backlog is used: + +- search existing tasks first +- create/update a standalone task for one focused deliverable +- create/update a parent task plus subtasks for multi-part work +- record the implementation plan in the task before implementation begins + +## Intake Workflow + +1. Parse the request. + Classify it as question, mechanical edit, bugfix, feature, refactor, investigation, or follow-up. +2. Decide whether backlog is needed. +3. If backlog is needed: + - search first + - update existing task if clearly relevant + - otherwise create the right structure + - write the implementation plan before dispatch +4. If backlog is skipped: + - write a short working plan in-thread + - proceed without fake ticketing +5. Choose execution mode: + - no subagents for trivial work + - one worker for focused work + - parallel workers only for disjoint scopes +6. Run verification before handoff. + +## Dispatch Rules + +The scrum master orchestrates. Workers implement. 
+ +- Do not become the default implementer unless delegation is unnecessary. +- Do not parallelize overlapping files or tightly coupled runtime work. +- Give every worker explicit ownership of files/modules. +- Tell every worker other agents may be active and they must not revert unrelated edits. +- Require each worker to report: + - changed files + - tests run + - blockers + +Use worker agents for implementation and explorer agents only for bounded codebase questions. + +## Verification + +Every nontrivial code task gets verification. + +Preferred flow: + +1. use `subminer-change-verification` +2. start with the cheapest sufficient lane +3. escalate only when needed +4. if worker verification is sufficient, accept it or run one final consolidating pass + +Never hand off nontrivial work without stating what was verified and what was skipped. + +## Pre-Handoff Policy Checks + +Before handoff, always ask and answer both questions explicitly: + +1. Docs update required? +2. Changelog fragment required? + +Rules: + +- Do not assume silence implies "no." +- If the answer is yes, complete the update or report the blocker. +- Include final yes/no answers in the handoff summary even when both answers are "no." + +## Failure / Scope Handling + +- If a worker hits ambiguity, pause and ask the user. +- If verification fails, either: + - send the worker back with exact failure context, or + - fix it directly if it is tiny and clearly in scope +- If new scope appears, revisit backlog structure before silently expanding work. 
+ +## Representative Flows + +### Trivial no-ticket work + +- decide backlog is unnecessary +- keep a short plan +- implement directly or with one worker if helpful +- run targeted verification +- report outcome concisely + +### Single-task implementation + +- search/create/update one task +- record plan +- dispatch one worker +- integrate +- verify +- update task and report outcome + +### Parent + subtasks execution + +- search/create/update parent task +- create subtasks for distinct deliverables/phases +- record sequencing in the plan +- dispatch workers only where scopes are disjoint +- integrate +- run consolidated verification +- update task state and report outcome + +## Output Expectations + +At the end, report: + +- whether backlog was used and what changed +- which workers were dispatched and what they owned +- what verification ran +- explicit answers to: + - docs update required? + - changelog fragment required? +- blockers, skips, and risks diff --git a/scripts/build-changelog.test.ts b/scripts/build-changelog.test.ts index 6de8418..a6d7186 100644 --- a/scripts/build-changelog.test.ts +++ b/scripts/build-changelog.test.ts @@ -111,7 +111,11 @@ test('writeChangelogArtifacts skips changelog prepend when release section alrea fs.mkdirSync(projectRoot, { recursive: true }); fs.mkdirSync(path.join(projectRoot, 'changes'), { recursive: true }); fs.writeFileSync(path.join(projectRoot, 'CHANGELOG.md'), existingChangelog, 'utf8'); - fs.writeFileSync(path.join(projectRoot, 'changes', '001.md'), ['type: added', 'area: overlay', '', '- Stale release fragment.'].join('\n'), 'utf8'); + fs.writeFileSync( + path.join(projectRoot, 'changes', '001.md'), + ['type: added', 'area: overlay', '', '- Stale release fragment.'].join('\n'), + 'utf8', + ); try { const result = writeChangelogArtifacts({ @@ -125,7 +129,10 @@ test('writeChangelogArtifacts skips changelog prepend when release section alrea const changelog = fs.readFileSync(path.join(projectRoot, 'CHANGELOG.md'), 
'utf8'); assert.equal(changelog, existingChangelog); - const releaseNotes = fs.readFileSync(path.join(projectRoot, 'release', 'release-notes.md'), 'utf8'); + const releaseNotes = fs.readFileSync( + path.join(projectRoot, 'release', 'release-notes.md'), + 'utf8', + ); assert.match(releaseNotes, /## Highlights\n### Added\n- Existing release bullet\./); } finally { fs.rmSync(workspace, { recursive: true, force: true }); diff --git a/scripts/build-changelog.ts b/scripts/build-changelog.ts index 76197cc..ac96a61 100644 --- a/scripts/build-changelog.ts +++ b/scripts/build-changelog.ts @@ -354,11 +354,7 @@ export function writeChangelogArtifacts(options?: ChangelogOptions): { log(`Removed ${fragment.path}`); } - const releaseNotesPath = writeReleaseNotesFile( - cwd, - existingReleaseSection, - options?.deps, - ); + const releaseNotesPath = writeReleaseNotesFile(cwd, existingReleaseSection, options?.deps); log(`Generated ${releaseNotesPath}`); return { diff --git a/scripts/patch-modernz.test.ts b/scripts/patch-modernz.test.ts index 7ceadf4..f89069a 100644 --- a/scripts/patch-modernz.test.ts +++ b/scripts/patch-modernz.test.ts @@ -55,19 +55,15 @@ exit 1 `, ); - const result = spawnSync( - 'bash', - ['scripts/patch-modernz.sh', '--target', target], - { - cwd: process.cwd(), - encoding: 'utf8', - env: { - ...process.env, - HOME: path.join(root, 'home'), - PATH: `${binDir}:${process.env.PATH || ''}`, - }, + const result = spawnSync('bash', ['scripts/patch-modernz.sh', '--target', target], { + cwd: process.cwd(), + encoding: 'utf8', + env: { + ...process.env, + HOME: path.join(root, 'home'), + PATH: `${binDir}:${process.env.PATH || ''}`, }, - ); + }); assert.equal(result.status, 1, result.stderr || result.stdout); assert.match(result.stderr, /failed to apply patch to/); diff --git a/scripts/run-coverage-lane.test.ts b/scripts/run-coverage-lane.test.ts new file mode 100644 index 0000000..7c6f0c8 --- /dev/null +++ b/scripts/run-coverage-lane.test.ts @@ -0,0 +1,74 @@ +import 
assert from 'node:assert/strict'; +import { resolve } from 'node:path'; +import test from 'node:test'; + +import { mergeLcovReports, resolveCoverageDir } from './run-coverage-lane'; + +test('mergeLcovReports combines duplicate source-file counters across shard outputs', () => { + const merged = mergeLcovReports([ + [ + 'SF:src/example.ts', + 'FN:10,alpha', + 'FNDA:1,alpha', + 'DA:10,1', + 'DA:11,0', + 'BRDA:10,0,0,1', + 'BRDA:10,0,1,-', + 'end_of_record', + '', + ].join('\n'), + [ + 'SF:src/example.ts', + 'FN:10,alpha', + 'FN:20,beta', + 'FNDA:2,alpha', + 'FNDA:1,beta', + 'DA:10,2', + 'DA:11,1', + 'DA:20,1', + 'BRDA:10,0,0,0', + 'BRDA:10,0,1,1', + 'end_of_record', + '', + ].join('\n'), + ]); + + assert.match(merged, /SF:src\/example\.ts/); + assert.match(merged, /FN:10,alpha/); + assert.match(merged, /FN:20,beta/); + assert.match(merged, /FNDA:3,alpha/); + assert.match(merged, /FNDA:1,beta/); + assert.match(merged, /FNF:2/); + assert.match(merged, /FNH:2/); + assert.match(merged, /DA:10,3/); + assert.match(merged, /DA:11,1/); + assert.match(merged, /DA:20,1/); + assert.match(merged, /LF:3/); + assert.match(merged, /LH:3/); + assert.match(merged, /BRDA:10,0,0,1/); + assert.match(merged, /BRDA:10,0,1,1/); + assert.match(merged, /BRF:2/); + assert.match(merged, /BRH:2/); +}); + +test('mergeLcovReports keeps distinct source files as separate records', () => { + const merged = mergeLcovReports([ + ['SF:src/a.ts', 'DA:1,1', 'end_of_record', ''].join('\n'), + ['SF:src/b.ts', 'DA:2,1', 'end_of_record', ''].join('\n'), + ]); + + assert.match(merged, /SF:src\/a\.ts[\s\S]*end_of_record/); + assert.match(merged, /SF:src\/b\.ts[\s\S]*end_of_record/); +}); + +test('resolveCoverageDir keeps coverage output inside the repository', () => { + const repoRoot = resolve('/tmp', 'subminer-repo-root'); + + assert.equal(resolveCoverageDir(repoRoot, []), resolve(repoRoot, 'coverage')); + assert.equal( + resolveCoverageDir(repoRoot, ['--coverage-dir', 'coverage/test-src']), + 
resolve(repoRoot, 'coverage/test-src'), + ); + assert.throws(() => resolveCoverageDir(repoRoot, ['--coverage-dir', '../escape'])); + assert.throws(() => resolveCoverageDir(repoRoot, ['--coverage-dir', '/tmp/escape'])); +}); diff --git a/scripts/run-coverage-lane.ts b/scripts/run-coverage-lane.ts new file mode 100644 index 0000000..964962e --- /dev/null +++ b/scripts/run-coverage-lane.ts @@ -0,0 +1,311 @@ +import { existsSync, mkdirSync, readFileSync, readdirSync, rmSync, writeFileSync } from 'node:fs'; +import { spawnSync } from 'node:child_process'; +import { isAbsolute, join, relative, resolve } from 'node:path'; + +type LaneConfig = { + roots: string[]; + include: string[]; + exclude: Set; +}; + +type LcovRecord = { + sourceFile: string; + functions: Map; + functionHits: Map; + lines: Map; + branches: Map; +}; + +const repoRoot = resolve(__dirname, '..'); + +const lanes: Record = { + 'bun-src-full': { + roots: ['src'], + include: ['.test.ts', '.type-test.ts'], + exclude: new Set([ + 'src/core/services/anki-jimaku-ipc.test.ts', + 'src/core/services/ipc.test.ts', + 'src/core/services/overlay-manager.test.ts', + 'src/main/config-validation.test.ts', + 'src/main/runtime/registry.test.ts', + 'src/main/runtime/startup-config.test.ts', + ]), + }, + 'bun-launcher-unit': { + roots: ['launcher'], + include: ['.test.ts'], + exclude: new Set(['launcher/smoke.e2e.test.ts']), + }, +}; + +function collectFiles(rootDir: string, includeSuffixes: string[], excludeSet: Set): string[] { + const out: string[] = []; + const visit = (currentDir: string) => { + for (const entry of readdirSync(currentDir, { withFileTypes: true })) { + const fullPath = resolve(currentDir, entry.name); + if (entry.isDirectory()) { + visit(fullPath); + continue; + } + const relPath = relative(repoRoot, fullPath).replaceAll('\\', '/'); + if (excludeSet.has(relPath)) continue; + if (includeSuffixes.some((suffix) => relPath.endsWith(suffix))) { + out.push(relPath); + } + } + }; + + visit(resolve(repoRoot, 
rootDir)); + out.sort(); + return out; +} + +function getLaneFiles(laneName: string): string[] { + const lane = lanes[laneName]; + if (!lane) { + throw new Error(`Unknown coverage lane: ${laneName}`); + } + const files = lane.roots.flatMap((rootDir) => collectFiles(rootDir, lane.include, lane.exclude)); + if (files.length === 0) { + throw new Error(`No test files found for coverage lane: ${laneName}`); + } + return files; +} + +function parseCoverageDirArg(argv: string[]): string { + for (let index = 0; index < argv.length; index += 1) { + if (argv[index] === '--coverage-dir') { + const next = argv[index + 1]; + if (typeof next !== 'string') { + throw new Error('Missing value for --coverage-dir'); + } + return next; + } + } + return 'coverage'; +} + +export function resolveCoverageDir(repoRootDir: string, argv: string[]): string { + const candidate = resolve(repoRootDir, parseCoverageDirArg(argv)); + const rel = relative(repoRootDir, candidate); + if (isAbsolute(rel) || rel.startsWith('..')) { + throw new Error(`--coverage-dir must be within repository: ${candidate}`); + } + return candidate; +} + +function parseLcovReport(report: string): LcovRecord[] { + const records: LcovRecord[] = []; + let current: LcovRecord | null = null; + + const ensureCurrent = (): LcovRecord => { + if (!current) { + throw new Error('Malformed lcov report: record data before SF'); + } + return current; + }; + + for (const rawLine of report.split(/\r?\n/)) { + const line = rawLine.trim(); + if (!line) continue; + if (line.startsWith('TN:')) { + continue; + } + if (line.startsWith('SF:')) { + current = { + sourceFile: line.slice(3), + functions: new Map(), + functionHits: new Map(), + lines: new Map(), + branches: new Map(), + }; + continue; + } + if (line === 'end_of_record') { + if (current) { + records.push(current); + current = null; + } + continue; + } + if (line.startsWith('FN:')) { + const [lineNumber, ...nameParts] = line.slice(3).split(','); + 
ensureCurrent().functions.set(nameParts.join(','), Number(lineNumber)); + continue; + } + if (line.startsWith('FNDA:')) { + const [hits, ...nameParts] = line.slice(5).split(','); + ensureCurrent().functionHits.set(nameParts.join(','), Number(hits)); + continue; + } + if (line.startsWith('DA:')) { + const [lineNumber, hits] = line.slice(3).split(','); + ensureCurrent().lines.set(Number(lineNumber), Number(hits)); + continue; + } + if (line.startsWith('BRDA:')) { + const [lineNumber, block, branch, hits] = line.slice(5).split(','); + if (lineNumber === undefined || block === undefined || branch === undefined || hits === undefined) { + continue; + } + ensureCurrent().branches.set(`${lineNumber}:${block}:${branch}`, { + line: Number(lineNumber), + block, + branch, + hits: hits === '-' ? null : Number(hits), + }); + } + } + + if (current) { + records.push(current); + } + + return records; +} + +export function mergeLcovReports(reports: string[]): string { + const merged = new Map(); + + for (const report of reports) { + for (const record of parseLcovReport(report)) { + let target = merged.get(record.sourceFile); + if (!target) { + target = { + sourceFile: record.sourceFile, + functions: new Map(), + functionHits: new Map(), + lines: new Map(), + branches: new Map(), + }; + merged.set(record.sourceFile, target); + } + + for (const [name, line] of record.functions) { + if (!target.functions.has(name)) { + target.functions.set(name, line); + } + } + + for (const [name, hits] of record.functionHits) { + target.functionHits.set(name, (target.functionHits.get(name) ?? 0) + hits); + } + + for (const [lineNumber, hits] of record.lines) { + target.lines.set(lineNumber, (target.lines.get(lineNumber) ?? 
0) + hits); + } + + for (const [branchKey, branchRecord] of record.branches) { + const existing = target.branches.get(branchKey); + if (!existing) { + target.branches.set(branchKey, { ...branchRecord }); + continue; + } + if (branchRecord.hits === null) { + continue; + } + existing.hits = (existing.hits ?? 0) + branchRecord.hits; + } + } + } + + const chunks: string[] = []; + for (const sourceFile of [...merged.keys()].sort()) { + const record = merged.get(sourceFile)!; + chunks.push(`SF:${record.sourceFile}`); + + const functions = [...record.functions.entries()].sort((a, b) => + a[1] === b[1] ? a[0].localeCompare(b[0]) : a[1] - b[1], + ); + for (const [name, line] of functions) { + chunks.push(`FN:${line},${name}`); + } + for (const [name] of functions) { + chunks.push(`FNDA:${record.functionHits.get(name) ?? 0},${name}`); + } + chunks.push(`FNF:${functions.length}`); + chunks.push(`FNH:${functions.filter(([name]) => (record.functionHits.get(name) ?? 0) > 0).length}`); + + const branches = [...record.branches.values()].sort((a, b) => + a.line === b.line + ? a.block === b.block + ? a.branch.localeCompare(b.branch) + : a.block.localeCompare(b.block) + : a.line - b.line, + ); + for (const branch of branches) { + chunks.push( + `BRDA:${branch.line},${branch.block},${branch.branch},${branch.hits === null ? '-' : branch.hits}`, + ); + } + chunks.push(`BRF:${branches.length}`); + chunks.push(`BRH:${branches.filter((branch) => (branch.hits ?? 0) > 0).length}`); + + const lines = [...record.lines.entries()].sort((a, b) => a[0] - b[0]); + for (const [lineNumber, hits] of lines) { + chunks.push(`DA:${lineNumber},${hits}`); + } + chunks.push(`LF:${lines.length}`); + chunks.push(`LH:${lines.filter(([, hits]) => hits > 0).length}`); + chunks.push('end_of_record'); + } + + return chunks.length > 0 ? 
`${chunks.join('\n')}\n` : ''; +} + +function runCoverageLane(): number { + const laneName = process.argv[2]; + if (laneName === undefined) { + process.stderr.write('Missing coverage lane name\n'); + return 1; + } + + const coverageDir = resolveCoverageDir(repoRoot, process.argv.slice(3)); + const shardRoot = join(coverageDir, '.shards'); + mkdirSync(coverageDir, { recursive: true }); + rmSync(shardRoot, { recursive: true, force: true }); + mkdirSync(shardRoot, { recursive: true }); + + const files = getLaneFiles(laneName); + const reports: string[] = []; + + try { + for (const [index, file] of files.entries()) { + const shardDir = join(shardRoot, `${String(index + 1).padStart(3, '0')}`); + const result = spawnSync( + 'bun', + ['test', '--coverage', '--coverage-reporter=lcov', '--coverage-dir', shardDir, `./${file}`], + { + cwd: repoRoot, + stdio: 'inherit', + }, + ); + + if (result.error) { + throw result.error; + } + if ((result.status ?? 1) !== 0) { + return result.status ?? 1; + } + + const lcovPath = join(shardDir, 'lcov.info'); + if (!existsSync(lcovPath)) { + process.stdout.write(`Skipping empty coverage shard for ${file}\n`); + continue; + } + + reports.push(readFileSync(lcovPath, 'utf8')); + } + + writeFileSync(join(coverageDir, 'lcov.info'), mergeLcovReports(reports), 'utf8'); + process.stdout.write(`Merged LCOV written to ${relative(repoRoot, join(coverageDir, 'lcov.info'))}\n`); + return 0; + } finally { + rmSync(shardRoot, { recursive: true, force: true }); + } +} + +// @ts-ignore Bun entrypoint detection; TS config for scripts still targets CommonJS. 
+if (import.meta.main) { + process.exit(runCoverageLane()); +} diff --git a/scripts/subminer-change-verification.test.ts b/scripts/subminer-change-verification.test.ts index b7516f3..6f28db2 100644 --- a/scripts/subminer-change-verification.test.ts +++ b/scripts/subminer-change-verification.test.ts @@ -33,7 +33,7 @@ function runBash(args: string[]) { } function parseArtifactDir(stdout: string): string { - const match = stdout.match(/^artifact_dir=(.+)$/m); + const match = stdout.match(/^artifacts: (.+)$/m); assert.ok(match, `expected artifact_dir in stdout, got:\n${stdout}`); return match[1] ?? ''; } @@ -42,10 +42,17 @@ function readSummaryJson(artifactDir: string) { return JSON.parse(fs.readFileSync(path.join(artifactDir, 'summary.json'), 'utf8')) as { sessionId: string; status: string; - selectedLanes: string[]; + lanes: string[]; blockers?: string[]; artifactDir: string; pathSelectionMode?: string; + steps: Array<{ + lane: string; + name: string; + stdout: string; + stderr: string; + note: string; + }>; }; } @@ -71,15 +78,14 @@ test('verifier blocks requested real-runtime lane when runtime execution is not 'launcher/mpv.ts', ]); - assert.notEqual(result.status, 0, result.stdout); - assert.match(result.stdout, /^result=blocked$/m); + assert.equal(result.status, 0, result.stdout); const summary = readSummaryJson(artifactDir); assert.equal(summary.status, 'blocked'); - assert.deepEqual(summary.selectedLanes, ['real-runtime']); + assert.deepEqual(summary.lanes, ['real-runtime']); assert.ok(summary.sessionId.length > 0); assert.ok(summary.blockers?.some((entry) => entry.includes('--allow-real-runtime'))); - assert.equal(fs.existsSync(path.join(artifactDir, 'reports', 'summary.json')), true); + assert.equal(fs.existsSync(path.join(artifactDir, 'summary.json')), true); }); }); @@ -96,16 +102,81 @@ test('verifier fails closed for unknown lanes', () => { 'src/main.ts', ]); - assert.notEqual(result.status, 0, result.stdout); - assert.match(result.stdout, 
/^result=failed$/m); + assert.equal(result.status, 0, result.stdout); const summary = readSummaryJson(artifactDir); - assert.equal(summary.status, 'failed'); - assert.deepEqual(summary.selectedLanes, ['not-a-lane']); + assert.equal(summary.status, 'blocked'); + assert.deepEqual(summary.lanes, ['not-a-lane']); assert.ok(summary.blockers?.some((entry) => entry.includes('unknown lane'))); }); }); +test('verifier keeps non-passing step artifacts distinct across lanes', () => { + withTempDir((root) => { + const artifactDir = path.join(root, 'artifacts'); + const result = runBash([ + verifyScript, + '--dry-run', + '--artifact-dir', + artifactDir, + '--lane', + 'docs', + '--lane', + 'not-a-lane', + 'src/main.ts', + ]); + + assert.equal(result.status, 0, result.stdout); + + const summary = readSummaryJson(artifactDir); + const docsStep = summary.steps.find((step) => step.lane === 'docs' && step.name === 'docs-kb'); + const unknownStep = summary.steps.find( + (step) => step.lane === 'not-a-lane' && step.name === 'unknown-lane', + ); + + assert.ok(docsStep); + assert.ok(unknownStep); + assert.notEqual(docsStep?.stdout, unknownStep?.stdout); + assert.equal(fs.existsSync(path.join(artifactDir, docsStep!.stdout)), true); + assert.equal(fs.existsSync(path.join(artifactDir, unknownStep!.stdout)), true); + }); +}); + +test('verifier records the real-runtime lease blocker once', () => { + withTempDir((root) => { + const artifactDir = path.join(root, 'artifacts'); + const leaseDir = path.join( + repoRoot, + '.tmp', + 'skill-verification', + 'locks', + 'exclusive-real-runtime', + ); + fs.mkdirSync(leaseDir, { recursive: true }); + fs.writeFileSync(path.join(leaseDir, 'session_id'), 'other-session'); + + try { + const result = runBash([ + verifyScript, + '--dry-run', + '--artifact-dir', + artifactDir, + '--allow-real-runtime', + '--lane', + 'real-runtime', + 'launcher/mpv.ts', + ]); + + assert.equal(result.status, 0, result.stdout); + + const summary = readSummaryJson(artifactDir); + 
assert.deepEqual(summary.blockers, ['real-runtime lease already held by other-session']); + } finally { + fs.rmSync(leaseDir, { recursive: true, force: true }); + } + }); +}); + test('verifier allocates unique session ids and artifact roots by default', () => { const first = runBash([verifyScript, '--dry-run', '--lane', 'core', 'src/main.ts']); const second = runBash([verifyScript, '--dry-run', '--lane', 'core', 'src/main.ts']); @@ -121,9 +192,9 @@ test('verifier allocates unique session ids and artifact roots by default', () = const secondSummary = readSummaryJson(secondArtifactDir); assert.notEqual(firstSummary.sessionId, secondSummary.sessionId); - assert.notEqual(firstSummary.artifactDir, secondSummary.artifactDir); - assert.equal(firstSummary.pathSelectionMode, 'explicit'); - assert.equal(secondSummary.pathSelectionMode, 'explicit'); + assert.notEqual(firstArtifactDir, secondArtifactDir); + assert.equal(firstSummary.pathSelectionMode, 'explicit-lanes'); + assert.equal(secondSummary.pathSelectionMode, 'explicit-lanes'); } finally { fs.rmSync(firstArtifactDir, { recursive: true, force: true }); fs.rmSync(secondArtifactDir, { recursive: true, force: true }); diff --git a/scripts/test-plugin-start-gate.lua b/scripts/test-plugin-start-gate.lua index 5f45f83..59f95cc 100644 --- a/scripts/test-plugin-start-gate.lua +++ b/scripts/test-plugin-start-gate.lua @@ -822,6 +822,92 @@ do ) end +do + local recorded, err = run_plugin_scenario({ + process_list = "", + option_overrides = { + binary_path = binary_path, + auto_start = "yes", + auto_start_visible_overlay = "yes", + auto_start_pause_until_ready = "yes", + socket_path = "/tmp/subminer-socket", + }, + input_ipc_server = "/tmp/subminer-socket", + media_title = "Random Movie", + files = { + [binary_path] = true, + }, + }) + assert_true(recorded ~= nil, "plugin failed to load for manual toggle-off ready scenario: " .. 
tostring(err)) + fire_event(recorded, "file-loaded") + assert_true(recorded.script_messages["subminer-toggle"] ~= nil, "subminer-toggle script message not registered") + recorded.script_messages["subminer-toggle"]() + assert_true( + count_control_calls(recorded.async_calls, "--toggle-visible-overlay") == 1, + "manual toggle should use explicit visible-overlay toggle command" + ) + recorded.script_messages["subminer-autoplay-ready"]() + assert_true( + count_control_calls(recorded.async_calls, "--show-visible-overlay") == 1, + "manual toggle-off before readiness should suppress ready-time visible overlay restore" + ) +end + +do + local recorded, err = run_plugin_scenario({ + process_list = "", + option_overrides = { + binary_path = binary_path, + auto_start = "yes", + auto_start_visible_overlay = "yes", + auto_start_pause_until_ready = "yes", + socket_path = "/tmp/subminer-socket", + }, + input_ipc_server = "/tmp/subminer-socket", + media_title = "Random Movie", + files = { + [binary_path] = true, + }, + }) + assert_true( + recorded ~= nil, + "plugin failed to load for repeated ready restore suppression scenario: " .. tostring(err) + ) + fire_event(recorded, "file-loaded") + assert_true(recorded.script_messages["subminer-toggle"] ~= nil, "subminer-toggle script message not registered") + recorded.script_messages["subminer-toggle"]() + recorded.script_messages["subminer-autoplay-ready"]() + recorded.script_messages["subminer-autoplay-ready"]() + assert_true( + count_control_calls(recorded.async_calls, "--show-visible-overlay") == 1, + "manual toggle-off should suppress repeated ready-time visible overlay restores for the same session" + ) +end + +do + local recorded, err = run_plugin_scenario({ + process_list = "", + option_overrides = { + binary_path = binary_path, + auto_start = "no", + }, + files = { + [binary_path] = true, + }, + }) + assert_true(recorded ~= nil, "plugin failed to load for manual toggle command scenario: " .. 
tostring(err)) + assert_true(recorded.script_messages["subminer-toggle"] ~= nil, "subminer-toggle script message not registered") + recorded.script_messages["subminer-toggle"]() + assert_true( + count_control_calls(recorded.async_calls, "--toggle-visible-overlay") == 1, + "script-message toggle should issue explicit visible-overlay toggle command" + ) + assert_true( + count_control_calls(recorded.async_calls, "--toggle") == 0, + "script-message toggle should not issue legacy generic toggle command" + ) +end + do local recorded, err = run_plugin_scenario({ process_list = "", diff --git a/scripts/update-aur-package.test.ts b/scripts/update-aur-package.test.ts index 8189538..b9a52cc 100644 --- a/scripts/update-aur-package.test.ts +++ b/scripts/update-aur-package.test.ts @@ -47,8 +47,8 @@ test('update-aur-package updates PKGBUILD and .SRCINFO without makepkg', () => { const pkgbuild = fs.readFileSync(path.join(pkgDir, 'PKGBUILD'), 'utf8'); const srcinfo = fs.readFileSync(path.join(pkgDir, '.SRCINFO'), 'utf8'); - const expectedSums = [appImagePath, wrapperPath, assetsPath].map((filePath) => - execFileSync('sha256sum', [filePath], { encoding: 'utf8' }).split(/\s+/)[0], + const expectedSums = [appImagePath, wrapperPath, assetsPath].map( + (filePath) => execFileSync('sha256sum', [filePath], { encoding: 'utf8' }).split(/\s+/)[0], ); assert.match(pkgbuild, /^pkgver=0\.6\.3$/m); diff --git a/src/anki-field-config.ts b/src/anki-field-config.ts index b87f047..5861742 100644 --- a/src/anki-field-config.ts +++ b/src/anki-field-config.ts @@ -1,4 +1,4 @@ -import type { AnkiConnectConfig } from './types'; +import type { AnkiConnectConfig } from './types/anki'; type NoteFieldValue = { value?: string } | string | null | undefined; @@ -8,7 +8,9 @@ function normalizeFieldName(value: string | null | undefined): string | null { return trimmed.length > 0 ? 
trimmed : null; } -export function getConfiguredWordFieldName(config?: Pick | null): string { +export function getConfiguredWordFieldName( + config?: Pick | null, +): string { return normalizeFieldName(config?.fields?.word) ?? 'Expression'; } diff --git a/src/anki-integration.ts b/src/anki-integration.ts index 5ba86b7..8282d70 100644 --- a/src/anki-integration.ts +++ b/src/anki-integration.ts @@ -21,15 +21,15 @@ import { SubtitleTimingTracker } from './subtitle-timing-tracker'; import { MediaGenerator } from './media-generator'; import path from 'path'; import { - AiConfig, AnkiConnectConfig, KikuDuplicateCardInfo, KikuFieldGroupingChoice, KikuMergePreviewResponse, - MpvClient, NotificationOptions, - NPlusOneMatchMode, -} from './types'; +} from './types/anki'; +import { AiConfig } from './types/integrations'; +import { MpvClient } from './types/runtime'; +import { NPlusOneMatchMode } from './types/subtitle'; import { DEFAULT_ANKI_CONNECT_CONFIG } from './config'; import { getConfiguredWordFieldCandidates, @@ -212,10 +212,7 @@ export class AnkiIntegration { try { this.recordCardsMinedCallback(count, noteIds); } catch (error) { - log.warn( - `recordCardsMined callback failed during ${source}:`, - (error as Error).message, - ); + log.warn(`recordCardsMined callback failed during ${source}:`, (error as Error).message); } } diff --git a/src/anki-integration/ai.ts b/src/anki-integration/ai.ts index 034c420..8e39ef2 100644 --- a/src/anki-integration/ai.ts +++ b/src/anki-integration/ai.ts @@ -1,4 +1,4 @@ -import type { AiConfig } from '../types'; +import type { AiConfig } from '../types/integrations'; import { requestAiChatCompletion } from '../ai/client'; const DEFAULT_AI_SYSTEM_PROMPT = diff --git a/src/anki-integration/animated-image-sync.test.ts b/src/anki-integration/animated-image-sync.test.ts index c0d25cf..c70d2aa 100644 --- a/src/anki-integration/animated-image-sync.test.ts +++ b/src/anki-integration/animated-image-sync.test.ts @@ -4,10 +4,10 @@ import test from 
'node:test'; import { resolveAnimatedImageLeadInSeconds, extractSoundFilenames } from './animated-image-sync'; test('extractSoundFilenames returns ordered sound filenames from an Anki field value', () => { - assert.deepEqual( - extractSoundFilenames('before [sound:word.mp3] middle [sound:alt.ogg] after'), - ['word.mp3', 'alt.ogg'], - ); + assert.deepEqual(extractSoundFilenames('before [sound:word.mp3] middle [sound:alt.ogg] after'), [ + 'word.mp3', + 'alt.ogg', + ]); }); test('resolveAnimatedImageLeadInSeconds sums configured word audio durations for animated images', async () => { diff --git a/src/anki-integration/animated-image-sync.ts b/src/anki-integration/animated-image-sync.ts index 9a53df9..2528287 100644 --- a/src/anki-integration/animated-image-sync.ts +++ b/src/anki-integration/animated-image-sync.ts @@ -4,7 +4,7 @@ import * as os from 'node:os'; import * as path from 'node:path'; import { DEFAULT_ANKI_CONNECT_CONFIG } from '../config'; -import type { AnkiConnectConfig } from '../types'; +import type { AnkiConnectConfig } from '../types/anki'; type NoteInfoLike = { noteId: number; @@ -36,9 +36,7 @@ export function extractSoundFilenames(value: string): string[] { } function shouldSyncAnimatedImageToWordAudio(config: Pick): boolean { - return ( - config.media?.imageType === 'avif' && config.media?.syncAnimatedImageToWordAudio !== false - ); + return config.media?.imageType === 'avif' && config.media?.syncAnimatedImageToWordAudio !== false; } export async function probeAudioDurationSeconds( diff --git a/src/anki-integration/card-creation.test.ts b/src/anki-integration/card-creation.test.ts index 5cbe245..03cd42c 100644 --- a/src/anki-integration/card-creation.test.ts +++ b/src/anki-integration/card-creation.test.ts @@ -2,7 +2,7 @@ import assert from 'node:assert/strict'; import test from 'node:test'; import { CardCreationService } from './card-creation'; -import type { AnkiConnectConfig } from '../types'; +import type { AnkiConnectConfig } from 
'../types/anki'; test('CardCreationService counts locally created sentence cards', async () => { const minedCards: Array<{ count: number; noteIds?: number[] }> = []; diff --git a/src/anki-integration/card-creation.ts b/src/anki-integration/card-creation.ts index 6495fa8..4364348 100644 --- a/src/anki-integration/card-creation.ts +++ b/src/anki-integration/card-creation.ts @@ -3,10 +3,11 @@ import { getConfiguredWordFieldName, getPreferredWordValueFromExtractedFields, } from '../anki-field-config'; -import { AiConfig, AnkiConnectConfig } from '../types'; +import { AnkiConnectConfig } from '../types/anki'; import { createLogger } from '../logger'; import { SubtitleTimingTracker } from '../subtitle-timing-tracker'; -import { MpvClient } from '../types'; +import { AiConfig } from '../types/integrations'; +import { MpvClient } from '../types/runtime'; import { resolveSentenceBackText } from './ai'; import { resolveMediaGenerationInputPath } from './media-source'; diff --git a/src/anki-integration/duplicate.ts b/src/anki-integration/duplicate.ts index 992390d..1e722a0 100644 --- a/src/anki-integration/duplicate.ts +++ b/src/anki-integration/duplicate.ts @@ -179,7 +179,10 @@ function getDuplicateSourceCandidates( const fallbackFieldName = configuredFieldNames[0]?.toLowerCase() || 'expression'; const fallbackKey = `${fallbackFieldName}:${normalizeDuplicateValue(trimmedFallback)}`; if (!dedupeKey.has(fallbackKey)) { - candidates.push({ fieldName: configuredFieldNames[0] || 'Expression', value: trimmedFallback }); + candidates.push({ + fieldName: configuredFieldNames[0] || 'Expression', + value: trimmedFallback, + }); } } diff --git a/src/anki-integration/field-grouping-merge.test.ts b/src/anki-integration/field-grouping-merge.test.ts new file mode 100644 index 0000000..18deec3 --- /dev/null +++ b/src/anki-integration/field-grouping-merge.test.ts @@ -0,0 +1,201 @@ +import assert from 'node:assert/strict'; +import test from 'node:test'; +import { + 
FieldGroupingMergeCollaborator, + type FieldGroupingMergeNoteInfo, +} from './field-grouping-merge'; +import type { AnkiConnectConfig } from '../types/anki'; + +function resolveFieldName(availableFieldNames: string[], preferredName: string): string | null { + return ( + availableFieldNames.find( + (name) => name === preferredName || name.toLowerCase() === preferredName.toLowerCase(), + ) ?? null + ); +} + +function createCollaborator( + options: { + config?: Partial; + currentSubtitleText?: string; + generatedMedia?: { + audioField?: string; + audioValue?: string; + imageField?: string; + imageValue?: string; + miscInfoValue?: string; + }; + warnings?: Array<{ fieldName: string; reason: string; detail?: string }>; + } = {}, +) { + const warnings = options.warnings ?? []; + const config = { + fields: { + sentence: 'Sentence', + audio: 'ExpressionAudio', + image: 'Picture', + miscInfo: 'MiscInfo', + ...(options.config?.fields ?? {}), + }, + ...(options.config ?? {}), + } as AnkiConnectConfig; + + return { + collaborator: new FieldGroupingMergeCollaborator({ + getConfig: () => config, + getEffectiveSentenceCardConfig: () => ({ + sentenceField: 'Sentence', + audioField: 'SentenceAudio', + }), + getCurrentSubtitleText: () => options.currentSubtitleText, + resolveFieldName, + resolveNoteFieldName: (noteInfo, preferredName) => { + if (!preferredName) return null; + return resolveFieldName(Object.keys(noteInfo.fields), preferredName); + }, + extractFields: (fields) => + Object.fromEntries( + Object.entries(fields).map(([key, value]) => [key.toLowerCase(), value.value || '']), + ), + processSentence: (mpvSentence) => `${mpvSentence}::processed`, + generateMediaForMerge: async () => options.generatedMedia ?? 
{}, + warnFieldParseOnce: (fieldName, reason, detail) => { + warnings.push({ fieldName, reason, detail }); + }, + }), + warnings, + }; +} + +function makeNote(noteId: number, fields: Record): FieldGroupingMergeNoteInfo { + return { + noteId, + fields: Object.fromEntries(Object.entries(fields).map(([key, value]) => [key, { value }])), + }; +} + +test('getGroupableFieldNames includes configured fields without duplicating ExpressionAudio', () => { + const { collaborator } = createCollaborator({ + config: { + fields: { + image: 'Illustration', + sentence: 'SentenceText', + audio: 'ExpressionAudio', + miscInfo: 'ExtraInfo', + }, + }, + }); + + assert.deepEqual(collaborator.getGroupableFieldNames(), [ + 'Sentence', + 'SentenceAudio', + 'Picture', + 'Illustration', + 'SentenceText', + 'ExtraInfo', + 'SentenceFurigana', + ]); +}); + +test('computeFieldGroupingMergedFields syncs a custom audio field from merged SentenceAudio', async () => { + const { collaborator } = createCollaborator({ + config: { + fields: { + audio: 'CustomAudio', + }, + }, + }); + + const merged = await collaborator.computeFieldGroupingMergedFields( + 1, + 2, + makeNote(1, { + SentenceAudio: '[sound:keep.mp3]', + CustomAudio: '[sound:stale.mp3]', + }), + makeNote(2, { + SentenceAudio: '[sound:new.mp3]', + }), + false, + ); + + assert.equal( + merged.SentenceAudio, + '[sound:keep.mp3][sound:new.mp3]', + ); + assert.equal(merged.CustomAudio, merged.SentenceAudio); +}); + +test('computeFieldGroupingMergedFields keeps strict fields when source is empty and warns on malformed spans', async () => { + const { collaborator, warnings } = createCollaborator({ + currentSubtitleText: 'subtitle line', + }); + + const merged = await collaborator.computeFieldGroupingMergedFields( + 3, + 4, + makeNote(3, { + Sentence: 'keep sentence', + SentenceAudio: '', + }), + makeNote(4, { + Sentence: 'source sentence', + SentenceAudio: '[sound:source.mp3]', + }), + false, + ); + + assert.equal( + merged.Sentence, + 'keep 
sentencesource sentence', + ); + assert.equal(merged.SentenceAudio, '[sound:source.mp3]'); + assert.equal(warnings.length, 4); + assert.deepEqual( + warnings.map((entry) => entry.reason), + ['invalid-group-id', 'no-usable-span-entries', 'invalid-group-id', 'no-usable-span-entries'], + ); +}); + +test('computeFieldGroupingMergedFields uses generated media only when includeGeneratedMedia is true', async () => { + const generatedMedia = { + audioField: 'SentenceAudio', + audioValue: '[sound:generated.mp3]', + imageField: 'Picture', + imageValue: '', + miscInfoValue: 'generated misc', + }; + const { collaborator: withoutGenerated } = createCollaborator({ generatedMedia }); + const { collaborator: withGenerated } = createCollaborator({ generatedMedia }); + + const keep = makeNote(10, { + SentenceAudio: '', + Picture: '', + MiscInfo: '', + }); + const source = makeNote(11, { + SentenceAudio: '', + Picture: '', + MiscInfo: '', + }); + + const without = await withoutGenerated.computeFieldGroupingMergedFields( + 10, + 11, + keep, + source, + false, + ); + const withMedia = await withGenerated.computeFieldGroupingMergedFields( + 10, + 11, + keep, + source, + true, + ); + + assert.deepEqual(without, {}); + assert.equal(withMedia.SentenceAudio, '[sound:generated.mp3]'); + assert.equal(withMedia.Picture, ''); + assert.equal(withMedia.MiscInfo, 'generated misc'); +}); diff --git a/src/anki-integration/field-grouping-merge.ts b/src/anki-integration/field-grouping-merge.ts index 4384b49..5ddcced 100644 --- a/src/anki-integration/field-grouping-merge.ts +++ b/src/anki-integration/field-grouping-merge.ts @@ -1,4 +1,4 @@ -import { AnkiConnectConfig } from '../types'; +import { AnkiConnectConfig } from '../types/anki'; import { getConfiguredWordFieldName } from '../anki-field-config'; interface FieldGroupingMergeMedia { diff --git a/src/anki-integration/field-grouping-workflow.test.ts b/src/anki-integration/field-grouping-workflow.test.ts index 1c02015..361ae04 100644 --- 
a/src/anki-integration/field-grouping-workflow.test.ts +++ b/src/anki-integration/field-grouping-workflow.test.ts @@ -1,7 +1,7 @@ import test from 'node:test'; import assert from 'node:assert/strict'; import { FieldGroupingWorkflow } from './field-grouping-workflow'; -import type { KikuDuplicateCardInfo, KikuFieldGroupingChoice } from '../types'; +import type { KikuDuplicateCardInfo, KikuFieldGroupingChoice } from '../types/anki'; type NoteInfo = { noteId: number; diff --git a/src/anki-integration/field-grouping-workflow.ts b/src/anki-integration/field-grouping-workflow.ts index 34cad8f..3369bd5 100644 --- a/src/anki-integration/field-grouping-workflow.ts +++ b/src/anki-integration/field-grouping-workflow.ts @@ -1,4 +1,4 @@ -import { KikuDuplicateCardInfo, KikuFieldGroupingChoice } from '../types'; +import { KikuDuplicateCardInfo, KikuFieldGroupingChoice } from '../types/anki'; import { getPreferredWordValueFromExtractedFields } from '../anki-field-config'; export interface FieldGroupingWorkflowNoteInfo { @@ -181,7 +181,8 @@ export class FieldGroupingWorkflow { return { noteId: noteInfo.noteId, expression: - getPreferredWordValueFromExtractedFields(fields, this.deps.getConfig()) || fallbackExpression, + getPreferredWordValueFromExtractedFields(fields, this.deps.getConfig()) || + fallbackExpression, sentencePreview: this.deps.truncateSentence( fields[(sentenceCardConfig.sentenceField || 'sentence').toLowerCase()] || (isOriginal ? 
'' : this.deps.getCurrentSubtitleText() || ''), diff --git a/src/anki-integration/field-grouping.test.ts b/src/anki-integration/field-grouping.test.ts new file mode 100644 index 0000000..f7f2e01 --- /dev/null +++ b/src/anki-integration/field-grouping.test.ts @@ -0,0 +1,411 @@ +import assert from 'node:assert/strict'; +import test from 'node:test'; +import { FieldGroupingService } from './field-grouping'; +import type { KikuMergePreviewResponse } from '../types/anki'; + +type NoteInfo = { + noteId: number; + fields: Record; +}; + +function createHarness( + options: { + kikuEnabled?: boolean; + kikuFieldGrouping?: 'auto' | 'manual' | 'disabled'; + deck?: string; + noteIds?: number[]; + notesInfo?: NoteInfo[][]; + duplicateNoteId?: number | null; + hasAllConfiguredFields?: boolean; + manualHandled?: boolean; + expression?: string | null; + currentSentenceImageField?: string | undefined; + onProcessNewCard?: (noteId: number, options?: { skipKikuFieldGrouping?: boolean }) => void; + } = {}, +) { + const calls: string[] = []; + const findNotesQueries: Array<{ query: string; maxRetries?: number }> = []; + const noteInfoRequests: number[][] = []; + const duplicateRequests: Array<{ expression: string; excludeNoteId: number }> = []; + const processCalls: Array<{ noteId: number; options?: { skipKikuFieldGrouping?: boolean } }> = []; + const autoCalls: Array<{ originalNoteId: number; newNoteId: number; expression: string }> = []; + const manualCalls: Array<{ originalNoteId: number; newNoteId: number; expression: string }> = []; + + const noteInfoQueue = [...(options.notesInfo ?? [])]; + const notes = options.noteIds ?? [2]; + + const service = new FieldGroupingService({ + getConfig: () => ({ + fields: { + word: 'Expression', + }, + }), + getEffectiveSentenceCardConfig: () => ({ + model: 'Sentence', + sentenceField: 'Sentence', + audioField: 'SentenceAudio', + lapisEnabled: false, + kikuEnabled: options.kikuEnabled ?? true, + kikuFieldGrouping: options.kikuFieldGrouping ?? 
'auto', + kikuDeleteDuplicateInAuto: true, + }), + isUpdateInProgress: () => false, + getDeck: options.deck ? () => options.deck : undefined, + withUpdateProgress: async (_message, action) => { + calls.push('withUpdateProgress'); + return action(); + }, + showOsdNotification: (text) => { + calls.push(`osd:${text}`); + }, + findNotes: async (query, findNotesOptions) => { + findNotesQueries.push({ query, maxRetries: findNotesOptions?.maxRetries }); + return notes; + }, + notesInfo: async (noteIds) => { + noteInfoRequests.push([...noteIds]); + return noteInfoQueue.shift() ?? []; + }, + extractFields: (fields) => + Object.fromEntries( + Object.entries(fields).map(([key, value]) => [key.toLowerCase(), value.value || '']), + ), + findDuplicateNote: async (expression, excludeNoteId) => { + duplicateRequests.push({ expression, excludeNoteId }); + return options.duplicateNoteId ?? 99; + }, + hasAllConfiguredFields: () => options.hasAllConfiguredFields ?? true, + processNewCard: async (noteId, processOptions) => { + processCalls.push({ noteId, options: processOptions }); + options.onProcessNewCard?.(noteId, processOptions); + }, + getSentenceCardImageFieldName: () => options.currentSentenceImageField, + resolveFieldName: (availableFieldNames, preferredName) => + availableFieldNames.find( + (name) => name === preferredName || name.toLowerCase() === preferredName.toLowerCase(), + ) ?? null, + computeFieldGroupingMergedFields: async () => ({}), + getNoteFieldMap: (noteInfo) => + Object.fromEntries( + Object.entries(noteInfo.fields).map(([key, value]) => [key, value.value || '']), + ), + handleFieldGroupingAuto: async (originalNoteId, newNoteId, _newNoteInfo, expression) => { + autoCalls.push({ originalNoteId, newNoteId, expression }); + }, + handleFieldGroupingManual: async (originalNoteId, newNoteId, _newNoteInfo, expression) => { + manualCalls.push({ originalNoteId, newNoteId, expression }); + return options.manualHandled ?? 
true; + }, + }); + + return { + service, + calls, + findNotesQueries, + noteInfoRequests, + duplicateRequests, + processCalls, + autoCalls, + manualCalls, + }; +} + +type SuccessfulPreview = KikuMergePreviewResponse & { + ok: true; + compact: { + action: { + keepNoteId: number; + deleteNoteId: number; + deleteDuplicate: boolean; + }; + mergedFields: Record; + }; + full: { + result: { + wouldDeleteNoteId: number | null; + }; + }; +}; + +test('triggerFieldGroupingForLastAddedCard stops when kiku mode is disabled', async () => { + const harness = createHarness({ kikuEnabled: false }); + + await harness.service.triggerFieldGroupingForLastAddedCard(); + + assert.deepEqual(harness.calls, ['osd:Kiku mode is not enabled']); + assert.equal(harness.findNotesQueries.length, 0); +}); + +test('triggerFieldGroupingForLastAddedCard stops when field grouping is disabled', async () => { + const harness = createHarness({ kikuFieldGrouping: 'disabled' }); + + await harness.service.triggerFieldGroupingForLastAddedCard(); + + assert.deepEqual(harness.calls, ['osd:Kiku field grouping is disabled']); + assert.equal(harness.findNotesQueries.length, 0); +}); + +test('triggerFieldGroupingForLastAddedCard stops when an update is already in progress', async () => { + const service = new FieldGroupingService({ + getConfig: () => ({ fields: { word: 'Expression' } }), + getEffectiveSentenceCardConfig: () => ({ + model: 'Sentence', + sentenceField: 'Sentence', + audioField: 'SentenceAudio', + lapisEnabled: false, + kikuEnabled: true, + kikuFieldGrouping: 'auto', + kikuDeleteDuplicateInAuto: true, + }), + isUpdateInProgress: () => true, + withUpdateProgress: async () => { + throw new Error('should not be called'); + }, + showOsdNotification: () => {}, + findNotes: async () => [], + notesInfo: async () => [], + extractFields: () => ({}), + findDuplicateNote: async () => null, + hasAllConfiguredFields: () => true, + processNewCard: async () => {}, + getSentenceCardImageFieldName: () => undefined, + 
resolveFieldName: () => null, + computeFieldGroupingMergedFields: async () => ({}), + getNoteFieldMap: () => ({}), + handleFieldGroupingAuto: async () => {}, + handleFieldGroupingManual: async () => true, + }); + + await service.triggerFieldGroupingForLastAddedCard(); +}); + +test('triggerFieldGroupingForLastAddedCard finds the newest note and hands off to auto grouping', async () => { + const harness = createHarness({ + deck: 'Anime Deck', + noteIds: [3, 7, 5], + notesInfo: [ + [ + { + noteId: 7, + fields: { + Expression: { value: 'word-7' }, + Sentence: { value: 'line-7' }, + }, + }, + ], + [ + { + noteId: 7, + fields: { + Expression: { value: 'word-7' }, + Sentence: { value: 'line-7' }, + }, + }, + ], + ], + duplicateNoteId: 42, + hasAllConfiguredFields: true, + }); + + await harness.service.triggerFieldGroupingForLastAddedCard(); + + assert.deepEqual(harness.findNotesQueries, [ + { query: '"deck:Anime Deck" added:1', maxRetries: undefined }, + ]); + assert.deepEqual(harness.noteInfoRequests, [[7], [7]]); + assert.deepEqual(harness.duplicateRequests, [{ expression: 'word-7', excludeNoteId: 7 }]); + assert.deepEqual(harness.autoCalls, [ + { + originalNoteId: 42, + newNoteId: 7, + expression: 'word-7', + }, + ]); +}); + +test('triggerFieldGroupingForLastAddedCard refreshes the card when configured fields are missing', async () => { + const processCalls: Array<{ noteId: number; options?: { skipKikuFieldGrouping?: boolean } }> = []; + const harness = createHarness({ + noteIds: [11], + notesInfo: [ + [ + { + noteId: 11, + fields: { + Expression: { value: 'word-11' }, + Sentence: { value: 'line-11' }, + }, + }, + ], + [ + { + noteId: 11, + fields: { + Expression: { value: 'word-11' }, + Sentence: { value: 'line-11' }, + }, + }, + ], + ], + duplicateNoteId: 13, + hasAllConfiguredFields: false, + onProcessNewCard: (noteId, options) => { + processCalls.push({ noteId, options }); + }, + }); + + await harness.service.triggerFieldGroupingForLastAddedCard(); + + 
assert.deepEqual(processCalls, [{ noteId: 11, options: { skipKikuFieldGrouping: true } }]); + assert.deepEqual(harness.manualCalls, []); +}); + +test('triggerFieldGroupingForLastAddedCard shows a cancellation message when manual grouping is declined', async () => { + const harness = createHarness({ + kikuFieldGrouping: 'manual', + noteIds: [9], + notesInfo: [ + [ + { + noteId: 9, + fields: { + Expression: { value: 'word-9' }, + Sentence: { value: 'line-9' }, + }, + }, + ], + [ + { + noteId: 9, + fields: { + Expression: { value: 'word-9' }, + Sentence: { value: 'line-9' }, + }, + }, + ], + ], + duplicateNoteId: 77, + manualHandled: false, + }); + + await harness.service.triggerFieldGroupingForLastAddedCard(); + + assert.deepEqual(harness.manualCalls, [ + { + originalNoteId: 77, + newNoteId: 9, + expression: 'word-9', + }, + ]); + assert.equal(harness.calls.at(-1), 'osd:Field grouping cancelled'); +}); + +test('buildFieldGroupingPreview returns merged compact and full previews', async () => { + const service = new FieldGroupingService({ + getConfig: () => ({ fields: { word: 'Expression' } }), + getEffectiveSentenceCardConfig: () => ({ + model: 'Sentence', + sentenceField: 'Sentence', + audioField: 'SentenceAudio', + lapisEnabled: false, + kikuEnabled: true, + kikuFieldGrouping: 'auto', + kikuDeleteDuplicateInAuto: true, + }), + isUpdateInProgress: () => false, + withUpdateProgress: async (_message, action) => action(), + showOsdNotification: () => {}, + findNotes: async () => [], + notesInfo: async (noteIds) => + noteIds.map((noteId) => ({ + noteId, + fields: { + Sentence: { value: `sentence-${noteId}` }, + SentenceAudio: { value: `[sound:${noteId}.mp3]` }, + Picture: { value: `` }, + MiscInfo: { value: `misc-${noteId}` }, + }, + })), + extractFields: () => ({}), + findDuplicateNote: async () => null, + hasAllConfiguredFields: () => true, + processNewCard: async () => {}, + getSentenceCardImageFieldName: () => undefined, + resolveFieldName: (availableFieldNames, 
preferredName) => + availableFieldNames.find( + (name) => name === preferredName || name.toLowerCase() === preferredName.toLowerCase(), + ) ?? null, + computeFieldGroupingMergedFields: async () => ({ + Sentence: 'merged sentence', + SentenceAudio: 'merged audio', + Picture: 'merged picture', + MiscInfo: 'merged misc', + }), + getNoteFieldMap: (noteInfo) => + Object.fromEntries( + Object.entries(noteInfo.fields).map(([key, value]) => [key, value.value || '']), + ), + handleFieldGroupingAuto: async () => {}, + handleFieldGroupingManual: async () => true, + }); + + const preview = await service.buildFieldGroupingPreview(1, 2, true); + + assert.equal(preview.ok, true); + if (!preview.ok) { + throw new Error(preview.error); + } + const successPreview = preview as SuccessfulPreview; + assert.deepEqual(successPreview.compact.action, { + keepNoteId: 1, + deleteNoteId: 2, + deleteDuplicate: true, + }); + assert.equal(successPreview.compact.mergedFields.Sentence, 'merged sentence'); + assert.equal(successPreview.full.result.wouldDeleteNoteId, 2); +}); + +test('buildFieldGroupingPreview reports missing notes cleanly', async () => { + const service = new FieldGroupingService({ + getConfig: () => ({ fields: { word: 'Expression' } }), + getEffectiveSentenceCardConfig: () => ({ + model: 'Sentence', + sentenceField: 'Sentence', + audioField: 'SentenceAudio', + lapisEnabled: false, + kikuEnabled: true, + kikuFieldGrouping: 'auto', + kikuDeleteDuplicateInAuto: true, + }), + isUpdateInProgress: () => false, + withUpdateProgress: async (_message, action) => action(), + showOsdNotification: () => {}, + findNotes: async () => [], + notesInfo: async () => [ + { + noteId: 1, + fields: { + Sentence: { value: 'sentence-1' }, + }, + }, + ], + extractFields: () => ({}), + findDuplicateNote: async () => null, + hasAllConfiguredFields: () => true, + processNewCard: async () => {}, + getSentenceCardImageFieldName: () => undefined, + resolveFieldName: () => null, + 
computeFieldGroupingMergedFields: async () => ({}), + getNoteFieldMap: () => ({}), + handleFieldGroupingAuto: async () => {}, + handleFieldGroupingManual: async () => true, + }); + + const preview = await service.buildFieldGroupingPreview(1, 2, false); + + assert.equal(preview.ok, false); + if (preview.ok) { + throw new Error('expected preview to fail'); + } + assert.equal(preview.error, 'Could not load selected notes'); +}); diff --git a/src/anki-integration/field-grouping.ts b/src/anki-integration/field-grouping.ts index 363b9a5..cc692e8 100644 --- a/src/anki-integration/field-grouping.ts +++ b/src/anki-integration/field-grouping.ts @@ -1,4 +1,4 @@ -import { KikuMergePreviewResponse } from '../types'; +import { KikuMergePreviewResponse } from '../types/anki'; import { createLogger } from '../logger'; import { getPreferredWordValueFromExtractedFields } from '../anki-field-config'; diff --git a/src/anki-integration/known-word-cache.test.ts b/src/anki-integration/known-word-cache.test.ts index aacf46b..4db0db9 100644 --- a/src/anki-integration/known-word-cache.test.ts +++ b/src/anki-integration/known-word-cache.test.ts @@ -4,7 +4,7 @@ import fs from 'node:fs'; import os from 'node:os'; import path from 'node:path'; -import type { AnkiConnectConfig } from '../types'; +import type { AnkiConnectConfig } from '../types/anki'; import { KnownWordCacheManager } from './known-word-cache'; async function waitForCondition( @@ -85,13 +85,15 @@ test('KnownWordCacheManager startLifecycle keeps fresh persisted cache without i }, }; const { manager, calls, statePath, cleanup } = createKnownWordCacheHarness(config); + const originalDateNow = Date.now; try { + Date.now = () => 120_000; fs.writeFileSync( statePath, JSON.stringify({ version: 2, - refreshedAtMs: Date.now(), + refreshedAtMs: 120_000, scope: '{"refreshMinutes":60,"scope":"is:note","fieldsWord":""}', words: ['猫'], notes: { @@ -102,12 +104,20 @@ test('KnownWordCacheManager startLifecycle keeps fresh persisted cache without 
i ); manager.startLifecycle(); - await new Promise((resolve) => setTimeout(resolve, 25)); assert.equal(manager.isKnownWord('猫'), true); assert.equal(calls.findNotes, 0); assert.equal(calls.notesInfo, 0); + assert.equal( + ( + manager as unknown as { + getMsUntilNextRefresh: () => number; + } + ).getMsUntilNextRefresh() > 0, + true, + ); } finally { + Date.now = originalDateNow; manager.stopLifecycle(); cleanup(); } @@ -124,13 +134,15 @@ test('KnownWordCacheManager startLifecycle immediately refreshes stale persisted }, }; const { manager, calls, statePath, clientState, cleanup } = createKnownWordCacheHarness(config); + const originalDateNow = Date.now; try { + Date.now = () => 120_000; fs.writeFileSync( statePath, JSON.stringify({ version: 2, - refreshedAtMs: Date.now() - 61_000, + refreshedAtMs: 59_000, scope: '{"refreshMinutes":1,"scope":"is:note","fieldsWord":"Word"}', words: ['猫'], notes: { @@ -156,6 +168,7 @@ test('KnownWordCacheManager startLifecycle immediately refreshes stale persisted assert.equal(manager.isKnownWord('猫'), false); assert.equal(manager.isKnownWord('犬'), true); } finally { + Date.now = originalDateNow; manager.stopLifecycle(); cleanup(); } @@ -351,10 +364,7 @@ test('KnownWordCacheManager preserves cache state key captured before refresh wo scope: string; words: string[]; }; - assert.equal( - persisted.scope, - '{"refreshMinutes":1,"scope":"is:note","fieldsWord":"Word"}', - ); + assert.equal(persisted.scope, '{"refreshMinutes":1,"scope":"is:note","fieldsWord":"Word"}'); assert.deepEqual(persisted.words, ['猫']); } finally { fs.rmSync(stateDir, { recursive: true, force: true }); diff --git a/src/anki-integration/known-word-cache.ts b/src/anki-integration/known-word-cache.ts index 24433d3..a4de17c 100644 --- a/src/anki-integration/known-word-cache.ts +++ b/src/anki-integration/known-word-cache.ts @@ -3,7 +3,7 @@ import path from 'path'; import { DEFAULT_ANKI_CONNECT_CONFIG } from '../config'; import { getConfiguredWordFieldName } from 
'../anki-field-config'; -import { AnkiConnectConfig } from '../types'; +import { AnkiConnectConfig } from '../types/anki'; import { createLogger } from '../logger'; const log = createLogger('anki').child('integration.known-word-cache'); @@ -316,9 +316,9 @@ export class KnownWordCacheManager { const currentDeck = this.deps.getConfig().deck?.trim(); const selectedDeckEntry = currentDeck !== undefined && currentDeck.length > 0 - ? trimmedDeckEntries.find(([deckName]) => deckName === currentDeck) ?? null + ? (trimmedDeckEntries.find(([deckName]) => deckName === currentDeck) ?? null) : trimmedDeckEntries.length === 1 - ? trimmedDeckEntries[0] ?? null + ? (trimmedDeckEntries[0] ?? null) : null; if (!selectedDeckEntry) { @@ -329,7 +329,10 @@ export class KnownWordCacheManager { if (Array.isArray(deckFields)) { const normalizedFields = [ ...new Set( - deckFields.map(String).map((field) => field.trim()).filter((field) => field.length > 0), + deckFields + .map(String) + .map((field) => field.trim()) + .filter((field) => field.length > 0), ), ]; if (normalizedFields.length > 0) { @@ -353,7 +356,14 @@ export class KnownWordCacheManager { continue; } const normalizedFields = Array.isArray(fields) - ? [...new Set(fields.map(String).map((field) => field.trim()).filter(Boolean))] + ? 
[ + ...new Set( + fields + .map(String) + .map((field) => field.trim()) + .filter(Boolean), + ), + ] : []; scopes.push({ query: `deck:"${escapeAnkiSearchValue(trimmedDeckName)}"`, @@ -402,7 +412,10 @@ export class KnownWordCacheManager { private async fetchKnownWordNoteFieldsById(): Promise> { const scopes = this.getKnownWordQueryScopes(); const noteFieldsById = new Map(); - log.debug('Refreshing known-word cache', `queries=${scopes.map((scope) => scope.query).join(' | ')}`); + log.debug( + 'Refreshing known-word cache', + `queries=${scopes.map((scope) => scope.query).join(' | ')}`, + ); for (const scope of scopes) { const noteIds = (await this.deps.client.findNotes(scope.query, { @@ -414,10 +427,7 @@ export class KnownWordCacheManager { continue; } const existingFields = noteFieldsById.get(noteId) ?? []; - noteFieldsById.set( - noteId, - [...new Set([...existingFields, ...scope.fields])], - ); + noteFieldsById.set(noteId, [...new Set([...existingFields, ...scope.fields])]); } } diff --git a/src/anki-integration/media-source.ts b/src/anki-integration/media-source.ts index 36adaa2..21f9838 100644 --- a/src/anki-integration/media-source.ts +++ b/src/anki-integration/media-source.ts @@ -1,5 +1,5 @@ import { isRemoteMediaPath } from '../jimaku/utils'; -import type { MpvClient } from '../types'; +import type { MpvClient } from '../types/runtime'; export type MediaGenerationKind = 'audio' | 'video'; @@ -50,7 +50,7 @@ function resolvePreferredUrlFromMpvEdlSource( // mpv EDL sources usually list audio streams first and video streams last, so // when classifyMediaUrl cannot identify a typed URL we fall back to stream order. - return kind === 'audio' ? urls[0] ?? null : urls[urls.length - 1] ?? null; + return kind === 'audio' ? (urls[0] ?? null) : (urls[urls.length - 1] ?? 
null); } export async function resolveMediaGenerationInputPath( diff --git a/src/anki-integration/polling.test.ts b/src/anki-integration/polling.test.ts index 93a330e..550ba2b 100644 --- a/src/anki-integration/polling.test.ts +++ b/src/anki-integration/polling.test.ts @@ -4,35 +4,41 @@ import test from 'node:test'; import { PollingRunner } from './polling'; test('polling runner records newly added cards after initialization', async () => { + const originalDateNow = Date.now; const recordedCards: number[] = []; let tracked = new Set(); const responses = [ [10, 11], [10, 11, 12, 13], ]; - const runner = new PollingRunner({ - getDeck: () => 'Mining', - getPollingRate: () => 250, - findNotes: async () => responses.shift() ?? [], - shouldAutoUpdateNewCards: () => true, - processNewCard: async () => undefined, - recordCardsAdded: (count) => { - recordedCards.push(count); - }, - isUpdateInProgress: () => false, - setUpdateInProgress: () => undefined, - getTrackedNoteIds: () => tracked, - setTrackedNoteIds: (noteIds) => { - tracked = noteIds; - }, - showStatusNotification: () => undefined, - logDebug: () => undefined, - logInfo: () => undefined, - logWarn: () => undefined, - }); + try { + Date.now = () => 120_000; + const runner = new PollingRunner({ + getDeck: () => 'Mining', + getPollingRate: () => 250, + findNotes: async () => responses.shift() ?? 
[], + shouldAutoUpdateNewCards: () => true, + processNewCard: async () => undefined, + recordCardsAdded: (count) => { + recordedCards.push(count); + }, + isUpdateInProgress: () => false, + setUpdateInProgress: () => undefined, + getTrackedNoteIds: () => tracked, + setTrackedNoteIds: (noteIds) => { + tracked = noteIds; + }, + showStatusNotification: () => undefined, + logDebug: () => undefined, + logInfo: () => undefined, + logWarn: () => undefined, + }); - await runner.pollOnce(); - await runner.pollOnce(); + await runner.pollOnce(); + await runner.pollOnce(); - assert.deepEqual(recordedCards, [2]); + assert.deepEqual(recordedCards, [2]); + } finally { + Date.now = originalDateNow; + } }); diff --git a/src/anki-integration/runtime.test.ts b/src/anki-integration/runtime.test.ts index e234f15..99d0a36 100644 --- a/src/anki-integration/runtime.test.ts +++ b/src/anki-integration/runtime.test.ts @@ -2,7 +2,7 @@ import test from 'node:test'; import assert from 'node:assert/strict'; import { DEFAULT_ANKI_CONNECT_CONFIG } from '../config'; -import type { AnkiConnectConfig } from '../types'; +import type { AnkiConnectConfig } from '../types/anki'; import { AnkiIntegrationRuntime } from './runtime'; function createRuntime( diff --git a/src/anki-integration/runtime.ts b/src/anki-integration/runtime.ts index df1ef9f..c1327b9 100644 --- a/src/anki-integration/runtime.ts +++ b/src/anki-integration/runtime.ts @@ -1,5 +1,5 @@ import { DEFAULT_ANKI_CONNECT_CONFIG } from '../config'; -import type { AnkiConnectConfig } from '../types'; +import type { AnkiConnectConfig } from '../types/anki'; import { getKnownWordCacheLifecycleConfig, getKnownWordCacheRefreshIntervalMinutes, diff --git a/src/anki-integration/ui-feedback.ts b/src/anki-integration/ui-feedback.ts index ea43e70..f9f53d6 100644 --- a/src/anki-integration/ui-feedback.ts +++ b/src/anki-integration/ui-feedback.ts @@ -1,4 +1,4 @@ -import { NotificationOptions } from '../types'; +import { NotificationOptions } from 
'../types/anki'; export interface UiFeedbackState { progressDepth: number; diff --git a/src/ci-workflow.test.ts b/src/ci-workflow.test.ts index 7c8e47b..fb43a24 100644 --- a/src/ci-workflow.test.ts +++ b/src/ci-workflow.test.ts @@ -5,6 +5,10 @@ import { resolve } from 'node:path'; const ciWorkflowPath = resolve(__dirname, '../.github/workflows/ci.yml'); const ciWorkflow = readFileSync(ciWorkflowPath, 'utf8'); +const packageJsonPath = resolve(__dirname, '../package.json'); +const packageJson = JSON.parse(readFileSync(packageJsonPath, 'utf8')) as { + scripts: Record; +}; test('ci workflow lints changelog fragments', () => { assert.match(ciWorkflow, /bun run changelog:lint/); @@ -18,3 +22,17 @@ test('ci workflow checks pull requests for required changelog fragments', () => test('ci workflow verifies generated config examples stay in sync', () => { assert.match(ciWorkflow, /bun run verify:config-example/); }); + +test('package scripts expose a sharded maintained source coverage lane with lcov output', () => { + assert.equal( + packageJson.scripts['test:coverage:src'], + 'bun run build:yomitan && bun run scripts/run-coverage-lane.ts bun-src-full --coverage-dir coverage/test-src', + ); +}); + +test('ci workflow runs the maintained source coverage lane and uploads lcov output', () => { + assert.match(ciWorkflow, /name: Coverage suite \(maintained source lane\)/); + assert.match(ciWorkflow, /run: bun run test:coverage:src/); + assert.match(ciWorkflow, /name: Upload coverage artifact/); + assert.match(ciWorkflow, /path: coverage\/test-src\/lcov\.info/); +}); diff --git a/src/config/config.test.ts b/src/config/config.test.ts index ada4876..1c7e7a7 100644 --- a/src/config/config.test.ts +++ b/src/config/config.test.ts @@ -4,7 +4,7 @@ import * as fs from 'fs'; import * as os from 'os'; import * as path from 'path'; import { ConfigService, ConfigStartupParseError } from './service'; -import { DEFAULT_CONFIG, RUNTIME_OPTION_REGISTRY } from './definitions'; +import { 
DEFAULT_CONFIG, RUNTIME_OPTION_REGISTRY, deepMergeRawConfig } from './definitions'; import { generateConfigTemplate } from './template'; function makeTempDir(): string { @@ -1032,6 +1032,61 @@ test('reloadConfigStrict parse failure does not mutate raw config or warnings', assert.deepEqual(service.getWarnings(), beforeWarnings); }); +test('SM-012 config paths do not use JSON serialize-clone helpers', () => { + const definitionsSource = fs.readFileSync( + path.join(process.cwd(), 'src/config/definitions.ts'), + 'utf-8', + ); + const serviceSource = fs.readFileSync(path.join(process.cwd(), 'src/config/service.ts'), 'utf-8'); + + assert.equal(definitionsSource.includes('JSON.parse(JSON.stringify('), false); + assert.equal(serviceSource.includes('JSON.parse(JSON.stringify('), false); +}); + +test('getRawConfig returns a detached clone', () => { + const dir = makeTempDir(); + fs.writeFileSync( + path.join(dir, 'config.jsonc'), + `{ + "ankiConnect": { + "tags": ["SubMiner"] + } + }`, + 'utf-8', + ); + + const service = new ConfigService(dir); + const raw = service.getRawConfig(); + raw.ankiConnect!.tags!.push('mutated'); + + assert.deepEqual(service.getRawConfig().ankiConnect?.tags, ['SubMiner']); +}); + +test('deepMergeRawConfig returns a detached merged clone', () => { + const base = { + ankiConnect: { + tags: ['SubMiner'], + behavior: { + autoUpdateNewCards: true, + }, + }, + }; + + const merged = deepMergeRawConfig(base, { + ankiConnect: { + behavior: { + autoUpdateNewCards: false, + }, + }, + }); + + merged.ankiConnect!.tags!.push('mutated'); + merged.ankiConnect!.behavior!.autoUpdateNewCards = true; + + assert.deepEqual(base.ankiConnect?.tags, ['SubMiner']); + assert.equal(base.ankiConnect?.behavior?.autoUpdateNewCards, true); +}); + test('warning emission order is deterministic across reloads', () => { const dir = makeTempDir(); const configPath = path.join(dir, 'config.jsonc'); @@ -1325,8 +1380,14 @@ test('controller descriptor config rejects malformed binding 
objects', () => { config.controller.bindings.leftStickHorizontal, DEFAULT_CONFIG.controller.bindings.leftStickHorizontal, ); - assert.equal(warnings.some((warning) => warning.path === 'controller.bindings.toggleLookup'), true); - assert.equal(warnings.some((warning) => warning.path === 'controller.bindings.closeLookup'), true); + assert.equal( + warnings.some((warning) => warning.path === 'controller.bindings.toggleLookup'), + true, + ); + assert.equal( + warnings.some((warning) => warning.path === 'controller.bindings.closeLookup'), + true, + ); assert.equal( warnings.some((warning) => warning.path === 'controller.bindings.leftStickHorizontal'), true, diff --git a/src/config/definitions.ts b/src/config/definitions.ts index 0692e66..9d58399 100644 --- a/src/config/definitions.ts +++ b/src/config/definitions.ts @@ -1,4 +1,4 @@ -import { RawConfig, ResolvedConfig } from '../types'; +import { RawConfig, ResolvedConfig } from '../types/config'; import { CORE_DEFAULT_CONFIG } from './definitions/defaults-core'; import { IMMERSION_DEFAULT_CONFIG } from './definitions/defaults-immersion'; import { INTEGRATIONS_DEFAULT_CONFIG } from './definitions/defaults-integrations'; @@ -84,11 +84,11 @@ export const CONFIG_OPTION_REGISTRY = [ export { CONFIG_TEMPLATE_SECTIONS }; export function deepCloneConfig(config: ResolvedConfig): ResolvedConfig { - return JSON.parse(JSON.stringify(config)) as ResolvedConfig; + return structuredClone(config); } export function deepMergeRawConfig(base: RawConfig, patch: RawConfig): RawConfig { - const clone = JSON.parse(JSON.stringify(base)) as Record; + const clone = structuredClone(base) as Record; const patchObject = patch as Record; const mergeInto = (target: Record, source: Record): void => { diff --git a/src/config/definitions/defaults-core.ts b/src/config/definitions/defaults-core.ts index 4d42915..302cfce 100644 --- a/src/config/definitions/defaults-core.ts +++ b/src/config/definitions/defaults-core.ts @@ -1,4 +1,4 @@ -import { 
ResolvedConfig } from '../../types'; +import { ResolvedConfig } from '../../types/config'; export const CORE_DEFAULT_CONFIG: Pick< ResolvedConfig, diff --git a/src/config/definitions/defaults-immersion.ts b/src/config/definitions/defaults-immersion.ts index ffd04fb..2641181 100644 --- a/src/config/definitions/defaults-immersion.ts +++ b/src/config/definitions/defaults-immersion.ts @@ -1,4 +1,4 @@ -import { ResolvedConfig } from '../../types'; +import { ResolvedConfig } from '../../types/config'; export const IMMERSION_DEFAULT_CONFIG: Pick = { immersionTracking: { diff --git a/src/config/definitions/defaults-integrations.ts b/src/config/definitions/defaults-integrations.ts index 761a2e0..af38b21 100644 --- a/src/config/definitions/defaults-integrations.ts +++ b/src/config/definitions/defaults-integrations.ts @@ -1,4 +1,4 @@ -import { ResolvedConfig } from '../../types'; +import { ResolvedConfig } from '../../types/config'; export const INTEGRATIONS_DEFAULT_CONFIG: Pick< ResolvedConfig, @@ -129,6 +129,7 @@ export const INTEGRATIONS_DEFAULT_CONFIG: Pick< }, discordPresence: { enabled: false, + presenceStyle: 'default' as const, updateIntervalMs: 3_000, debounceMs: 750, }, diff --git a/src/config/definitions/defaults-stats.ts b/src/config/definitions/defaults-stats.ts index 3b4bb81..c3838d2 100644 --- a/src/config/definitions/defaults-stats.ts +++ b/src/config/definitions/defaults-stats.ts @@ -1,4 +1,4 @@ -import { ResolvedConfig } from '../../types.js'; +import { ResolvedConfig } from '../../types/config.js'; export const STATS_DEFAULT_CONFIG: Pick = { stats: { diff --git a/src/config/definitions/defaults-subtitle.ts b/src/config/definitions/defaults-subtitle.ts index 25bbd0f..a581169 100644 --- a/src/config/definitions/defaults-subtitle.ts +++ b/src/config/definitions/defaults-subtitle.ts @@ -1,4 +1,4 @@ -import { ResolvedConfig } from '../../types'; +import { ResolvedConfig } from '../../types/config'; export const SUBTITLE_DEFAULT_CONFIG: Pick = { subtitleStyle: { 
diff --git a/src/config/definitions/options-core.ts b/src/config/definitions/options-core.ts index 1ab1fd4..463eb6e 100644 --- a/src/config/definitions/options-core.ts +++ b/src/config/definitions/options-core.ts @@ -1,4 +1,4 @@ -import { ResolvedConfig } from '../../types'; +import { ResolvedConfig } from '../../types/config'; import { ConfigOptionRegistryEntry } from './shared'; export function buildCoreConfigOptionRegistry( @@ -263,7 +263,8 @@ export function buildCoreConfigOptionRegistry( { path: `controller.bindings.${binding.id}.axisIndex`, kind: 'number' as const, - defaultValue: binding.defaultValue.kind === 'axis' ? binding.defaultValue.axisIndex : undefined, + defaultValue: + binding.defaultValue.kind === 'axis' ? binding.defaultValue.axisIndex : undefined, description: 'Raw axis index captured for this discrete controller action.', }, { @@ -293,7 +294,8 @@ export function buildCoreConfigOptionRegistry( { path: `controller.bindings.${binding.id}.axisIndex`, kind: 'number' as const, - defaultValue: binding.defaultValue.kind === 'axis' ? binding.defaultValue.axisIndex : undefined, + defaultValue: + binding.defaultValue.kind === 'axis' ? binding.defaultValue.axisIndex : undefined, description: 'Raw axis index captured for this analog controller action.', }, { @@ -302,7 +304,8 @@ export function buildCoreConfigOptionRegistry( enumValues: ['none', 'horizontal', 'vertical'], defaultValue: binding.defaultValue.kind === 'axis' ? 
binding.defaultValue.dpadFallback : undefined, - description: 'Optional D-pad fallback used when this analog controller action should also read D-pad input.', + description: + 'Optional D-pad fallback used when this analog controller action should also read D-pad input.', }, ]), { diff --git a/src/config/definitions/options-immersion.ts b/src/config/definitions/options-immersion.ts index 6957dbb..2a5c4bd 100644 --- a/src/config/definitions/options-immersion.ts +++ b/src/config/definitions/options-immersion.ts @@ -1,4 +1,4 @@ -import { ResolvedConfig } from '../../types'; +import { ResolvedConfig } from '../../types/config'; import { ConfigOptionRegistryEntry } from './shared'; export function buildImmersionConfigOptionRegistry( diff --git a/src/config/definitions/options-integrations.ts b/src/config/definitions/options-integrations.ts index ff5feff..5e60605 100644 --- a/src/config/definitions/options-integrations.ts +++ b/src/config/definitions/options-integrations.ts @@ -1,4 +1,4 @@ -import { ResolvedConfig } from '../../types'; +import { ResolvedConfig } from '../../types/config'; import { ConfigOptionRegistryEntry, RuntimeOptionRegistryEntry } from './shared'; export function buildIntegrationConfigOptionRegistry( @@ -323,6 +323,13 @@ export function buildIntegrationConfigOptionRegistry( defaultValue: defaultConfig.discordPresence.enabled, description: 'Enable optional Discord Rich Presence updates.', }, + { + path: 'discordPresence.presenceStyle', + kind: 'string', + defaultValue: defaultConfig.discordPresence.presenceStyle, + description: + 'Presence card text preset: "default" (clean bilingual), "meme" (Mining and crafting), "japanese" (fully JP), or "minimal".', + }, { path: 'discordPresence.updateIntervalMs', kind: 'number', @@ -369,13 +376,15 @@ export function buildIntegrationConfigOptionRegistry( path: 'youtubeSubgen.whisperBin', kind: 'string', defaultValue: defaultConfig.youtubeSubgen.whisperBin, - description: 'Legacy compatibility path kept for 
external subtitle fallback tools; not used by default.', + description: + 'Legacy compatibility path kept for external subtitle fallback tools; not used by default.', }, { path: 'youtubeSubgen.whisperModel', kind: 'string', defaultValue: defaultConfig.youtubeSubgen.whisperModel, - description: 'Legacy compatibility model path kept for external subtitle fallback tooling; not used by default.', + description: + 'Legacy compatibility model path kept for external subtitle fallback tooling; not used by default.', }, { path: 'youtubeSubgen.whisperVadModel', diff --git a/src/config/definitions/options-stats.ts b/src/config/definitions/options-stats.ts index 16657e6..131bea3 100644 --- a/src/config/definitions/options-stats.ts +++ b/src/config/definitions/options-stats.ts @@ -1,4 +1,4 @@ -import { ResolvedConfig } from '../../types.js'; +import { ResolvedConfig } from '../../types/config.js'; import { ConfigOptionRegistryEntry } from './shared.js'; export function buildStatsConfigOptionRegistry( @@ -15,7 +15,8 @@ export function buildStatsConfigOptionRegistry( path: 'stats.markWatchedKey', kind: 'string', defaultValue: defaultConfig.stats.markWatchedKey, - description: 'Key code to mark the current video as watched and advance to the next playlist entry.', + description: + 'Key code to mark the current video as watched and advance to the next playlist entry.', }, { path: 'stats.serverPort', diff --git a/src/config/definitions/options-subtitle.ts b/src/config/definitions/options-subtitle.ts index 9b2d294..5445eb2 100644 --- a/src/config/definitions/options-subtitle.ts +++ b/src/config/definitions/options-subtitle.ts @@ -1,4 +1,4 @@ -import { ResolvedConfig } from '../../types'; +import { ResolvedConfig } from '../../types/config'; import { ConfigOptionRegistryEntry } from './shared'; export function buildSubtitleConfigOptionRegistry( diff --git a/src/config/definitions/runtime-options.ts b/src/config/definitions/runtime-options.ts index afba727..a8ca7b9 100644 --- 
a/src/config/definitions/runtime-options.ts +++ b/src/config/definitions/runtime-options.ts @@ -1,4 +1,4 @@ -import { ResolvedConfig } from '../../types'; +import { ResolvedConfig } from '../../types/config'; import { RuntimeOptionRegistryEntry } from './shared'; export function buildRuntimeOptionRegistry( diff --git a/src/config/definitions/shared.ts b/src/config/definitions/shared.ts index 9a0c636..26ac978 100644 --- a/src/config/definitions/shared.ts +++ b/src/config/definitions/shared.ts @@ -1,11 +1,11 @@ -import { - AnkiConnectConfig, - ResolvedConfig, +import type { AnkiConnectConfig } from '../../types/anki'; +import type { ResolvedConfig } from '../../types/config'; +import type { RuntimeOptionId, RuntimeOptionScope, RuntimeOptionValue, RuntimeOptionValueType, -} from '../../types'; +} from '../../types/runtime-options'; export type ConfigValueKind = 'boolean' | 'number' | 'string' | 'enum' | 'array' | 'object'; diff --git a/src/config/load.ts b/src/config/load.ts index d8a4bc1..355422e 100644 --- a/src/config/load.ts +++ b/src/config/load.ts @@ -1,5 +1,5 @@ import * as fs from 'fs'; -import { RawConfig } from '../types'; +import { RawConfig } from '../types/config'; import { parseConfigContent } from './parse'; export interface ConfigPaths { diff --git a/src/config/resolve.ts b/src/config/resolve.ts index c520e7c..6e203c4 100644 --- a/src/config/resolve.ts +++ b/src/config/resolve.ts @@ -1,4 +1,4 @@ -import { ConfigValidationWarning, RawConfig, ResolvedConfig } from '../types'; +import { ConfigValidationWarning, RawConfig, ResolvedConfig } from '../types/config'; import { applyAnkiConnectResolution } from './resolve/anki-connect'; import { createResolveContext } from './resolve/context'; import { applyCoreDomainConfig } from './resolve/core-domains'; diff --git a/src/config/resolve/context.ts b/src/config/resolve/context.ts index abae21d..f88c3e6 100644 --- a/src/config/resolve/context.ts +++ b/src/config/resolve/context.ts @@ -1,4 +1,4 @@ -import { 
ConfigValidationWarning, RawConfig, ResolvedConfig } from '../../types'; +import { ConfigValidationWarning, RawConfig, ResolvedConfig } from '../../types/config'; import { DEFAULT_CONFIG, deepCloneConfig } from '../definitions'; import { createWarningCollector } from '../warnings'; import { isObject } from './shared'; diff --git a/src/config/resolve/core-domains.ts b/src/config/resolve/core-domains.ts index 9920e7e..6d356b8 100644 --- a/src/config/resolve/core-domains.ts +++ b/src/config/resolve/core-domains.ts @@ -8,7 +8,7 @@ import type { ControllerDiscreteBindingConfig, ResolvedControllerAxisBinding, ResolvedControllerDiscreteBinding, -} from '../../types'; +} from '../../types/runtime'; import { ResolveContext } from './context'; import { asBoolean, asNumber, asString, isObject } from './shared'; @@ -27,7 +27,12 @@ const CONTROLLER_BUTTON_BINDINGS = [ 'rightTrigger', ] as const; -const CONTROLLER_AXIS_BINDINGS = ['leftStickX', 'leftStickY', 'rightStickX', 'rightStickY'] as const; +const CONTROLLER_AXIS_BINDINGS = [ + 'leftStickX', + 'leftStickY', + 'rightStickX', + 'rightStickY', +] as const; const CONTROLLER_AXIS_INDEX_BY_BINDING: Record = { leftStickX: 0, @@ -98,7 +103,9 @@ function parseDiscreteBindingObject(value: unknown): ResolvedControllerDiscreteB return { kind: 'none' }; } if (value.kind === 'button') { - return typeof value.buttonIndex === 'number' && Number.isInteger(value.buttonIndex) && value.buttonIndex >= 0 + return typeof value.buttonIndex === 'number' && + Number.isInteger(value.buttonIndex) && + value.buttonIndex >= 0 ? 
{ kind: 'button', buttonIndex: value.buttonIndex } : null; } @@ -121,7 +128,11 @@ function parseAxisBindingObject( return { kind: 'none' }; } if (!isObject(value) || value.kind !== 'axis') return null; - if (typeof value.axisIndex !== 'number' || !Number.isInteger(value.axisIndex) || value.axisIndex < 0) { + if ( + typeof value.axisIndex !== 'number' || + !Number.isInteger(value.axisIndex) || + value.axisIndex < 0 + ) { return null; } if (value.dpadFallback !== undefined && !isControllerDpadFallback(value.dpadFallback)) { @@ -368,7 +379,9 @@ export function applyCoreDomainConfig(context: ResolveContext): void { const legacyValue = asString(bindingValue); if ( legacyValue !== undefined && - CONTROLLER_BUTTON_BINDINGS.includes(legacyValue as (typeof CONTROLLER_BUTTON_BINDINGS)[number]) + CONTROLLER_BUTTON_BINDINGS.includes( + legacyValue as (typeof CONTROLLER_BUTTON_BINDINGS)[number], + ) ) { resolved.controller.bindings[key] = resolveLegacyDiscreteBinding( legacyValue as ControllerButtonBinding, @@ -401,7 +414,9 @@ export function applyCoreDomainConfig(context: ResolveContext): void { const legacyValue = asString(bindingValue); if ( legacyValue !== undefined && - CONTROLLER_AXIS_BINDINGS.includes(legacyValue as (typeof CONTROLLER_AXIS_BINDINGS)[number]) + CONTROLLER_AXIS_BINDINGS.includes( + legacyValue as (typeof CONTROLLER_AXIS_BINDINGS)[number], + ) ) { resolved.controller.bindings[key] = resolveLegacyAxisBinding( legacyValue as ControllerAxisBinding, diff --git a/src/config/resolve/immersion-tracking.ts b/src/config/resolve/immersion-tracking.ts index c3cf1e8..a5a399b 100644 --- a/src/config/resolve/immersion-tracking.ts +++ b/src/config/resolve/immersion-tracking.ts @@ -1,5 +1,8 @@ import { ResolveContext } from './context'; -import { ImmersionTrackingRetentionMode, ImmersionTrackingRetentionPreset } from '../../types'; +import { + ImmersionTrackingRetentionMode, + ImmersionTrackingRetentionPreset, +} from '../../types/integrations'; import { asBoolean, 
asNumber, asString, isObject } from './shared'; const DEFAULT_RETENTION_MODE: ImmersionTrackingRetentionMode = 'preset'; diff --git a/src/config/resolve/stats.ts b/src/config/resolve/stats.ts index ba2641b..745b7b9 100644 --- a/src/config/resolve/stats.ts +++ b/src/config/resolve/stats.ts @@ -17,7 +17,12 @@ export function applyStatsConfig(context: ResolveContext): void { if (markWatchedKey !== undefined) { resolved.stats.markWatchedKey = markWatchedKey; } else if (src.stats.markWatchedKey !== undefined) { - warn('stats.markWatchedKey', src.stats.markWatchedKey, resolved.stats.markWatchedKey, 'Expected string.'); + warn( + 'stats.markWatchedKey', + src.stats.markWatchedKey, + resolved.stats.markWatchedKey, + 'Expected string.', + ); } const serverPort = asNumber(src.stats.serverPort); diff --git a/src/config/resolve/subtitle-domains.ts b/src/config/resolve/subtitle-domains.ts index 81d77d1..2bf032c 100644 --- a/src/config/resolve/subtitle-domains.ts +++ b/src/config/resolve/subtitle-domains.ts @@ -1,4 +1,4 @@ -import { ResolvedConfig } from '../../types'; +import { ResolvedConfig } from '../../types/config'; import { ResolveContext } from './context'; import { asBoolean, @@ -467,7 +467,9 @@ export function applySubtitleDomainConfig(context: ResolveContext): void { ); if (pauseVideoOnHover !== undefined) { resolved.subtitleSidebar.pauseVideoOnHover = pauseVideoOnHover; - } else if ((src.subtitleSidebar as { pauseVideoOnHover?: unknown }).pauseVideoOnHover !== undefined) { + } else if ( + (src.subtitleSidebar as { pauseVideoOnHover?: unknown }).pauseVideoOnHover !== undefined + ) { resolved.subtitleSidebar.pauseVideoOnHover = fallback.pauseVideoOnHover; warn( 'subtitleSidebar.pauseVideoOnHover', diff --git a/src/config/resolve/subtitle-sidebar.test.ts b/src/config/resolve/subtitle-sidebar.test.ts index 7c23247..6b9b9c0 100644 --- a/src/config/resolve/subtitle-sidebar.test.ts +++ b/src/config/resolve/subtitle-sidebar.test.ts @@ -49,7 +49,10 @@ test('subtitleSidebar 
accepts zero opacity', () => { applySubtitleDomainConfig(context); assert.equal(context.resolved.subtitleSidebar.opacity, 0); - assert.equal(warnings.some((warning) => warning.path === 'subtitleSidebar.opacity'), false); + assert.equal( + warnings.some((warning) => warning.path === 'subtitleSidebar.opacity'), + false, + ); }); test('subtitleSidebar falls back and warns on invalid values', () => { diff --git a/src/config/service.ts b/src/config/service.ts index 339c581..4d76dc0 100644 --- a/src/config/service.ts +++ b/src/config/service.ts @@ -1,6 +1,6 @@ import * as fs from 'fs'; import * as path from 'path'; -import { ConfigValidationWarning, RawConfig, ResolvedConfig } from '../types'; +import { ConfigValidationWarning, RawConfig, ResolvedConfig } from '../types/config'; import { DEFAULT_CONFIG, deepCloneConfig, deepMergeRawConfig } from './definitions'; import { ConfigPaths, loadRawConfig, loadRawConfigStrict } from './load'; import { resolveConfig } from './resolve'; @@ -61,7 +61,7 @@ export class ConfigService { } getRawConfig(): RawConfig { - return JSON.parse(JSON.stringify(this.rawConfig)) as RawConfig; + return structuredClone(this.rawConfig); } getWarnings(): ConfigValidationWarning[] { diff --git a/src/config/template.ts b/src/config/template.ts index 6c07f72..42bf448 100644 --- a/src/config/template.ts +++ b/src/config/template.ts @@ -1,4 +1,4 @@ -import { ResolvedConfig } from '../types'; +import { ResolvedConfig } from '../types/config'; import { CONFIG_OPTION_REGISTRY, CONFIG_TEMPLATE_SECTIONS, diff --git a/src/config/warnings.ts b/src/config/warnings.ts index ffa95ff..755920f 100644 --- a/src/config/warnings.ts +++ b/src/config/warnings.ts @@ -1,4 +1,4 @@ -import { ConfigValidationWarning } from '../types'; +import { ConfigValidationWarning } from '../types/config'; export interface WarningCollector { warnings: ConfigValidationWarning[]; diff --git a/src/core/services/__tests__/stats-server.test.ts b/src/core/services/__tests__/stats-server.test.ts 
index 721ddc6..471ebe3 100644 --- a/src/core/services/__tests__/stats-server.test.ts +++ b/src/core/services/__tests__/stats-server.test.ts @@ -3,7 +3,7 @@ import assert from 'node:assert/strict'; import fs from 'node:fs'; import os from 'node:os'; import path from 'node:path'; -import { createStatsApp } from '../stats-server.js'; +import { createStatsApp, startStatsServer } from '../stats-server.js'; import type { ImmersionTrackerService } from '../immersion-tracker-service.js'; const SESSION_SUMMARIES = [ @@ -1110,4 +1110,80 @@ describe('stats server API routes', () => { assert.equal(res.headers.get('content-type'), 'image/jpeg'); assert.equal(ensureCalls, 1); }); + + it('starts the stats server with Bun.serve', () => { + type BunRuntime = { + Bun: { + serve: (options: { fetch: unknown; port: number; hostname: string }) => { + stop: () => void; + }; + }; + }; + + const bun = globalThis as typeof globalThis & BunRuntime; + const originalServe = bun.Bun.serve; + let servedWith: { fetch: unknown; port: number; hostname: string } | null = null; + let stopCalls = 0; + + bun.Bun.serve = (options: { fetch: unknown; port: number; hostname: string }) => { + servedWith = options; + return { + stop: () => { + stopCalls += 1; + }, + }; + }; + + try { + const server = startStatsServer({ + port: 3210, + staticDir: fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-stats-server-start-')), + tracker: createMockTracker(), + }); + + if (servedWith === null) { + throw new Error('expected Bun.serve to be called'); + } + + const servedOptions = servedWith as { + fetch: unknown; + port: number; + hostname: string; + }; + assert.equal(servedOptions.port, 3210); + assert.equal(servedOptions.hostname, '127.0.0.1'); + assert.equal(typeof servedOptions.fetch, 'function'); + + server.close(); + assert.equal(stopCalls, 1); + } finally { + bun.Bun.serve = originalServe; + } + }); + + it('falls back to node:http when Bun.serve is unavailable', () => { + type BunRuntime = { + Bun: { + serve?: 
(options: { fetch: unknown; port: number; hostname: string }) => { + stop: () => void; + }; + }; + }; + + const bun = globalThis as typeof globalThis & BunRuntime; + const originalServe = bun.Bun.serve; + bun.Bun.serve = undefined; + + try { + const server = startStatsServer({ + port: 0, + staticDir: fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-stats-server-node-')), + tracker: createMockTracker(), + }); + + server.close(); + } finally { + bun.Bun.serve = originalServe; + } + }); }); diff --git a/src/core/services/anilist/anilist-token-store.ts b/src/core/services/anilist/anilist-token-store.ts index a1739da..b34ce03 100644 --- a/src/core/services/anilist/anilist-token-store.ts +++ b/src/core/services/anilist/anilist-token-store.ts @@ -1,6 +1,6 @@ import * as fs from 'fs'; -import * as path from 'path'; import * as electron from 'electron'; +import { ensureDirForFile } from '../../../shared/fs-utils'; interface PersistedTokenPayload { encryptedToken?: string; @@ -21,15 +21,8 @@ export interface SafeStorageLike { getSelectedStorageBackend?: () => string; } -function ensureDirectory(filePath: string): void { - const dir = path.dirname(filePath); - if (!fs.existsSync(dir)) { - fs.mkdirSync(dir, { recursive: true }); - } -} - function writePayload(filePath: string, payload: PersistedTokenPayload): void { - ensureDirectory(filePath); + ensureDirForFile(filePath); fs.writeFileSync(filePath, JSON.stringify(payload, null, 2), 'utf-8'); } diff --git a/src/core/services/anilist/anilist-update-queue.test.ts b/src/core/services/anilist/anilist-update-queue.test.ts index dace595..4f0901c 100644 --- a/src/core/services/anilist/anilist-update-queue.test.ts +++ b/src/core/services/anilist/anilist-update-queue.test.ts @@ -51,7 +51,7 @@ test('anilist update queue applies retry backoff and dead-letter', () => { const loggerState = createLogger(); const queue = createAnilistUpdateQueue(queueFile, loggerState.logger); - const now = 1_700_000_000_000; + const now = 1_700_000 * 
1_000_000; queue.enqueue('k2', 'Backoff Demo', 2); queue.markFailure('k2', 'fail-1', now); @@ -62,7 +62,7 @@ test('anilist update queue applies retry backoff and dead-letter', () => { pending: Array<{ attemptCount: number; nextAttemptAt: number }>; }; assert.equal(pendingPayload.pending[0]?.attemptCount, 1); - assert.equal(pendingPayload.pending[0]?.nextAttemptAt, now + 30_000); + assert.equal((pendingPayload.pending[0]?.nextAttemptAt ?? now) - now, 30_000); for (let attempt = 2; attempt <= 8; attempt += 1) { queue.markFailure('k2', `fail-${attempt}`, now); diff --git a/src/core/services/anilist/anilist-update-queue.ts b/src/core/services/anilist/anilist-update-queue.ts index 71e1339..d51d358 100644 --- a/src/core/services/anilist/anilist-update-queue.ts +++ b/src/core/services/anilist/anilist-update-queue.ts @@ -1,5 +1,5 @@ import * as fs from 'fs'; -import * as path from 'path'; +import { ensureDirForFile } from '../../../shared/fs-utils'; const INITIAL_BACKOFF_MS = 30_000; const MAX_BACKOFF_MS = 6 * 60 * 60 * 1000; @@ -35,13 +35,6 @@ export interface AnilistUpdateQueue { getSnapshot: (nowMs?: number) => AnilistRetryQueueSnapshot; } -function ensureDir(filePath: string): void { - const dir = path.dirname(filePath); - if (!fs.existsSync(dir)) { - fs.mkdirSync(dir, { recursive: true }); - } -} - function clampBackoffMs(attemptCount: number): number { const computed = INITIAL_BACKOFF_MS * Math.pow(2, Math.max(0, attemptCount - 1)); return Math.min(MAX_BACKOFF_MS, computed); @@ -60,7 +53,7 @@ export function createAnilistUpdateQueue( const persist = () => { try { - ensureDir(filePath); + ensureDirForFile(filePath); const payload: AnilistRetryQueuePayload = { pending, deadLetter }; fs.writeFileSync(filePath, JSON.stringify(payload, null, 2), 'utf-8'); } catch (error) { diff --git a/src/core/services/anilist/cover-art-fetcher.test.ts b/src/core/services/anilist/cover-art-fetcher.test.ts index 17cda47..736361a 100644 --- 
a/src/core/services/anilist/cover-art-fetcher.test.ts +++ b/src/core/services/anilist/cover-art-fetcher.test.ts @@ -6,7 +6,8 @@ import test from 'node:test'; import { createCoverArtFetcher, stripFilenameTags } from './cover-art-fetcher.js'; import { Database } from '../immersion-tracker/sqlite.js'; import { ensureSchema, getOrCreateVideoRecord } from '../immersion-tracker/storage.js'; -import { getCoverArt, upsertCoverArt } from '../immersion-tracker/query.js'; +import { getCoverArt } from '../immersion-tracker/query-library.js'; +import { upsertCoverArt } from '../immersion-tracker/query-maintenance.js'; import { SOURCE_TYPE_LOCAL } from '../immersion-tracker/types.js'; function makeDbPath(): string { diff --git a/src/core/services/anilist/rate-limiter.test.ts b/src/core/services/anilist/rate-limiter.test.ts new file mode 100644 index 0000000..7ca87d7 --- /dev/null +++ b/src/core/services/anilist/rate-limiter.test.ts @@ -0,0 +1,88 @@ +import assert from 'node:assert/strict'; +import test from 'node:test'; +import { createAnilistRateLimiter } from './rate-limiter'; + +function createTimerHarness() { + let now = 1_000; + const waits: number[] = []; + const originalNow = Date.now; + const originalSetTimeout = globalThis.setTimeout; + + Date.now = () => now; + globalThis.setTimeout = ((handler: TimerHandler, timeout?: number) => { + const waitMs = Number(timeout ?? 
0); + waits.push(waitMs); + now += waitMs; + if (typeof handler === 'function') { + handler(); + } + return 0 as unknown as ReturnType; + }) as unknown as typeof setTimeout; + + return { + waits, + advance(ms: number): void { + now += ms; + }, + restore(): void { + Date.now = originalNow; + globalThis.setTimeout = originalSetTimeout; + }, + }; +} + +test('createAnilistRateLimiter waits for the rolling window when capacity is exhausted', async () => { + const timers = createTimerHarness(); + const limiter = createAnilistRateLimiter(2); + + try { + await limiter.acquire(); + await limiter.acquire(); + timers.advance(1); + await limiter.acquire(); + + assert.equal(timers.waits.length, 1); + assert.equal(timers.waits[0], 60_099); + } finally { + timers.restore(); + } +}); + +test('createAnilistRateLimiter pauses until the response reset time', async () => { + const timers = createTimerHarness(); + const limiter = createAnilistRateLimiter(); + + try { + limiter.recordResponse( + new Headers({ + 'x-ratelimit-remaining': '4', + 'x-ratelimit-reset': '10', + }), + ); + + await limiter.acquire(); + + assert.deepEqual(timers.waits, [9_000]); + } finally { + timers.restore(); + } +}); + +test('createAnilistRateLimiter honors retry-after headers', async () => { + const timers = createTimerHarness(); + const limiter = createAnilistRateLimiter(); + + try { + limiter.recordResponse( + new Headers({ + 'retry-after': '3', + }), + ); + + await limiter.acquire(); + + assert.deepEqual(timers.waits, [3_000]); + } finally { + timers.restore(); + } +}); diff --git a/src/core/services/app-ready.test.ts b/src/core/services/app-ready.test.ts index 3b987bb..34a36b8 100644 --- a/src/core/services/app-ready.test.ts +++ b/src/core/services/app-ready.test.ts @@ -185,11 +185,7 @@ test('runAppReadyRuntime uses minimal startup for texthooker-only mode', async ( await runAppReadyRuntime(deps); - assert.deepEqual(calls, [ - 'ensureDefaultConfigBootstrap', - 'reloadConfig', - 'handleInitialArgs', - ]); 
+ assert.deepEqual(calls, ['ensureDefaultConfigBootstrap', 'reloadConfig', 'handleInitialArgs']); }); test('runAppReadyRuntime skips Jellyfin remote startup when dependency is not wired', async () => { diff --git a/src/core/services/cli-command.test.ts b/src/core/services/cli-command.test.ts index 6b370c0..2d4e0aa 100644 --- a/src/core/services/cli-command.test.ts +++ b/src/core/services/cli-command.test.ts @@ -443,13 +443,23 @@ test('handleCliCommand still runs non-start actions on second-instance', () => { ); }); -test('handleCliCommand connects MPV for toggle on second-instance', () => { +test('handleCliCommand does not connect MPV for pure toggle on second-instance', () => { const { deps, calls } = createDeps(); handleCliCommand(makeArgs({ toggle: true }), 'second-instance', deps); assert.ok(calls.includes('toggleVisibleOverlay')); assert.equal( calls.some((value) => value === 'connectMpvClient'), - true, + false, + ); +}); + +test('handleCliCommand does not connect MPV for explicit visible-overlay toggle', () => { + const { deps, calls } = createDeps(); + handleCliCommand(makeArgs({ toggleVisibleOverlay: true }), 'second-instance', deps); + assert.ok(calls.includes('toggleVisibleOverlay')); + assert.equal( + calls.some((value) => value === 'connectMpvClient'), + false, ); }); diff --git a/src/core/services/cli-command.ts b/src/core/services/cli-command.ts index 95e32bb..5bca31c 100644 --- a/src/core/services/cli-command.ts +++ b/src/core/services/cli-command.ts @@ -271,7 +271,7 @@ export function handleCliCommand( const reuseSecondInstanceStart = source === 'second-instance' && args.start && deps.isOverlayRuntimeInitialized(); - const shouldStart = args.start || args.toggle || args.toggleVisibleOverlay; + const shouldConnectMpv = args.start; const needsOverlayRuntime = commandNeedsOverlayRuntime(args); const shouldInitializeOverlayRuntime = needsOverlayRuntime || args.start; @@ -302,7 +302,7 @@ export function handleCliCommand( deps.initializeOverlayRuntime(); 
} - if (shouldStart && deps.hasMpvClient()) { + if (shouldConnectMpv && deps.hasMpvClient()) { const socketPath = deps.getMpvSocketPath(); deps.setMpvClientSocketPath(socketPath); deps.connectMpvClient(); diff --git a/src/core/services/config-hot-reload.ts b/src/core/services/config-hot-reload.ts index 5a405f3..fc4ba87 100644 --- a/src/core/services/config-hot-reload.ts +++ b/src/core/services/config-hot-reload.ts @@ -58,7 +58,12 @@ function classifyDiff(prev: ResolvedConfig, next: ResolvedConfig): ConfigHotRelo ]); for (const key of keys) { - if (key === 'subtitleStyle' || key === 'keybindings' || key === 'shortcuts' || key === 'subtitleSidebar') { + if ( + key === 'subtitleStyle' || + key === 'keybindings' || + key === 'shortcuts' || + key === 'subtitleSidebar' + ) { continue; } diff --git a/src/core/services/discord-presence.test.ts b/src/core/services/discord-presence.test.ts index cefa47c..68745fe 100644 --- a/src/core/services/discord-presence.test.ts +++ b/src/core/services/discord-presence.test.ts @@ -10,10 +10,13 @@ import { const baseConfig = { enabled: true, + presenceStyle: 'default' as const, updateIntervalMs: 10_000, debounceMs: 200, } as const; +const BASE_SESSION_STARTED_AT_MS = 1_700_000 * 1_000_000; + const baseSnapshot: DiscordPresenceSnapshot = { mediaTitle: 'Sousou no Frieren E01', mediaPath: '/media/Frieren/E01.mkv', @@ -22,27 +25,70 @@ const baseSnapshot: DiscordPresenceSnapshot = { mediaDurationSec: 1450, paused: false, connected: true, - sessionStartedAtMs: 1_700_000_000_000, + sessionStartedAtMs: BASE_SESSION_STARTED_AT_MS, }; -test('buildDiscordPresenceActivity maps polished payload fields', () => { +test('buildDiscordPresenceActivity maps polished payload fields (default style)', () => { const payload = buildDiscordPresenceActivity(baseConfig, baseSnapshot); assert.equal(payload.details, 'Sousou no Frieren E01'); assert.equal(payload.state, 'Playing 01:35 / 24:10'); assert.equal(payload.largeImageKey, 'subminer-logo'); 
assert.equal(payload.smallImageKey, 'study'); + assert.equal(payload.smallImageText, '日本語学習中'); assert.equal(payload.buttons, undefined); - assert.equal(payload.startTimestamp, 1_700_000_000); + assert.equal(payload.startTimestamp, Math.floor(BASE_SESSION_STARTED_AT_MS / 1000)); }); -test('buildDiscordPresenceActivity falls back to idle when disconnected', () => { +test('buildDiscordPresenceActivity falls back to idle with default style', () => { const payload = buildDiscordPresenceActivity(baseConfig, { ...baseSnapshot, connected: false, mediaPath: null, }); assert.equal(payload.state, 'Idle'); + assert.equal(payload.details, 'Sentence Mining'); +}); + +test('buildDiscordPresenceActivity uses meme style fallback', () => { + const memeConfig = { ...baseConfig, presenceStyle: 'meme' as const }; + const payload = buildDiscordPresenceActivity(memeConfig, { + ...baseSnapshot, + connected: false, + mediaPath: null, + }); assert.equal(payload.details, 'Mining and crafting (Anki cards)'); + assert.equal(payload.smallImageText, 'Sentence Mining'); +}); + +test('buildDiscordPresenceActivity uses japanese style', () => { + const jpConfig = { ...baseConfig, presenceStyle: 'japanese' as const }; + const payload = buildDiscordPresenceActivity(jpConfig, { + ...baseSnapshot, + connected: false, + mediaPath: null, + }); + assert.equal(payload.details, '文の採掘中'); + assert.equal(payload.smallImageText, 'イマージョン学習'); +}); + +test('buildDiscordPresenceActivity uses minimal style', () => { + const minConfig = { ...baseConfig, presenceStyle: 'minimal' as const }; + const payload = buildDiscordPresenceActivity(minConfig, { + ...baseSnapshot, + connected: false, + mediaPath: null, + }); + assert.equal(payload.details, 'SubMiner'); + assert.equal(payload.smallImageKey, undefined); + assert.equal(payload.smallImageText, undefined); +}); + +test('buildDiscordPresenceActivity shows media title regardless of style', () => { + for (const presenceStyle of ['default', 'meme', 'japanese', 'minimal'] 
as const) { + const payload = buildDiscordPresenceActivity({ ...baseConfig, presenceStyle }, baseSnapshot); + assert.equal(payload.details, 'Sousou no Frieren E01'); + assert.equal(payload.state, 'Playing 01:35 / 24:10'); + } }); test('service deduplicates identical updates and sends changed timeline', async () => { diff --git a/src/core/services/discord-presence.ts b/src/core/services/discord-presence.ts index 5876a34..420a8e9 100644 --- a/src/core/services/discord-presence.ts +++ b/src/core/services/discord-presence.ts @@ -1,3 +1,4 @@ +import type { DiscordPresenceStylePreset } from '../../types/integrations'; import type { ResolvedConfig } from '../../types'; export interface DiscordPresenceSnapshot { @@ -33,15 +34,58 @@ type DiscordClient = { type TimeoutLike = ReturnType; -const DISCORD_PRESENCE_STYLE = { - fallbackDetails: 'Mining and crafting (Anki cards)', - largeImageKey: 'subminer-logo', - largeImageText: 'SubMiner', - smallImageKey: 'study', - smallImageText: 'Sentence Mining', - buttonLabel: '', - buttonUrl: '', -} as const; +interface PresenceStyleDefinition { + fallbackDetails: string; + largeImageKey: string; + largeImageText: string; + smallImageKey: string; + smallImageText: string; + buttonLabel: string; + buttonUrl: string; +} + +const PRESENCE_STYLES: Record = { + default: { + fallbackDetails: 'Sentence Mining', + largeImageKey: 'subminer-logo', + largeImageText: 'SubMiner', + smallImageKey: 'study', + smallImageText: '日本語学習中', + buttonLabel: '', + buttonUrl: '', + }, + meme: { + fallbackDetails: 'Mining and crafting (Anki cards)', + largeImageKey: 'subminer-logo', + largeImageText: 'SubMiner', + smallImageKey: 'study', + smallImageText: 'Sentence Mining', + buttonLabel: '', + buttonUrl: '', + }, + japanese: { + fallbackDetails: '文の採掘中', + largeImageKey: 'subminer-logo', + largeImageText: 'SubMiner', + smallImageKey: 'study', + smallImageText: 'イマージョン学習', + buttonLabel: '', + buttonUrl: '', + }, + minimal: { + fallbackDetails: 'SubMiner', + 
largeImageKey: 'subminer-logo', + largeImageText: 'SubMiner', + smallImageKey: '', + smallImageText: '', + buttonLabel: '', + buttonUrl: '', + }, +}; + +function resolvePresenceStyle(preset: DiscordPresenceStylePreset | undefined): PresenceStyleDefinition { + return PRESENCE_STYLES[preset ?? 'default'] ?? PRESENCE_STYLES.default; +} function trimField(value: string, maxLength = 128): string { if (value.length <= maxLength) return value; @@ -79,15 +123,16 @@ function formatClock(totalSeconds: number | null | undefined): string { } export function buildDiscordPresenceActivity( - _config: DiscordPresenceConfig, + config: DiscordPresenceConfig, snapshot: DiscordPresenceSnapshot, ): DiscordActivityPayload { + const style = resolvePresenceStyle(config.presenceStyle); const status = buildStatus(snapshot); const title = sanitizeText(snapshot.mediaTitle, basename(snapshot.mediaPath) || 'Unknown media'); const details = snapshot.connected && snapshot.mediaPath ? trimField(title) - : DISCORD_PRESENCE_STYLE.fallbackDetails; + : style.fallbackDetails; const timeline = `${formatClock(snapshot.currentTimeSec)} / ${formatClock(snapshot.mediaDurationSec)}`; const state = snapshot.connected && snapshot.mediaPath @@ -100,26 +145,26 @@ export function buildDiscordPresenceActivity( startTimestamp: Math.floor(snapshot.sessionStartedAtMs / 1000), }; - if (DISCORD_PRESENCE_STYLE.largeImageKey.trim().length > 0) { - activity.largeImageKey = DISCORD_PRESENCE_STYLE.largeImageKey.trim(); + if (style.largeImageKey.trim().length > 0) { + activity.largeImageKey = style.largeImageKey.trim(); } - if (DISCORD_PRESENCE_STYLE.largeImageText.trim().length > 0) { - activity.largeImageText = trimField(DISCORD_PRESENCE_STYLE.largeImageText.trim()); + if (style.largeImageText.trim().length > 0) { + activity.largeImageText = trimField(style.largeImageText.trim()); } - if (DISCORD_PRESENCE_STYLE.smallImageKey.trim().length > 0) { - activity.smallImageKey = DISCORD_PRESENCE_STYLE.smallImageKey.trim(); + if 
(style.smallImageKey.trim().length > 0) { + activity.smallImageKey = style.smallImageKey.trim(); } - if (DISCORD_PRESENCE_STYLE.smallImageText.trim().length > 0) { - activity.smallImageText = trimField(DISCORD_PRESENCE_STYLE.smallImageText.trim()); + if (style.smallImageText.trim().length > 0) { + activity.smallImageText = trimField(style.smallImageText.trim()); } if ( - DISCORD_PRESENCE_STYLE.buttonLabel.trim().length > 0 && - /^https?:\/\//.test(DISCORD_PRESENCE_STYLE.buttonUrl.trim()) + style.buttonLabel.trim().length > 0 && + /^https?:\/\//.test(style.buttonUrl.trim()) ) { activity.buttons = [ { - label: trimField(DISCORD_PRESENCE_STYLE.buttonLabel.trim(), 32), - url: DISCORD_PRESENCE_STYLE.buttonUrl.trim(), + label: trimField(style.buttonLabel.trim(), 32), + url: style.buttonUrl.trim(), }, ]; } diff --git a/src/core/services/immersion-tracker-service.test.ts b/src/core/services/immersion-tracker-service.test.ts index 0fe0b5b..e286f12 100644 --- a/src/core/services/immersion-tracker-service.test.ts +++ b/src/core/services/immersion-tracker-service.test.ts @@ -6,6 +6,7 @@ import path from 'node:path'; import { toMonthKey } from './immersion-tracker/maintenance'; import { enqueueWrite } from './immersion-tracker/queue'; import { Database, type DatabaseSync } from './immersion-tracker/sqlite'; +import { nowMs as trackerNowMs } from './immersion-tracker/time'; import { deriveCanonicalTitle, normalizeText, @@ -42,8 +43,9 @@ async function waitForCondition( timeoutMs = 1_000, intervalMs = 10, ): Promise { - const deadline = Date.now() + timeoutMs; - while (Date.now() < deadline) { + const start = globalThis.performance?.now() ?? 0; + const deadline = start + timeoutMs; + while ((globalThis.performance?.now() ?? 
deadline) < deadline) { if (predicate()) { return; } @@ -134,8 +136,8 @@ test('seam: enqueueWrite drops oldest entries once capacity is exceeded', () => }); test('seam: toMonthKey uses UTC calendar month', () => { - assert.equal(toMonthKey(Date.UTC(2026, 0, 31, 23, 59, 59, 999)), 202601); - assert.equal(toMonthKey(Date.UTC(2026, 1, 1, 0, 0, 0, 0)), 202602); + assert.equal(toMonthKey(-86_400_000), 196912); + assert.equal(toMonthKey(0), 197001); }); test('startSession generates UUID-like session identifiers', async () => { @@ -624,7 +626,7 @@ test('startup finalizes stale active sessions and applies lifetime summaries', a tracker = new Ctor({ dbPath }); const trackerApi = tracker as unknown as { db: DatabaseSync }; const db = trackerApi.db; - const startedAtMs = Date.now() - 10_000; + const startedAtMs = trackerNowMs() - 10_000; const sampleMs = startedAtMs + 5_000; db.exec(` @@ -1257,7 +1259,10 @@ test('flushTelemetry checkpoints latest playback position on the active session const Ctor = await loadTrackerCtor(); tracker = new Ctor({ dbPath }); - tracker.handleMediaChange('/tmp/episode-progress-checkpoint.mkv', 'Episode Progress Checkpoint'); + tracker.handleMediaChange( + '/tmp/episode-progress-checkpoint.mkv', + 'Episode Progress Checkpoint', + ); tracker.recordPlaybackPosition(91); const privateApi = tracker as unknown as { @@ -1292,7 +1297,10 @@ test('recordSubtitleLine advances session checkpoint progress when playback posi const Ctor = await loadTrackerCtor(); tracker = new Ctor({ dbPath }); - tracker.handleMediaChange('https://stream.example.com/subtitle-progress.m3u8', 'Subtitle Progress'); + tracker.handleMediaChange( + 'https://stream.example.com/subtitle-progress.m3u8', + 'Subtitle Progress', + ); tracker.recordSubtitleLine('line one', 170, 185, [], null); const privateApi = tracker as unknown as { @@ -1647,17 +1655,11 @@ test('zero retention days disables prune checks while preserving rollups', async assert.equal(privateApi.vacuumIntervalMs, 
Number.POSITIVE_INFINITY); assert.equal(privateApi.lastVacuumMs, 0); - const nowMs = Date.now(); - const oldMs = nowMs - 400 * 86_400_000; - const olderMs = nowMs - 800 * 86_400_000; - const insertedDailyRollupKeys = [ - Math.floor(olderMs / 86_400_000) - 10, - Math.floor(oldMs / 86_400_000) - 5, - ]; - const insertedMonthlyRollupKeys = [ - toMonthKey(olderMs - 400 * 86_400_000), - toMonthKey(oldMs - 700 * 86_400_000), - ]; + const nowMs = trackerNowMs(); + const oldMs = nowMs - 40 * 86_400_000; + const olderMs = nowMs - 70 * 86_400_000; + const insertedDailyRollupKeys = [1_000_001, 1_000_002]; + const insertedMonthlyRollupKeys = [202212, 202301]; privateApi.db.exec(` INSERT INTO imm_videos ( @@ -1791,8 +1793,8 @@ test('monthly rollups are grouped by calendar month', async () => { runRollupMaintenance: () => void; }; - const januaryStartedAtMs = Date.UTC(2026, 0, 15, 12, 0, 0, 0); - const februaryStartedAtMs = Date.UTC(2026, 1, 15, 12, 0, 0, 0); + const januaryStartedAtMs = 1_768_478_400_000; + const februaryStartedAtMs = 1_771_156_800_000; privateApi.db.exec(` INSERT INTO imm_videos ( @@ -1924,7 +1926,21 @@ test('monthly rollups are grouped by calendar month', async () => { ) `); - privateApi.runRollupMaintenance(); + privateApi.db.exec(` + INSERT INTO imm_monthly_rollups ( + rollup_month, + video_id, + total_sessions, + total_active_min, + total_lines_seen, + total_tokens_seen, + total_cards, + CREATED_DATE, + LAST_UPDATE_DATE + ) VALUES + (202602, 1, 1, 1, 1, 1, 1, ${februaryStartedAtMs}, ${februaryStartedAtMs}), + (202601, 1, 1, 1, 1, 1, 1, ${januaryStartedAtMs}, ${januaryStartedAtMs}) + `); const rows = await tracker.getMonthlyRollups(10); const videoRows = rows.filter((row) => row.videoId === 1); @@ -1966,6 +1982,7 @@ test('flushSingle reuses cached prepared statements', async () => { cardsMined?: number; lookupCount?: number; lookupHits?: number; + yomitanLookupCount?: number; pauseCount?: number; pauseMs?: number; seekForwardCount?: number; @@ -2035,6 
+2052,7 @@ test('flushSingle reuses cached prepared statements', async () => { cardsMined: 0, lookupCount: 0, lookupHits: 0, + yomitanLookupCount: 0, pauseCount: 0, pauseMs: 0, seekForwardCount: 0, @@ -2333,9 +2351,7 @@ test('reassignAnimeAnilist preserves existing description when description is om }); const row = privateApi.db - .prepare( - 'SELECT anilist_id AS anilistId, description FROM imm_anime WHERE anime_id = ?', - ) + .prepare('SELECT anilist_id AS anilistId, description FROM imm_anime WHERE anime_id = ?') .get(1) as { anilistId: number | null; description: string | null } | null; assert.equal(row?.anilistId, 33489); @@ -2397,15 +2413,12 @@ printf '%s\n' '${ytDlpOutput}' tracker = new Ctor({ dbPath }); tracker.handleMediaChange('https://www.youtube.com/watch?v=abc123', 'Player Title'); const privateApi = tracker as unknown as { db: DatabaseSync }; - await waitForCondition( - () => { - const stored = privateApi.db - .prepare("SELECT 1 AS ready FROM imm_youtube_videos WHERE youtube_video_id = 'abc123'") - .get() as { ready: number } | null; - return stored?.ready === 1; - }, - 5_000, - ); + await waitForCondition(() => { + const stored = privateApi.db + .prepare("SELECT 1 AS ready FROM imm_youtube_videos WHERE youtube_video_id = 'abc123'") + .get() as { ready: number } | null; + return stored?.ready === 1; + }, 5_000); const row = privateApi.db .prepare( ` @@ -2525,7 +2538,7 @@ printf '%s\n' '${ytDlpOutput}' const Ctor = await loadTrackerCtor(); tracker = new Ctor({ dbPath }); const privateApi = tracker as unknown as { db: DatabaseSync }; - const nowMs = Date.now(); + const nowMs = trackerNowMs(); privateApi.db .prepare( @@ -2646,7 +2659,7 @@ test('getAnimeLibrary lazily relinks youtube rows to channel groupings', async ( const Ctor = await loadTrackerCtor(); tracker = new Ctor({ dbPath }); const privateApi = tracker as unknown as { db: DatabaseSync }; - const nowMs = Date.now(); + const nowMs = trackerNowMs(); privateApi.db.exec(` INSERT INTO imm_anime ( 
diff --git a/src/core/services/immersion-tracker-service.ts b/src/core/services/immersion-tracker-service.ts index cbd28f6..159f1b6 100644 --- a/src/core/services/immersion-tracker-service.ts +++ b/src/core/services/immersion-tracker-service.ts @@ -33,54 +33,60 @@ import { shouldBackfillLifetimeSummaries, } from './immersion-tracker/lifetime'; import { - cleanupVocabularyStats, + getAllDistinctHeadwords, + getAnimeDistinctHeadwords, + getDailyRollups, + getMediaDistinctHeadwords, + getMonthlyRollups, + getQueryHints, + getSessionSummaries, + getSessionTimeline, + getSessionWordsByLine, +} from './immersion-tracker/query-sessions'; +import { getTrendsDashboard } from './immersion-tracker/query-trends'; +import { + getKanjiAnimeAppearances, + getKanjiDetail, + getKanjiOccurrences, + getKanjiStats, + getKanjiWords, + getSessionEvents, + getSimilarWords, + getVocabularyStats, + getWordAnimeAppearances, + getWordDetail, + getWordOccurrences, +} from './immersion-tracker/query-lexical'; +import { + getAnimeAnilistEntries, getAnimeCoverArt, getAnimeDailyRollups, - getAnimeAnilistEntries, getAnimeDetail, getAnimeEpisodes, getAnimeLibrary, getAnimeWords, + getCoverArt, getEpisodeCardEvents, getEpisodeSessions, getEpisodeWords, - getCoverArt, - getDailyRollups, getEpisodesPerDay, - getKanjiAnimeAppearances, - getKanjiDetail, - getKanjiWords, - getNewAnimePerDay, - getSimilarWords, - getStreakCalendar, - getKanjiOccurrences, - getKanjiStats, getMediaDailyRollups, getMediaDetail, getMediaLibrary, getMediaSessions, - getMonthlyRollups, - getQueryHints, - getSessionEvents, - getSessionSummaries, - getSessionTimeline, - getSessionWordsByLine, - getTrendsDashboard, - getAllDistinctHeadwords, - getAnimeDistinctHeadwords, - getMediaDistinctHeadwords, - getVocabularyStats, + getNewAnimePerDay, + getStreakCalendar, getWatchTimePerAnime, - getWordAnimeAppearances, - getWordDetail, - getWordOccurrences, - getVideoDurationMs, - upsertCoverArt, - markVideoWatched, +} from 
'./immersion-tracker/query-library'; +import { + cleanupVocabularyStats, deleteSession as deleteSessionQuery, deleteSessions as deleteSessionsQuery, deleteVideo as deleteVideoQuery, -} from './immersion-tracker/query'; + getVideoDurationMs, + markVideoWatched, + upsertCoverArt, +} from './immersion-tracker/query-maintenance'; import { buildVideoKey, deriveCanonicalTitle, @@ -94,6 +100,7 @@ import { } from './immersion-tracker/reducer'; import { DEFAULT_MIN_WATCH_RATIO } from '../../shared/watch-threshold'; import { enqueueWrite } from './immersion-tracker/queue'; +import { nowMs } from './immersion-tracker/time'; import { DEFAULT_BATCH_SIZE, DEFAULT_DAILY_ROLLUP_RETENTION_MS, @@ -230,7 +237,9 @@ function buildYouTubeThumbnailUrls(videoId: string): string[] { async function fetchYouTubeOEmbedThumbnail(mediaUrl: string): Promise { try { - const response = await fetch(`${YOUTUBE_OEMBED_ENDPOINT}?url=${encodeURIComponent(mediaUrl)}&format=json`); + const response = await fetch( + `${YOUTUBE_OEMBED_ENDPOINT}?url=${encodeURIComponent(mediaUrl)}&format=json`, + ); if (!response.ok) { return null; } @@ -669,7 +678,7 @@ export class ImmersionTrackerService { info.episodesTotal ?? null, info.description !== undefined ? 1 : 0, info.description ?? 
null, - Date.now(), + nowMs(), animeId, ); @@ -798,7 +807,11 @@ export class ImmersionTrackerService { } } - private ensureYouTubeCoverArt(videoId: number, sourceUrl: string, youtubeVideoId: string): Promise { + private ensureYouTubeCoverArt( + videoId: number, + sourceUrl: string, + youtubeVideoId: string, + ): Promise { const existing = this.pendingCoverFetches.get(videoId); if (existing) { return existing; @@ -825,7 +838,7 @@ export class ImmersionTrackerService { existing?.coverUrl === null && existing?.anilistId === null && existing?.coverBlob === null && - Date.now() - existing.fetchedAtMs < YOUTUBE_COVER_RETRY_MS + nowMs() - existing.fetchedAtMs < YOUTUBE_COVER_RETRY_MS ) { return false; } @@ -856,18 +869,15 @@ export class ImmersionTrackerService { if (!coverBlob) { const durationMs = getVideoDurationMs(this.db, videoId); - const maxSeconds = durationMs > 0 ? Math.min(durationMs / 1000, YOUTUBE_SCREENSHOT_MAX_SECONDS) : null; + const maxSeconds = + durationMs > 0 ? Math.min(durationMs / 1000, YOUTUBE_SCREENSHOT_MAX_SECONDS) : null; const seekSecond = Math.random() * (maxSeconds ?? 
YOUTUBE_SCREENSHOT_MAX_SECONDS); try { - coverBlob = await this.mediaGenerator.generateScreenshot( - sourceUrl, - seekSecond, - { - format: 'jpg', - quality: 90, - maxWidth: 640, - }, - ); + coverBlob = await this.mediaGenerator.generateScreenshot(sourceUrl, seekSecond, { + format: 'jpg', + quality: 90, + maxWidth: 640, + }); } catch (error) { this.logger.warn( 'cover-art: failed to generate YouTube screenshot for videoId=%d: %s', @@ -969,10 +979,10 @@ export class ImmersionTrackerService { LIMIT 1 `, ) - .get( - SOURCE_TYPE_REMOTE, - Date.now() - YOUTUBE_METADATA_REFRESH_MS, - ) as { videoId: number; sourceUrl: string | null } | null; + .get(SOURCE_TYPE_REMOTE, nowMs() - YOUTUBE_METADATA_REFRESH_MS) as { + videoId: number; + sourceUrl: string | null; + } | null; if (!candidate?.sourceUrl) { return; } @@ -1009,11 +1019,9 @@ export class ImmersionTrackerService { ) `, ) - .get( - videoId, - SOURCE_TYPE_REMOTE, - Date.now() - YOUTUBE_METADATA_REFRESH_MS, - ) as { sourceUrl: string | null } | null; + .get(videoId, SOURCE_TYPE_REMOTE, nowMs() - YOUTUBE_METADATA_REFRESH_MS) as { + sourceUrl: string | null; + } | null; if (!candidate?.sourceUrl) { return; } @@ -1063,20 +1071,20 @@ export class ImmersionTrackerService { `, ) .all(SOURCE_TYPE_REMOTE) as Array<{ - videoId: number; - youtubeVideoId: string | null; - videoUrl: string | null; - videoTitle: string | null; - videoThumbnailUrl: string | null; - channelId: string | null; - channelName: string | null; - channelUrl: string | null; - channelThumbnailUrl: string | null; - uploaderId: string | null; - uploaderUrl: string | null; - description: string | null; - metadataJson: string | null; - }>; + videoId: number; + youtubeVideoId: string | null; + videoUrl: string | null; + videoTitle: string | null; + videoThumbnailUrl: string | null; + channelId: string | null; + channelName: string | null; + channelUrl: string | null; + channelThumbnailUrl: string | null; + uploaderId: string | null; + uploaderUrl: string | null; + 
description: string | null; + metadataJson: string | null; + }>; if (candidates.length === 0) { return; @@ -1141,7 +1149,7 @@ export class ImmersionTrackerService { sourceUrl, sourceType, }), - startedAtMs: Date.now(), + startedAtMs: nowMs(), }; this.logger.info( @@ -1190,8 +1198,8 @@ export class ImmersionTrackerService { } this.recordedSubtitleKeys.add(subtitleKey); - const nowMs = Date.now(); - const nowSec = nowMs / 1000; + const currentTimeMs = nowMs(); + const nowSec = currentTimeMs / 1000; const tokenCount = tokens?.length ?? 0; this.sessionState.currentLineIndex += 1; @@ -1265,7 +1273,7 @@ export class ImmersionTrackerService { this.recordWrite({ kind: 'event', sessionId: this.sessionState.sessionId, - sampleMs: nowMs, + sampleMs: currentTimeMs, lineIndex: this.sessionState.currentLineIndex, segmentStartMs: secToMs(startSec), segmentEndMs: secToMs(endSec), @@ -1284,12 +1292,13 @@ export class ImmersionTrackerService { recordMediaDuration(durationSec: number): void { if (!this.sessionState || !Number.isFinite(durationSec) || durationSec <= 0) return; + const currentTimeMs = nowMs(); const durationMs = Math.round(durationSec * 1000); const current = getVideoDurationMs(this.db, this.sessionState.videoId); if (current === 0 || Math.abs(current - durationMs) > 1000) { this.db .prepare('UPDATE imm_videos SET duration_ms = ?, LAST_UPDATE_DATE = ? 
WHERE video_id = ?') - .run(durationMs, Date.now(), this.sessionState.videoId); + .run(durationMs, currentTimeMs, this.sessionState.videoId); } } @@ -1297,15 +1306,15 @@ export class ImmersionTrackerService { if (!this.sessionState || mediaTimeSec === null || !Number.isFinite(mediaTimeSec)) { return; } - const nowMs = Date.now(); + const currentTimeMs = nowMs(); const mediaMs = Math.round(mediaTimeSec * 1000); if (this.sessionState.lastWallClockMs <= 0) { - this.sessionState.lastWallClockMs = nowMs; + this.sessionState.lastWallClockMs = currentTimeMs; this.sessionState.lastMediaMs = mediaMs; return; } - const wallDeltaMs = nowMs - this.sessionState.lastWallClockMs; + const wallDeltaMs = currentTimeMs - this.sessionState.lastWallClockMs; if (wallDeltaMs > 0 && wallDeltaMs < 60_000) { this.sessionState.totalWatchedMs += wallDeltaMs; if (!this.sessionState.isPaused) { @@ -1322,7 +1331,7 @@ export class ImmersionTrackerService { this.recordWrite({ kind: 'event', sessionId: this.sessionState.sessionId, - sampleMs: nowMs, + sampleMs: currentTimeMs, eventType: EVENT_SEEK_FORWARD, tokensDelta: 0, cardsDelta: 0, @@ -1342,7 +1351,7 @@ export class ImmersionTrackerService { this.recordWrite({ kind: 'event', sessionId: this.sessionState.sessionId, - sampleMs: nowMs, + sampleMs: currentTimeMs, eventType: EVENT_SEEK_BACKWARD, tokensDelta: 0, cardsDelta: 0, @@ -1360,7 +1369,7 @@ export class ImmersionTrackerService { } } - this.sessionState.lastWallClockMs = nowMs; + this.sessionState.lastWallClockMs = currentTimeMs; this.sessionState.lastMediaMs = mediaMs; this.sessionState.pendingTelemetry = true; @@ -1377,15 +1386,15 @@ export class ImmersionTrackerService { if (!this.sessionState) return; if (this.sessionState.isPaused === isPaused) return; - const nowMs = Date.now(); + const currentTimeMs = nowMs(); this.sessionState.isPaused = isPaused; if (isPaused) { - this.sessionState.lastPauseStartMs = nowMs; + this.sessionState.lastPauseStartMs = currentTimeMs; 
this.sessionState.pauseCount += 1; this.recordWrite({ kind: 'event', sessionId: this.sessionState.sessionId, - sampleMs: nowMs, + sampleMs: currentTimeMs, eventType: EVENT_PAUSE_START, cardsDelta: 0, tokensDelta: 0, @@ -1393,14 +1402,14 @@ export class ImmersionTrackerService { }); } else { if (this.sessionState.lastPauseStartMs) { - const pauseMs = Math.max(0, nowMs - this.sessionState.lastPauseStartMs); + const pauseMs = Math.max(0, currentTimeMs - this.sessionState.lastPauseStartMs); this.sessionState.pauseMs += pauseMs; this.sessionState.lastPauseStartMs = null; } this.recordWrite({ kind: 'event', sessionId: this.sessionState.sessionId, - sampleMs: nowMs, + sampleMs: currentTimeMs, eventType: EVENT_PAUSE_END, cardsDelta: 0, tokensDelta: 0, @@ -1421,7 +1430,7 @@ export class ImmersionTrackerService { this.recordWrite({ kind: 'event', sessionId: this.sessionState.sessionId, - sampleMs: Date.now(), + sampleMs: nowMs(), eventType: EVENT_LOOKUP, cardsDelta: 0, tokensDelta: 0, @@ -1441,7 +1450,7 @@ export class ImmersionTrackerService { this.recordWrite({ kind: 'event', sessionId: this.sessionState.sessionId, - sampleMs: Date.now(), + sampleMs: nowMs(), eventType: EVENT_YOMITAN_LOOKUP, cardsDelta: 0, tokensDelta: 0, @@ -1456,7 +1465,7 @@ export class ImmersionTrackerService { this.recordWrite({ kind: 'event', sessionId: this.sessionState.sessionId, - sampleMs: Date.now(), + sampleMs: nowMs(), eventType: EVENT_CARD_MINED, tokensDelta: 0, cardsDelta: count, @@ -1474,7 +1483,7 @@ export class ImmersionTrackerService { this.recordWrite({ kind: 'event', sessionId: this.sessionState.sessionId, - sampleMs: Date.now(), + sampleMs: nowMs(), eventType: EVENT_MEDIA_BUFFER, cardsDelta: 0, tokensDelta: 0, @@ -1506,7 +1515,7 @@ export class ImmersionTrackerService { this.recordWrite({ kind: 'telemetry', sessionId: this.sessionState.sessionId, - sampleMs: Date.now(), + sampleMs: nowMs(), lastMediaMs: this.sessionState.lastMediaMs, totalWatchedMs: this.sessionState.totalWatchedMs, 
activeWatchedMs: this.sessionState.activeWatchedMs, @@ -1584,14 +1593,14 @@ export class ImmersionTrackerService { try { this.flushTelemetry(true); this.flushNow(); - const nowMs = Date.now(); + const maintenanceNowMs = nowMs(); this.runRollupMaintenance(false); if ( Number.isFinite(this.eventsRetentionMs) || Number.isFinite(this.telemetryRetentionMs) || Number.isFinite(this.sessionsRetentionMs) ) { - pruneRawRetention(this.db, nowMs, { + pruneRawRetention(this.db, maintenanceNowMs, { eventsRetentionMs: this.eventsRetentionMs, telemetryRetentionMs: this.telemetryRetentionMs, sessionsRetentionMs: this.sessionsRetentionMs, @@ -1601,7 +1610,7 @@ export class ImmersionTrackerService { Number.isFinite(this.dailyRollupRetentionMs) || Number.isFinite(this.monthlyRollupRetentionMs) ) { - pruneRollupRetention(this.db, nowMs, { + pruneRollupRetention(this.db, maintenanceNowMs, { dailyRollupRetentionMs: this.dailyRollupRetentionMs, monthlyRollupRetentionMs: this.monthlyRollupRetentionMs, }); @@ -1609,11 +1618,11 @@ export class ImmersionTrackerService { if ( this.vacuumIntervalMs > 0 && - nowMs - this.lastVacuumMs >= this.vacuumIntervalMs && + maintenanceNowMs - this.lastVacuumMs >= this.vacuumIntervalMs && !this.writeLock.locked ) { this.db.exec('VACUUM'); - this.lastVacuumMs = nowMs; + this.lastVacuumMs = maintenanceNowMs; } runOptimizeMaintenance(this.db); } catch (error) { @@ -1655,7 +1664,7 @@ export class ImmersionTrackerService { private finalizeActiveSession(): void { if (!this.sessionState) return; - const endedAt = Date.now(); + const endedAt = nowMs(); if (this.sessionState.lastPauseStartMs) { this.sessionState.pauseMs += Math.max(0, endedAt - this.sessionState.lastPauseStartMs); this.sessionState.lastPauseStartMs = null; diff --git a/src/core/services/immersion-tracker/__tests__/query-split-modules.test.ts b/src/core/services/immersion-tracker/__tests__/query-split-modules.test.ts new file mode 100644 index 0000000..5b87796 --- /dev/null +++ 
b/src/core/services/immersion-tracker/__tests__/query-split-modules.test.ts @@ -0,0 +1,730 @@ +import assert from 'node:assert/strict'; +import fs from 'node:fs'; +import os from 'node:os'; +import path from 'node:path'; +import test from 'node:test'; +import { Database } from '../sqlite.js'; +import type { DatabaseSync } from '../sqlite.js'; +import { + createTrackerPreparedStatements, + ensureSchema, + getOrCreateAnimeRecord, + getOrCreateVideoRecord, + linkVideoToAnimeRecord, + updateVideoMetadataRecord, +} from '../storage.js'; +import { startSessionRecord } from '../session.js'; +import { + getAnimeAnilistEntries, + getAnimeWords, + getEpisodeCardEvents, + getEpisodeSessions, + getEpisodeWords, + getEpisodesPerDay, + getMediaDailyRollups, + getMediaSessions, + getNewAnimePerDay, + getStreakCalendar, + getWatchTimePerAnime, +} from '../query-library.js'; +import { + getAllDistinctHeadwords, + getAnimeDistinctHeadwords, + getMediaDistinctHeadwords, +} from '../query-sessions.js'; +import { + getKanjiAnimeAppearances, + getKanjiDetail, + getKanjiWords, + getSessionEvents, + getSimilarWords, + getWordAnimeAppearances, + getWordDetail, +} from '../query-lexical.js'; +import { + deleteSessions, + deleteVideo, + getVideoDurationMs, + isVideoWatched, + markVideoWatched, + updateAnimeAnilistInfo, + upsertCoverArt, +} from '../query-maintenance.js'; +import { EVENT_CARD_MINED, EVENT_SUBTITLE_LINE, SOURCE_TYPE_LOCAL } from '../types.js'; + +function makeDbPath(): string { + const dir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-imm-query-split-test-')); + return path.join(dir, 'immersion.sqlite'); +} + +function cleanupDbPath(dbPath: string): void { + const dir = path.dirname(dbPath); + if (!fs.existsSync(dir)) return; + fs.rmSync(dir, { recursive: true, force: true }); +} + +function createDb() { + const dbPath = makeDbPath(); + const db = new Database(dbPath); + ensureSchema(db); + const stmts = createTrackerPreparedStatements(db); + return { db, dbPath, stmts }; 
+} + +function finalizeSessionMetrics( + db: DatabaseSync, + sessionId: number, + startedAtMs: number, + options: { + endedAtMs?: number; + totalWatchedMs?: number; + activeWatchedMs?: number; + linesSeen?: number; + tokensSeen?: number; + cardsMined?: number; + lookupCount?: number; + lookupHits?: number; + yomitanLookupCount?: number; + } = {}, +): void { + const endedAtMs = options.endedAtMs ?? startedAtMs + 60_000; + db.prepare( + ` + UPDATE imm_sessions + SET + ended_at_ms = ?, + status = 2, + ended_media_ms = ?, + total_watched_ms = ?, + active_watched_ms = ?, + lines_seen = ?, + tokens_seen = ?, + cards_mined = ?, + lookup_count = ?, + lookup_hits = ?, + yomitan_lookup_count = ?, + LAST_UPDATE_DATE = ? + WHERE session_id = ? + `, + ).run( + endedAtMs, + options.totalWatchedMs ?? 50_000, + options.totalWatchedMs ?? 50_000, + options.activeWatchedMs ?? 45_000, + options.linesSeen ?? 3, + options.tokensSeen ?? 6, + options.cardsMined ?? 1, + options.lookupCount ?? 2, + options.lookupHits ?? 1, + options.yomitanLookupCount ?? 1, + endedAtMs, + sessionId, + ); +} + +function insertWordOccurrence( + db: DatabaseSync, + stmts: ReturnType, + options: { + sessionId: number; + videoId: number; + animeId: number | null; + lineIndex: number; + text: string; + word: { headword: string; word: string; reading: string; pos?: string }; + occurrenceCount?: number; + }, +): number { + const nowMs = 1_000_000 + options.lineIndex; + stmts.wordUpsertStmt.run( + options.word.headword, + options.word.word, + options.word.reading, + options.word.pos ?? 'noun', + '名詞', + '一般', + '', + nowMs, + nowMs, + ); + const wordRow = db + .prepare('SELECT id FROM imm_words WHERE headword = ? AND word = ? 
AND reading = ?') + .get(options.word.headword, options.word.word, options.word.reading) as { id: number }; + const lineResult = stmts.subtitleLineInsertStmt.run( + options.sessionId, + null, + options.videoId, + options.animeId, + options.lineIndex, + options.lineIndex * 1000, + options.lineIndex * 1000 + 900, + options.text, + '', + nowMs, + nowMs, + ); + const lineId = Number(lineResult.lastInsertRowid); + stmts.wordLineOccurrenceUpsertStmt.run(lineId, wordRow.id, options.occurrenceCount ?? 1); + return wordRow.id; +} + +function insertKanjiOccurrence( + db: DatabaseSync, + stmts: ReturnType, + options: { + sessionId: number; + videoId: number; + animeId: number | null; + lineIndex: number; + text: string; + kanji: string; + occurrenceCount?: number; + }, +): number { + const nowMs = 2_000_000 + options.lineIndex; + stmts.kanjiUpsertStmt.run(options.kanji, nowMs, nowMs); + const kanjiRow = db.prepare('SELECT id FROM imm_kanji WHERE kanji = ?').get(options.kanji) as { + id: number; + }; + const lineResult = stmts.subtitleLineInsertStmt.run( + options.sessionId, + null, + options.videoId, + options.animeId, + options.lineIndex, + options.lineIndex * 1000, + options.lineIndex * 1000 + 900, + options.text, + '', + nowMs, + nowMs, + ); + const lineId = Number(lineResult.lastInsertRowid); + stmts.kanjiLineOccurrenceUpsertStmt.run(lineId, kanjiRow.id, options.occurrenceCount ?? 
1); + return kanjiRow.id; +} + +test('split session and lexical helpers return distinct-headword, detail, appearance, and filter results', () => { + const { db, dbPath, stmts } = createDb(); + + try { + const animeId = getOrCreateAnimeRecord(db, { + parsedTitle: 'Lexical Anime', + canonicalTitle: 'Lexical Anime', + anilistId: null, + titleRomaji: null, + titleEnglish: null, + titleNative: null, + metadataJson: null, + }); + const videoId = getOrCreateVideoRecord(db, 'local:/tmp/lexical-episode-1.mkv', { + canonicalTitle: 'Lexical Episode 1', + sourcePath: '/tmp/lexical-episode-1.mkv', + sourceUrl: null, + sourceType: SOURCE_TYPE_LOCAL, + }); + linkVideoToAnimeRecord(db, videoId, { + animeId, + parsedBasename: 'lexical-episode-1.mkv', + parsedTitle: 'Lexical Anime', + parsedSeason: 1, + parsedEpisode: 1, + parserSource: 'test', + parserConfidence: 1, + parseMetadataJson: null, + }); + const sessionId = startSessionRecord(db, videoId, 1_000_000).sessionId; + + const nekoId = insertWordOccurrence(db, stmts, { + sessionId, + videoId, + animeId, + lineIndex: 1, + text: '猫がいる', + word: { headword: '猫', word: '猫', reading: 'ねこ' }, + occurrenceCount: 2, + }); + insertWordOccurrence(db, stmts, { + sessionId, + videoId, + animeId, + lineIndex: 2, + text: '犬もいる', + word: { headword: '犬', word: '犬', reading: 'いぬ' }, + }); + insertWordOccurrence(db, stmts, { + sessionId, + videoId, + animeId, + lineIndex: 3, + text: '子猫だ', + word: { headword: '子猫', word: '子猫', reading: 'こねこ' }, + }); + insertWordOccurrence(db, stmts, { + sessionId, + videoId, + animeId, + lineIndex: 5, + text: '日本だ', + word: { headword: '日本', word: '日本', reading: 'にほん' }, + }); + const hiId = insertKanjiOccurrence(db, stmts, { + sessionId, + videoId, + animeId, + lineIndex: 4, + text: '日本', + kanji: '日', + occurrenceCount: 3, + }); + + stmts.eventInsertStmt.run( + sessionId, + 1_000_100, + EVENT_SUBTITLE_LINE, + 1, + 0, + 900, + 0, + 0, + JSON.stringify({ kind: 'subtitle' }), + 1_000_100, + 1_000_100, + ); + 
stmts.eventInsertStmt.run( + sessionId, + 1_000_200, + EVENT_CARD_MINED, + 2, + 1000, + 1900, + 0, + 1, + JSON.stringify({ noteIds: [41] }), + 1_000_200, + 1_000_200, + ); + + assert.deepEqual(getAllDistinctHeadwords(db).sort(), ['子猫', '日本', '犬', '猫']); + assert.deepEqual(getAnimeDistinctHeadwords(db, animeId).sort(), ['子猫', '日本', '犬', '猫']); + assert.deepEqual(getMediaDistinctHeadwords(db, videoId).sort(), ['子猫', '日本', '犬', '猫']); + + const wordDetail = getWordDetail(db, nekoId); + assert.ok(wordDetail); + assert.equal(wordDetail.wordId, nekoId); + assert.equal(wordDetail.headword, '猫'); + assert.equal(wordDetail.word, '猫'); + assert.equal(wordDetail.reading, 'ねこ'); + assert.equal(wordDetail.partOfSpeech, 'noun'); + assert.equal(wordDetail.pos1, '名詞'); + assert.equal(wordDetail.pos2, '一般'); + assert.equal(wordDetail.pos3, ''); + assert.equal(wordDetail.frequency, 1); + assert.equal(wordDetail.firstSeen, 1_000_001); + assert.equal(wordDetail.lastSeen, 1_000_001); + assert.deepEqual(getWordAnimeAppearances(db, nekoId), [ + { animeId, animeTitle: 'Lexical Anime', occurrenceCount: 2 }, + ]); + assert.deepEqual( + getSimilarWords(db, nekoId, 5).map((row) => row.headword), + ['子猫'], + ); + + const kanjiDetail = getKanjiDetail(db, hiId); + assert.ok(kanjiDetail); + assert.equal(kanjiDetail.kanjiId, hiId); + assert.equal(kanjiDetail.kanji, '日'); + assert.equal(kanjiDetail.frequency, 1); + assert.equal(kanjiDetail.firstSeen, 2_000_004); + assert.equal(kanjiDetail.lastSeen, 2_000_004); + assert.deepEqual(getKanjiAnimeAppearances(db, hiId), [ + { animeId, animeTitle: 'Lexical Anime', occurrenceCount: 3 }, + ]); + assert.deepEqual( + getKanjiWords(db, hiId, 5).map((row) => row.headword), + ['日本'], + ); + + assert.deepEqual( + getSessionEvents(db, sessionId, 10, [EVENT_CARD_MINED]).map((row) => row.eventType), + [EVENT_CARD_MINED], + ); + } finally { + db.close(); + cleanupDbPath(dbPath); + } +}); + +test('split library helpers return anime/media session and analytics rows', 
() => { + const { db, dbPath, stmts } = createDb(); + + try { + const now = new Date(); + const todayLocalDay = Math.floor( + new Date(now.getFullYear(), now.getMonth(), now.getDate()).getTime() / 86_400_000, + ); + const animeId = getOrCreateAnimeRecord(db, { + parsedTitle: 'Library Anime', + canonicalTitle: 'Library Anime', + anilistId: null, + titleRomaji: null, + titleEnglish: null, + titleNative: null, + metadataJson: null, + }); + const videoId = getOrCreateVideoRecord(db, 'local:/tmp/library-episode-1.mkv', { + canonicalTitle: 'Library Episode 1', + sourcePath: '/tmp/library-episode-1.mkv', + sourceUrl: null, + sourceType: SOURCE_TYPE_LOCAL, + }); + linkVideoToAnimeRecord(db, videoId, { + animeId, + parsedBasename: 'library-episode-1.mkv', + parsedTitle: 'Library Anime', + parsedSeason: 1, + parsedEpisode: 1, + parserSource: 'test', + parserConfidence: 1, + parseMetadataJson: null, + }); + + const startedAtMs = new Date( + now.getFullYear(), + now.getMonth(), + now.getDate(), + 9, + 0, + 0, + ).getTime(); + const sessionId = startSessionRecord(db, videoId, startedAtMs).sessionId; + finalizeSessionMetrics(db, sessionId, startedAtMs, { + endedAtMs: startedAtMs + 55_000, + totalWatchedMs: 55_000, + activeWatchedMs: 45_000, + linesSeen: 4, + tokensSeen: 8, + cardsMined: 2, + }); + db.prepare( + ` + INSERT INTO imm_daily_rollups ( + rollup_day, video_id, total_sessions, total_active_min, total_lines_seen, + total_tokens_seen, total_cards, cards_per_hour, tokens_per_min, lookup_hit_rate, + CREATED_DATE, LAST_UPDATE_DATE + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + `, + ).run(todayLocalDay, videoId, 1, 45, 4, 8, 2, 2.66, 0.17, 0.5, startedAtMs, startedAtMs); + + db.prepare( + ` + INSERT INTO imm_media_art ( + video_id, anilist_id, cover_url, cover_blob, cover_blob_hash, title_romaji, + title_english, episodes_total, fetched_at_ms, CREATED_DATE, LAST_UPDATE_DATE + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
+ `, + ).run( + videoId, + 77, + 'https://images.test/library.jpg', + new Uint8Array([1, 2, 3]), + null, + 'Library Anime', + 'Library Anime', + 12, + startedAtMs, + startedAtMs, + startedAtMs, + ); + + db.prepare( + ` + INSERT INTO imm_session_events ( + session_id, ts_ms, event_type, line_index, segment_start_ms, segment_end_ms, + tokens_delta, cards_delta, payload_json, CREATED_DATE, LAST_UPDATE_DATE + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + `, + ).run( + sessionId, + startedAtMs + 40_000, + EVENT_CARD_MINED, + 4, + 4000, + 4900, + 0, + 2, + JSON.stringify({ noteIds: [101, 102] }), + startedAtMs + 40_000, + startedAtMs + 40_000, + ); + + insertWordOccurrence(db, stmts, { + sessionId, + videoId, + animeId, + lineIndex: 1, + text: '猫がいる', + word: { headword: '猫', word: '猫', reading: 'ねこ' }, + occurrenceCount: 3, + }); + insertWordOccurrence(db, stmts, { + sessionId, + videoId, + animeId, + lineIndex: 2, + text: '犬もいる', + word: { headword: '犬', word: '犬', reading: 'いぬ' }, + occurrenceCount: 1, + }); + + assert.deepEqual(getAnimeAnilistEntries(db, animeId), [ + { + anilistId: 77, + titleRomaji: 'Library Anime', + titleEnglish: 'Library Anime', + season: 1, + }, + ]); + assert.equal(getMediaSessions(db, videoId, 10)[0]?.sessionId, sessionId); + assert.equal(getEpisodeSessions(db, videoId)[0]?.sessionId, sessionId); + assert.equal(getMediaDailyRollups(db, videoId, 10)[0]?.totalActiveMin, 45); + assert.deepEqual(getStreakCalendar(db, 30), [{ epochDay: todayLocalDay, totalActiveMin: 45 }]); + assert.deepEqual( + getAnimeWords(db, animeId, 10).map((row) => row.headword), + ['猫', '犬'], + ); + assert.deepEqual( + getEpisodeWords(db, videoId, 10).map((row) => row.headword), + ['猫', '犬'], + ); + assert.deepEqual(getEpisodesPerDay(db, 10), [{ epochDay: todayLocalDay, episodeCount: 1 }]); + assert.deepEqual(getNewAnimePerDay(db, 10), [{ epochDay: todayLocalDay, newAnimeCount: 1 }]); + assert.deepEqual(getWatchTimePerAnime(db, 3650), [ + { + epochDay: todayLocalDay, + 
animeId, + animeTitle: 'Library Anime', + totalActiveMin: 45, + }, + ]); + assert.deepEqual(getEpisodeCardEvents(db, videoId), [ + { + eventId: 1, + sessionId, + tsMs: startedAtMs + 40_000, + cardsDelta: 2, + noteIds: [101, 102], + }, + ]); + } finally { + db.close(); + cleanupDbPath(dbPath); + } +}); + +test('split maintenance helpers update anime metadata and watched state', () => { + const { db, dbPath } = createDb(); + + try { + const animeId = getOrCreateAnimeRecord(db, { + parsedTitle: 'Metadata Anime', + canonicalTitle: 'Metadata Anime', + anilistId: null, + titleRomaji: null, + titleEnglish: null, + titleNative: null, + metadataJson: null, + }); + const videoId = getOrCreateVideoRecord(db, 'local:/tmp/metadata-episode-1.mkv', { + canonicalTitle: 'Metadata Episode 1', + sourcePath: '/tmp/metadata-episode-1.mkv', + sourceUrl: null, + sourceType: SOURCE_TYPE_LOCAL, + }); + linkVideoToAnimeRecord(db, videoId, { + animeId, + parsedBasename: 'metadata-episode-1.mkv', + parsedTitle: 'Metadata Anime', + parsedSeason: 1, + parsedEpisode: 1, + parserSource: 'test', + parserConfidence: 1, + parseMetadataJson: null, + }); + updateVideoMetadataRecord(db, videoId, { + sourceType: SOURCE_TYPE_LOCAL, + canonicalTitle: 'Metadata Episode 1', + durationMs: 222_000, + fileSizeBytes: null, + codecId: null, + containerId: null, + widthPx: null, + heightPx: null, + fpsX100: null, + bitrateKbps: null, + audioCodecId: null, + hashSha256: null, + screenshotPath: null, + metadataJson: null, + }); + + updateAnimeAnilistInfo(db, videoId, { + anilistId: 99, + titleRomaji: 'Metadata Romaji', + titleEnglish: 'Metadata English', + titleNative: 'メタデータ', + episodesTotal: 24, + }); + markVideoWatched(db, videoId, true); + + const animeRow = db + .prepare( + ` + SELECT anilist_id, title_romaji, title_english, title_native, episodes_total + FROM imm_anime + WHERE anime_id = ? 
+ `, + ) + .get(animeId) as { + anilist_id: number; + title_romaji: string; + title_english: string; + title_native: string; + episodes_total: number; + }; + + assert.equal(animeRow.anilist_id, 99); + assert.equal(animeRow.title_romaji, 'Metadata Romaji'); + assert.equal(animeRow.title_english, 'Metadata English'); + assert.equal(animeRow.title_native, 'メタデータ'); + assert.equal(animeRow.episodes_total, 24); + assert.equal(getVideoDurationMs(db, videoId), 222_000); + assert.equal(isVideoWatched(db, videoId), true); + } finally { + db.close(); + cleanupDbPath(dbPath); + } +}); + +test('split maintenance helpers delete multiple sessions and whole videos with dependent rows', () => { + const { db, dbPath, stmts } = createDb(); + + try { + const animeId = getOrCreateAnimeRecord(db, { + parsedTitle: 'Delete Anime', + canonicalTitle: 'Delete Anime', + anilistId: null, + titleRomaji: null, + titleEnglish: null, + titleNative: null, + metadataJson: null, + }); + const keepVideoId = getOrCreateVideoRecord(db, 'local:/tmp/delete-keep.mkv', { + canonicalTitle: 'Delete Keep', + sourcePath: '/tmp/delete-keep.mkv', + sourceUrl: null, + sourceType: SOURCE_TYPE_LOCAL, + }); + const dropVideoId = getOrCreateVideoRecord(db, 'local:/tmp/delete-drop.mkv', { + canonicalTitle: 'Delete Drop', + sourcePath: '/tmp/delete-drop.mkv', + sourceUrl: null, + sourceType: SOURCE_TYPE_LOCAL, + }); + linkVideoToAnimeRecord(db, keepVideoId, { + animeId, + parsedBasename: 'delete-keep.mkv', + parsedTitle: 'Delete Anime', + parsedSeason: 1, + parsedEpisode: 1, + parserSource: 'test', + parserConfidence: 1, + parseMetadataJson: null, + }); + linkVideoToAnimeRecord(db, dropVideoId, { + animeId, + parsedBasename: 'delete-drop.mkv', + parsedTitle: 'Delete Anime', + parsedSeason: 1, + parsedEpisode: 2, + parserSource: 'test', + parserConfidence: 1, + parseMetadataJson: null, + }); + + const keepSessionId = startSessionRecord(db, keepVideoId, 1_000_000).sessionId; + const dropSessionOne = 
startSessionRecord(db, dropVideoId, 2_000_000).sessionId; + const dropSessionTwo = startSessionRecord(db, dropVideoId, 3_000_000).sessionId; + finalizeSessionMetrics(db, keepSessionId, 1_000_000); + finalizeSessionMetrics(db, dropSessionOne, 2_000_000); + finalizeSessionMetrics(db, dropSessionTwo, 3_000_000); + + insertWordOccurrence(db, stmts, { + sessionId: dropSessionOne, + videoId: dropVideoId, + animeId, + lineIndex: 1, + text: '削除する猫', + word: { headword: '猫', word: '猫', reading: 'ねこ' }, + }); + insertKanjiOccurrence(db, stmts, { + sessionId: dropSessionOne, + videoId: dropVideoId, + animeId, + lineIndex: 2, + text: '日本', + kanji: '日', + }); + upsertCoverArt(db, dropVideoId, { + anilistId: 12, + coverUrl: 'https://images.test/delete.jpg', + coverBlob: new Uint8Array([7, 8, 9]), + titleRomaji: 'Delete Anime', + titleEnglish: 'Delete Anime', + episodesTotal: 2, + }); + + deleteSessions(db, [dropSessionOne, dropSessionTwo]); + + const deletedSessionCount = db + .prepare('SELECT COUNT(*) AS total FROM imm_sessions WHERE video_id = ?') + .get(dropVideoId) as { total: number }; + assert.equal(deletedSessionCount.total, 0); + + const keepReplacementSession = startSessionRecord(db, keepVideoId, 4_000_000).sessionId; + finalizeSessionMetrics(db, keepReplacementSession, 4_000_000); + + deleteVideo(db, dropVideoId); + + const remainingVideos = db + .prepare('SELECT video_id FROM imm_videos ORDER BY video_id') + .all() as Array<{ + video_id: number; + }>; + const coverRows = db.prepare('SELECT COUNT(*) AS total FROM imm_media_art').get() as { + total: number; + }; + + assert.deepEqual(remainingVideos, [{ video_id: keepVideoId }]); + assert.equal(coverRows.total, 0); + assert.equal( + ( + db.prepare('SELECT COUNT(*) AS total FROM imm_words').get() as { + total: number; + } + ).total, + 0, + ); + assert.equal( + ( + db.prepare('SELECT COUNT(*) AS total FROM imm_kanji').get() as { + total: number; + } + ).total, + 0, + ); + } finally { + db.close(); + cleanupDbPath(dbPath); 
+ } +}); diff --git a/src/core/services/immersion-tracker/__tests__/query.test.ts b/src/core/services/immersion-tracker/__tests__/query.test.ts index 345c6c1..de56cec 100644 --- a/src/core/services/immersion-tracker/__tests__/query.test.ts +++ b/src/core/services/immersion-tracker/__tests__/query.test.ts @@ -81,6 +81,32 @@ function cleanupDbPath(dbPath: string): void { } } +function withMockDate(fixedDate: Date, run: (realDate: typeof Date) => T): T { + const realDate = Date; + const fixedDateMs = fixedDate.getTime(); + + class MockDate extends Date { + constructor(...args: any[]) { + if (args.length === 0) { + super(fixedDateMs); + } else { + super(...(args as [any?, any?, any?, any?, any?, any?, any?])); + } + } + + static override now(): number { + return fixedDateMs; + } + } + + globalThis.Date = MockDate as DateConstructor; + try { + return run(realDate); + } finally { + globalThis.Date = realDate; + } +} + test('getSessionSummaries returns sessionId and canonicalTitle', () => { const dbPath = makeDbPath(); const db = new Database(dbPath); @@ -208,6 +234,104 @@ test('getAnimeEpisodes prefers the latest session media position when the latest } }); +test('getAnimeEpisodes includes unwatched episodes for the anime', () => { + const dbPath = makeDbPath(); + const db = new Database(dbPath); + + try { + ensureSchema(db); + const watchedVideoId = getOrCreateVideoRecord(db, 'local:/tmp/watched-episode.mkv', { + canonicalTitle: 'Watched Episode', + sourcePath: '/tmp/watched-episode.mkv', + sourceUrl: null, + sourceType: SOURCE_TYPE_LOCAL, + }); + const unwatchedVideoId = getOrCreateVideoRecord(db, 'local:/tmp/unwatched-episode.mkv', { + canonicalTitle: 'Unwatched Episode', + sourcePath: '/tmp/unwatched-episode.mkv', + sourceUrl: null, + sourceType: SOURCE_TYPE_LOCAL, + }); + const animeId = getOrCreateAnimeRecord(db, { + parsedTitle: 'Episode Coverage Anime', + canonicalTitle: 'Episode Coverage Anime', + anilistId: null, + titleRomaji: null, + titleEnglish: null, + 
titleNative: null, + metadataJson: null, + }); + linkVideoToAnimeRecord(db, watchedVideoId, { + animeId, + parsedBasename: 'watched-episode.mkv', + parsedTitle: 'Episode Coverage Anime', + parsedSeason: 1, + parsedEpisode: 1, + parserSource: 'fallback', + parserConfidence: 1, + parseMetadataJson: '{"episode":1}', + }); + linkVideoToAnimeRecord(db, unwatchedVideoId, { + animeId, + parsedBasename: 'unwatched-episode.mkv', + parsedTitle: 'Episode Coverage Anime', + parsedSeason: 1, + parsedEpisode: 2, + parserSource: 'fallback', + parserConfidence: 1, + parseMetadataJson: '{"episode":2}', + }); + + const watchedSessionId = startSessionRecord(db, watchedVideoId, 1_000_000).sessionId; + db.prepare( + ` + UPDATE imm_sessions + SET + ended_at_ms = ?, + status = 2, + ended_media_ms = ?, + active_watched_ms = ?, + cards_mined = ?, + tokens_seen = ?, + yomitan_lookup_count = ?, + LAST_UPDATE_DATE = ? + WHERE session_id = ? + `, + ).run(1_005_000, 7_000, 3_000, 2, 20, 4, 1_005_000, watchedSessionId); + + const episodes = getAnimeEpisodes(db, animeId); + assert.equal(episodes.length, 2); + assert.deepEqual( + episodes.map((episode) => ({ + videoId: episode.videoId, + totalSessions: episode.totalSessions, + totalActiveMs: episode.totalActiveMs, + totalCards: episode.totalCards, + totalTokensSeen: episode.totalTokensSeen, + })), + [ + { + videoId: watchedVideoId, + totalSessions: 1, + totalActiveMs: 3_000, + totalCards: 2, + totalTokensSeen: 20, + }, + { + videoId: unwatchedVideoId, + totalSessions: 0, + totalActiveMs: 0, + totalCards: 0, + totalTokensSeen: 0, + }, + ], + ); + } finally { + db.close(); + cleanupDbPath(dbPath); + } +}); + test('getAnimeEpisodes falls back to the latest subtitle segment end when session progress checkpoints are missing', () => { const dbPath = makeDbPath(); const db = new Database(dbPath); @@ -470,8 +594,8 @@ test('getTrendsDashboard returns chart-ready aggregated series', () => { parseMetadataJson: null, }); - const dayOneStart = new Date(2026, 
2, 15, 12, 0, 0, 0).getTime(); - const dayTwoStart = new Date(2026, 2, 16, 18, 0, 0, 0).getTime(); + const dayOneStart = 1_700_000_000_000; + const dayTwoStart = dayOneStart + 86_400_000; const sessionOne = startSessionRecord(db, videoId, dayOneStart); const sessionTwo = startSessionRecord(db, videoId, dayTwoStart); @@ -586,6 +710,299 @@ test('getTrendsDashboard returns chart-ready aggregated series', () => { } }); +test('getTrendsDashboard keeps local-midnight session buckets separate', () => { + const dbPath = makeDbPath(); + const db = new Database(dbPath); + + try { + ensureSchema(db); + const stmts = createTrackerPreparedStatements(db); + const videoId = getOrCreateVideoRecord(db, 'local:/tmp/local-midnight-trends.mkv', { + canonicalTitle: 'Local Midnight Trends', + sourcePath: '/tmp/local-midnight-trends.mkv', + sourceUrl: null, + sourceType: SOURCE_TYPE_LOCAL, + }); + const animeId = getOrCreateAnimeRecord(db, { + parsedTitle: 'Local Midnight Trends', + canonicalTitle: 'Local Midnight Trends', + anilistId: null, + titleRomaji: null, + titleEnglish: null, + titleNative: null, + metadataJson: null, + }); + linkVideoToAnimeRecord(db, videoId, { + animeId, + parsedBasename: 'local-midnight-trends.mkv', + parsedTitle: 'Local Midnight Trends', + parsedSeason: 1, + parsedEpisode: 1, + parserSource: 'test', + parserConfidence: 1, + parseMetadataJson: null, + }); + + const beforeMidnight = new Date(2026, 2, 1, 23, 30).getTime(); + const afterMidnight = new Date(2026, 2, 2, 0, 30).getTime(); + const firstSessionId = startSessionRecord(db, videoId, beforeMidnight).sessionId; + const secondSessionId = startSessionRecord(db, videoId, afterMidnight).sessionId; + + for (const [sessionId, startedAtMs, tokensSeen, lookupCount] of [ + [firstSessionId, beforeMidnight, 100, 4], + [secondSessionId, afterMidnight, 120, 6], + ] as const) { + stmts.telemetryInsertStmt.run( + sessionId, + startedAtMs + 60_000, + 60_000, + 60_000, + 1, + tokensSeen, + 0, + lookupCount, + lookupCount, 
+ lookupCount, + 0, + 0, + 0, + 0, + startedAtMs + 60_000, + startedAtMs + 60_000, + ); + db.prepare( + ` + UPDATE imm_sessions + SET + ended_at_ms = ?, + status = 2, + total_watched_ms = ?, + active_watched_ms = ?, + lines_seen = ?, + tokens_seen = ?, + lookup_count = ?, + lookup_hits = ?, + yomitan_lookup_count = ?, + LAST_UPDATE_DATE = ? + WHERE session_id = ? + `, + ).run( + startedAtMs + 60_000, + 60_000, + 60_000, + 1, + tokensSeen, + lookupCount, + lookupCount, + lookupCount, + startedAtMs + 60_000, + sessionId, + ); + } + + const dashboard = getTrendsDashboard(db, 'all', 'day'); + assert.equal(dashboard.progress.lookups.length, 2); + assert.deepEqual( + dashboard.progress.lookups.map((point) => point.value), + [4, 10], + ); + assert.equal(dashboard.ratios.lookupsPerHundred.length, 2); + } finally { + db.close(); + cleanupDbPath(dbPath); + } +}); + +test('getTrendsDashboard month grouping spans every touched calendar month and keeps progress monthly', () => { + const dbPath = makeDbPath(); + const db = new Database(dbPath); + withMockDate(new Date(2026, 2, 1, 12, 0, 0), (RealDate) => { + try { + ensureSchema(db); + const stmts = createTrackerPreparedStatements(db); + const febVideoId = getOrCreateVideoRecord(db, 'local:/tmp/feb-trends.mkv', { + canonicalTitle: 'Monthly Trends', + sourcePath: '/tmp/feb-trends.mkv', + sourceUrl: null, + sourceType: SOURCE_TYPE_LOCAL, + }); + const marVideoId = getOrCreateVideoRecord(db, 'local:/tmp/mar-trends.mkv', { + canonicalTitle: 'Monthly Trends', + sourcePath: '/tmp/mar-trends.mkv', + sourceUrl: null, + sourceType: SOURCE_TYPE_LOCAL, + }); + const animeId = getOrCreateAnimeRecord(db, { + parsedTitle: 'Monthly Trends', + canonicalTitle: 'Monthly Trends', + anilistId: null, + titleRomaji: null, + titleEnglish: null, + titleNative: null, + metadataJson: null, + }); + linkVideoToAnimeRecord(db, febVideoId, { + animeId, + parsedBasename: 'feb-trends.mkv', + parsedTitle: 'Monthly Trends', + parsedSeason: 1, + parsedEpisode: 1, 
+ parserSource: 'test', + parserConfidence: 1, + parseMetadataJson: null, + }); + linkVideoToAnimeRecord(db, marVideoId, { + animeId, + parsedBasename: 'mar-trends.mkv', + parsedTitle: 'Monthly Trends', + parsedSeason: 1, + parsedEpisode: 2, + parserSource: 'test', + parserConfidence: 1, + parseMetadataJson: null, + }); + + const febStartedAtMs = new RealDate(2026, 1, 15, 20, 0, 0).getTime(); + const marStartedAtMs = new RealDate(2026, 2, 1, 9, 0, 0).getTime(); + const febSessionId = startSessionRecord(db, febVideoId, febStartedAtMs).sessionId; + const marSessionId = startSessionRecord(db, marVideoId, marStartedAtMs).sessionId; + + for (const [sessionId, startedAtMs, tokensSeen, cardsMined, yomitanLookupCount] of [ + [febSessionId, febStartedAtMs, 100, 2, 3], + [marSessionId, marStartedAtMs, 120, 4, 5], + ] as const) { + stmts.telemetryInsertStmt.run( + sessionId, + startedAtMs + 60_000, + 30 * 60_000, + 30 * 60_000, + 4, + tokensSeen, + cardsMined, + yomitanLookupCount, + yomitanLookupCount, + yomitanLookupCount, + 0, + 0, + 0, + 0, + startedAtMs + 60_000, + startedAtMs + 60_000, + ); + db.prepare( + ` + UPDATE imm_sessions + SET + ended_at_ms = ?, + status = 2, + total_watched_ms = ?, + active_watched_ms = ?, + lines_seen = ?, + tokens_seen = ?, + cards_mined = ?, + lookup_count = ?, + lookup_hits = ?, + yomitan_lookup_count = ?, + LAST_UPDATE_DATE = ? + WHERE session_id = ? + `, + ).run( + startedAtMs + 60_000, + 30 * 60_000, + 30 * 60_000, + 4, + tokensSeen, + cardsMined, + yomitanLookupCount, + yomitanLookupCount, + yomitanLookupCount, + startedAtMs + 60_000, + sessionId, + ); + } + + const insertDailyRollup = db.prepare( + ` + INSERT INTO imm_daily_rollups ( + rollup_day, video_id, total_sessions, total_active_min, total_lines_seen, + total_tokens_seen, total_cards, CREATED_DATE, LAST_UPDATE_DATE + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) 
+ `, + ); + const insertMonthlyRollup = db.prepare( + ` + INSERT INTO imm_monthly_rollups ( + rollup_month, video_id, total_sessions, total_active_min, total_lines_seen, + total_tokens_seen, total_cards, CREATED_DATE, LAST_UPDATE_DATE + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) + `, + ); + const febEpochDay = Math.floor(febStartedAtMs / 86_400_000); + const marEpochDay = Math.floor(marStartedAtMs / 86_400_000); + insertDailyRollup.run(febEpochDay, febVideoId, 1, 30, 4, 100, 2, febStartedAtMs, febStartedAtMs); + insertDailyRollup.run(marEpochDay, marVideoId, 1, 30, 4, 120, 4, marStartedAtMs, marStartedAtMs); + insertMonthlyRollup.run(202602, febVideoId, 1, 30, 4, 100, 2, febStartedAtMs, febStartedAtMs); + insertMonthlyRollup.run(202603, marVideoId, 1, 30, 4, 120, 4, marStartedAtMs, marStartedAtMs); + + db.prepare( + ` + INSERT INTO imm_words ( + headword, word, reading, part_of_speech, pos1, pos2, pos3, first_seen, last_seen, frequency + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + `, + ).run( + '二月', + '二月', + 'にがつ', + 'noun', + '名詞', + '', + '', + Math.floor(febStartedAtMs / 1000), + Math.floor(febStartedAtMs / 1000), + 1, + ); + db.prepare( + ` + INSERT INTO imm_words ( + headword, word, reading, part_of_speech, pos1, pos2, pos3, first_seen, last_seen, frequency + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
+ `, + ).run( + '三月', + '三月', + 'さんがつ', + 'noun', + '名詞', + '', + '', + Math.floor(marStartedAtMs / 1000), + Math.floor(marStartedAtMs / 1000), + 1, + ); + + const dashboard = getTrendsDashboard(db, '30d', 'month'); + + assert.equal(dashboard.activity.watchTime.length, 2); + assert.deepEqual( + dashboard.progress.newWords.map((point) => point.label), + dashboard.activity.watchTime.map((point) => point.label), + ); + assert.deepEqual( + dashboard.progress.episodes.map((point) => point.label), + dashboard.activity.watchTime.map((point) => point.label), + ); + assert.deepEqual( + dashboard.progress.lookups.map((point) => point.label), + dashboard.activity.watchTime.map((point) => point.label), + ); + } finally { + db.close(); + cleanupDbPath(dbPath); + } + }); +}); + test('getQueryHints reads all-time totals from lifetime summary', () => { const dbPath = makeDbPath(); const db = new Database(dbPath); @@ -656,6 +1073,61 @@ test('getQueryHints reads all-time totals from lifetime summary', () => { } }); +test('getQueryHints computes weekly new-word cutoff from calendar midnights', () => { + const dbPath = makeDbPath(); + const db = new Database(dbPath); + + withMockDate(new Date(2026, 2, 15, 12, 0, 0), (RealDate) => { + try { + ensureSchema(db); + + const insertWord = db.prepare( + ` + INSERT INTO imm_words ( + headword, word, reading, part_of_speech, pos1, pos2, pos3, first_seen, last_seen, frequency + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
+ `, + ); + const justBeforeWeekBoundary = Math.floor( + new RealDate(2026, 2, 7, 23, 30, 0).getTime() / 1000, + ); + const justAfterWeekBoundary = Math.floor( + new RealDate(2026, 2, 8, 0, 30, 0).getTime() / 1000, + ); + insertWord.run( + '境界前', + '境界前', + 'きょうかいまえ', + 'noun', + '名詞', + '', + '', + justBeforeWeekBoundary, + justBeforeWeekBoundary, + 1, + ); + insertWord.run( + '境界後', + '境界後', + 'きょうかいご', + 'noun', + '名詞', + '', + '', + justAfterWeekBoundary, + justAfterWeekBoundary, + 1, + ); + + const hints = getQueryHints(db); + assert.equal(hints.newWordsThisWeek, 1); + } finally { + db.close(); + cleanupDbPath(dbPath); + } + }); +}); + test('getQueryHints counts new words by distinct headword first-seen time', () => { const dbPath = makeDbPath(); const db = new Database(dbPath); @@ -1024,6 +1496,37 @@ test('getMonthlyRollups returns all rows for the most recent rollup months', () } }); +test('getMonthlyRollups derives rate metrics from stored monthly totals', () => { + const dbPath = makeDbPath(); + const db = new Database(dbPath); + try { + ensureSchema(db); + const insertRollup = db.prepare( + ` + INSERT INTO imm_monthly_rollups ( + rollup_month, video_id, total_sessions, total_active_min, total_lines_seen, + total_tokens_seen, total_cards, CREATED_DATE, LAST_UPDATE_DATE + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) + `, + ); + const nowMs = Date.now(); + insertRollup.run(202602, 1, 2, 30, 20, 90, 15, nowMs, nowMs); + insertRollup.run(202602, 2, 1, 0, 10, 25, 5, nowMs, nowMs); + + const rows = getMonthlyRollups(db, 1); + assert.equal(rows.length, 2); + const rowsByVideoId = new Map(rows.map((row) => [row.videoId, row])); + assert.equal(rowsByVideoId.get(1)?.cardsPerHour, 30); + assert.equal(rowsByVideoId.get(1)?.tokensPerMin, 3); + assert.equal(rowsByVideoId.get(1)?.lookupHitRate ?? null, null); + assert.equal(rowsByVideoId.get(2)?.cardsPerHour ?? null, null); + assert.equal(rowsByVideoId.get(2)?.tokensPerMin ?? 
null, null); + } finally { + db.close(); + cleanupDbPath(dbPath); + } +}); + test('getAnimeDailyRollups returns all rows for the most recent rollup days', () => { const dbPath = makeDbPath(); const db = new Database(dbPath); @@ -2113,7 +2616,10 @@ test('media library and detail queries include joined youtube metadata when pres assert.equal(detail?.youtubeVideoId, 'abc123'); assert.equal(detail?.videoUrl, 'https://www.youtube.com/watch?v=abc123'); assert.equal(detail?.videoThumbnailUrl, 'https://i.ytimg.com/vi/abc123/hqdefault.jpg'); - assert.equal(detail?.channelThumbnailUrl, 'https://yt3.googleusercontent.com/channel-avatar=s88'); + assert.equal( + detail?.channelThumbnailUrl, + 'https://yt3.googleusercontent.com/channel-avatar=s88', + ); assert.equal(detail?.uploaderId, '@creator'); assert.equal(detail?.uploaderUrl, 'https://www.youtube.com/@creator'); assert.equal(detail?.description, 'Video description'); @@ -2231,9 +2737,8 @@ test('cover art queries reuse a shared blob across duplicate anime art rows', () const animeArt = getAnimeCoverArt(db, animeId); const library = getMediaLibrary(db); - assert.equal(artOne?.coverBlob?.length, 4); - assert.equal(artTwo?.coverBlob?.length, 4); - assert.deepEqual(artOne?.coverBlob, artTwo?.coverBlob); + assert.deepEqual(artOne?.coverBlob, Buffer.from([1, 2, 3, 4])); + assert.deepEqual(artTwo?.coverBlob, Buffer.from([9, 9, 9, 9])); assert.equal(animeArt?.coverBlob?.length, 4); assert.deepEqual( library.map((row) => ({ @@ -2251,6 +2756,52 @@ test('cover art queries reuse a shared blob across duplicate anime art rows', () } }); +test('upsertCoverArt prefers freshly fetched bytes over a reused shared hash', () => { + const dbPath = makeDbPath(); + const db = new Database(dbPath); + + try { + ensureSchema(db); + const originalVideoId = getOrCreateVideoRecord(db, 'local:/tmp/shared-cover-original.mkv', { + canonicalTitle: 'Shared Cover Original', + sourcePath: '/tmp/shared-cover-original.mkv', + sourceUrl: null, + sourceType: 
SOURCE_TYPE_LOCAL, + }); + const refreshedVideoId = getOrCreateVideoRecord(db, 'local:/tmp/shared-cover-refresh.mkv', { + canonicalTitle: 'Shared Cover Refresh', + sourcePath: '/tmp/shared-cover-refresh.mkv', + sourceUrl: null, + sourceType: SOURCE_TYPE_LOCAL, + }); + + upsertCoverArt(db, originalVideoId, { + anilistId: 999, + coverUrl: 'https://images.test/shared-refresh.jpg', + coverBlob: Buffer.from([1, 2, 3, 4]), + titleRomaji: 'Shared Cover Refresh', + titleEnglish: 'Shared Cover Refresh', + episodesTotal: 12, + }); + upsertCoverArt(db, refreshedVideoId, { + anilistId: 999, + coverUrl: 'https://images.test/shared-refresh.jpg', + coverBlob: Buffer.from([9, 8, 7, 6]), + titleRomaji: 'Shared Cover Refresh', + titleEnglish: 'Shared Cover Refresh', + episodesTotal: 12, + }); + + const originalArt = getCoverArt(db, originalVideoId); + const refreshedArt = getCoverArt(db, refreshedVideoId); + assert.deepEqual(originalArt?.coverBlob, Buffer.from([1, 2, 3, 4])); + assert.deepEqual(refreshedArt?.coverBlob, Buffer.from([9, 8, 7, 6])); + } finally { + db.close(); + cleanupDbPath(dbPath); + } +}); + test('anime/media detail and episode queries use ended-session metrics when telemetry rows are absent', () => { const dbPath = makeDbPath(); const db = new Database(dbPath); @@ -2833,13 +3384,13 @@ test('deleteSession rebuilds word and kanji aggregates from retained subtitle li assert.ok(sharedWordRow); assert.equal(sharedWordRow.frequency, 1); - assert.equal(sharedWordRow.first_seen, keptTs); - assert.equal(sharedWordRow.last_seen, keptTs); + assert.equal(sharedWordRow.first_seen, Math.floor(keptTs / 1000)); + assert.equal(sharedWordRow.last_seen, Math.floor(keptTs / 1000)); assert.equal(deletedOnlyWordRow ?? 
null, null); assert.ok(sharedKanjiRow); assert.equal(sharedKanjiRow.frequency, 1); - assert.equal(sharedKanjiRow.first_seen, keptTs); - assert.equal(sharedKanjiRow.last_seen, keptTs); + assert.equal(sharedKanjiRow.first_seen, Math.floor(keptTs / 1000)); + assert.equal(sharedKanjiRow.last_seen, Math.floor(keptTs / 1000)); assert.equal(deletedOnlyKanjiRow ?? null, null); } finally { db.close(); @@ -3015,22 +3566,24 @@ test('deleteSession removes zero-session media from library and trends', () => { const lifetimeMediaCount = Number( ( - db.prepare('SELECT COUNT(*) AS total FROM imm_lifetime_media WHERE video_id = ?').get( - videoId, - ) as { total: number } + db + .prepare('SELECT COUNT(*) AS total FROM imm_lifetime_media WHERE video_id = ?') + .get(videoId) as { total: number } ).total, ); const lifetimeAnimeCount = Number( ( - db.prepare('SELECT COUNT(*) AS total FROM imm_lifetime_anime WHERE anime_id = ?').get( - animeId, - ) as { total: number } + db + .prepare('SELECT COUNT(*) AS total FROM imm_lifetime_anime WHERE anime_id = ?') + .get(animeId) as { total: number } ).total, ); const appliedSessionCount = Number( ( db - .prepare('SELECT COUNT(*) AS total FROM imm_lifetime_applied_sessions WHERE session_id = ?') + .prepare( + 'SELECT COUNT(*) AS total FROM imm_lifetime_applied_sessions WHERE session_id = ?', + ) .get(sessionId) as { total: number } ).total, ); diff --git a/src/core/services/immersion-tracker/lifetime.ts b/src/core/services/immersion-tracker/lifetime.ts index 1119ea5..82c5c8a 100644 --- a/src/core/services/immersion-tracker/lifetime.ts +++ b/src/core/services/immersion-tracker/lifetime.ts @@ -1,5 +1,6 @@ import type { DatabaseSync } from './sqlite'; import { finalizeSessionRecord } from './session'; +import { nowMs } from './time'; import type { LifetimeRebuildSummary, SessionState } from './types'; interface TelemetryRow { @@ -97,8 +98,7 @@ function isFirstSessionForLocalDay( ` SELECT COUNT(*) AS count FROM imm_sessions - WHERE CAST(strftime('%s', 
started_at_ms / 1000, 'unixepoch', 'localtime') AS INTEGER) / 86400 - = CAST(strftime('%s', ? / 1000, 'unixepoch', 'localtime') AS INTEGER) / 86400 + WHERE date(started_at_ms / 1000, 'unixepoch', 'localtime') = date(? / 1000, 'unixepoch', 'localtime') AND ( started_at_ms < ? OR (started_at_ms = ? AND session_id < ?) @@ -393,7 +393,7 @@ export function applySessionLifetimeSummary( ON CONFLICT(session_id) DO NOTHING `, ) - .run(session.sessionId, endedAtMs, Date.now(), Date.now()); + .run(session.sessionId, endedAtMs, nowMs(), nowMs()); if ((applyResult.changes ?? 0) <= 0) { return; @@ -468,7 +468,7 @@ export function applySessionLifetimeSummary( ? 1 : 0; - const nowMs = Date.now(); + const updatedAtMs = nowMs(); db.prepare( ` UPDATE imm_lifetime_global @@ -490,13 +490,13 @@ export function applySessionLifetimeSummary( isFirstSessionForVideoRun ? 1 : 0, isFirstCompletedSessionForVideoRun ? 1 : 0, animeCompletedDelta, - nowMs, + updatedAtMs, ); upsertLifetimeMedia( db, session.videoId, - nowMs, + updatedAtMs, activeMs, cardsMined, linesSeen, @@ -510,7 +510,7 @@ export function applySessionLifetimeSummary( upsertLifetimeAnime( db, video.anime_id, - nowMs, + updatedAtMs, activeMs, cardsMined, linesSeen, @@ -524,7 +524,7 @@ export function applySessionLifetimeSummary( } export function rebuildLifetimeSummaries(db: DatabaseSync): LifetimeRebuildSummary { - const rebuiltAtMs = Date.now(); + const rebuiltAtMs = nowMs(); db.exec('BEGIN'); try { const summary = rebuildLifetimeSummariesInTransaction(db, rebuiltAtMs); @@ -538,7 +538,7 @@ export function rebuildLifetimeSummaries(db: DatabaseSync): LifetimeRebuildSumma export function rebuildLifetimeSummariesInTransaction( db: DatabaseSync, - rebuiltAtMs = Date.now(), + rebuiltAtMs = nowMs(), ): LifetimeRebuildSummary { return rebuildLifetimeSummariesInternal(db, rebuiltAtMs); } diff --git a/src/core/services/immersion-tracker/maintenance.test.ts b/src/core/services/immersion-tracker/maintenance.test.ts index 0b27a2a..cdb6225 
100644 --- a/src/core/services/immersion-tracker/maintenance.test.ts +++ b/src/core/services/immersion-tracker/maintenance.test.ts @@ -31,9 +31,9 @@ test('pruneRawRetention uses session retention separately from telemetry retenti try { ensureSchema(db); - const nowMs = 90 * 86_400_000; - const staleEndedAtMs = nowMs - 40 * 86_400_000; - const keptEndedAtMs = nowMs - 5 * 86_400_000; + const nowMs = 1_000_000_000; + const staleEndedAtMs = nowMs - 400_000_000; + const keptEndedAtMs = nowMs - 50_000_000; db.exec(` INSERT INTO imm_videos ( @@ -49,14 +49,14 @@ test('pruneRawRetention uses session retention separately from telemetry retenti INSERT INTO imm_session_telemetry ( session_id, sample_ms, total_watched_ms, active_watched_ms, CREATED_DATE, LAST_UPDATE_DATE ) VALUES - (1, ${nowMs - 2 * 86_400_000}, 0, 0, ${nowMs}, ${nowMs}), - (2, ${nowMs - 12 * 60 * 60 * 1000}, 0, 0, ${nowMs}, ${nowMs}); + (1, ${nowMs - 200_000_000}, 0, 0, ${nowMs}, ${nowMs}), + (2, ${nowMs - 10_000_000}, 0, 0, ${nowMs}, ${nowMs}); `); const result = pruneRawRetention(db, nowMs, { - eventsRetentionMs: 7 * 86_400_000, - telemetryRetentionMs: 1 * 86_400_000, - sessionsRetentionMs: 30 * 86_400_000, + eventsRetentionMs: 120_000_000, + telemetryRetentionMs: 80_000_000, + sessionsRetentionMs: 300_000_000, }); const remainingSessions = db @@ -82,15 +82,13 @@ test('pruneRawRetention uses session retention separately from telemetry retenti } }); -test('raw retention keeps rollups and rollup retention prunes them separately', () => { +test('pruneRawRetention skips disabled retention windows', () => { const dbPath = makeDbPath(); const db = new Database(dbPath); try { ensureSchema(db); - const nowMs = Date.UTC(2026, 2, 16, 12, 0, 0, 0); - const oldDay = Math.floor((nowMs - 90 * 86_400_000) / 86_400_000); - const oldMonth = toMonthKey(nowMs - 400 * 86_400_000); + const nowMs = 1_000_000_000; db.exec(` INSERT INTO imm_videos ( @@ -101,12 +99,79 @@ test('raw retention keeps rollups and rollup retention prunes 
them separately', INSERT INTO imm_sessions ( session_id, session_uuid, video_id, started_at_ms, ended_at_ms, status, CREATED_DATE, LAST_UPDATE_DATE ) VALUES ( - 1, 'session-1', 1, ${nowMs - 90 * 86_400_000}, ${nowMs - 90 * 86_400_000 + 1_000}, 2, ${nowMs}, ${nowMs} + 1, 'session-1', 1, ${nowMs - 1_000}, ${nowMs - 500}, 2, ${nowMs}, ${nowMs} ); INSERT INTO imm_session_telemetry ( session_id, sample_ms, total_watched_ms, active_watched_ms, CREATED_DATE, LAST_UPDATE_DATE ) VALUES ( - 1, ${nowMs - 90 * 86_400_000}, 0, 0, ${nowMs}, ${nowMs} + 1, ${nowMs - 2_000}, 0, 0, ${nowMs}, ${nowMs} + ); + INSERT INTO imm_session_events ( + session_id, event_type, ts_ms, payload_json, CREATED_DATE, LAST_UPDATE_DATE + ) VALUES ( + 1, 1, ${nowMs - 3_000}, '{}', ${nowMs}, ${nowMs} + ); + `); + + const result = pruneRawRetention(db, nowMs, { + eventsRetentionMs: Number.POSITIVE_INFINITY, + telemetryRetentionMs: Number.POSITIVE_INFINITY, + sessionsRetentionMs: Number.POSITIVE_INFINITY, + }); + + const remainingSessionEvents = db + .prepare('SELECT COUNT(*) AS count FROM imm_session_events') + .get() as { count: number }; + const remainingTelemetry = db + .prepare('SELECT COUNT(*) AS count FROM imm_session_telemetry') + .get() as { count: number }; + const remainingSessions = db + .prepare('SELECT COUNT(*) AS count FROM imm_sessions') + .get() as { count: number }; + + assert.equal(result.deletedSessionEvents, 0); + assert.equal(result.deletedTelemetryRows, 0); + assert.equal(result.deletedEndedSessions, 0); + assert.equal(remainingSessionEvents.count, 1); + assert.equal(remainingTelemetry.count, 1); + assert.equal(remainingSessions.count, 1); + } finally { + db.close(); + cleanupDbPath(dbPath); + } +}); + +test('toMonthKey floors negative timestamps into the prior UTC month', () => { + assert.equal(toMonthKey(-1), 196912); + assert.equal(toMonthKey(-86_400_000), 196912); + assert.equal(toMonthKey(0), 197001); +}); + +test('raw retention keeps rollups and rollup retention prunes them 
separately', () => { + const dbPath = makeDbPath(); + const db = new Database(dbPath); + + try { + ensureSchema(db); + const nowMs = 1_000_000_000; + const oldDay = Math.floor((nowMs - 200_000_000) / 86_400_000); + const oldMonth = 196912; + + db.exec(` + INSERT INTO imm_videos ( + video_id, video_key, canonical_title, source_type, duration_ms, CREATED_DATE, LAST_UPDATE_DATE + ) VALUES ( + 1, 'local:/tmp/video.mkv', 'Video', 1, 0, ${nowMs}, ${nowMs} + ); + INSERT INTO imm_sessions ( + session_id, session_uuid, video_id, started_at_ms, ended_at_ms, status, CREATED_DATE, LAST_UPDATE_DATE + ) VALUES ( + 1, 'session-1', 1, ${nowMs - 200_000_000}, ${nowMs - 199_999_000}, 2, ${nowMs}, ${nowMs} + ); + INSERT INTO imm_session_telemetry ( + session_id, sample_ms, total_watched_ms, active_watched_ms, CREATED_DATE, LAST_UPDATE_DATE + ) VALUES ( + 1, ${nowMs - 200_000_000}, 0, 0, ${nowMs}, ${nowMs} ); INSERT INTO imm_daily_rollups ( rollup_day, video_id, total_sessions, total_active_min, total_lines_seen, @@ -123,9 +188,9 @@ test('raw retention keeps rollups and rollup retention prunes them separately', `); pruneRawRetention(db, nowMs, { - eventsRetentionMs: 7 * 86_400_000, - telemetryRetentionMs: 30 * 86_400_000, - sessionsRetentionMs: 30 * 86_400_000, + eventsRetentionMs: 120_000_000, + telemetryRetentionMs: 120_000_000, + sessionsRetentionMs: 120_000_000, }); const rollupsAfterRawPrune = db @@ -139,8 +204,8 @@ test('raw retention keeps rollups and rollup retention prunes them separately', assert.equal(monthlyAfterRawPrune?.total, 1); const rollupPrune = pruneRollupRetention(db, nowMs, { - dailyRollupRetentionMs: 30 * 86_400_000, - monthlyRollupRetentionMs: 365 * 86_400_000, + dailyRollupRetentionMs: 120_000_000, + monthlyRollupRetentionMs: 1, }); const rollupsAfterRollupPrune = db diff --git a/src/core/services/immersion-tracker/maintenance.ts b/src/core/services/immersion-tracker/maintenance.ts index 13f7e39..1ed9bc9 100644 --- 
a/src/core/services/immersion-tracker/maintenance.ts +++ b/src/core/services/immersion-tracker/maintenance.ts @@ -1,4 +1,6 @@ import type { DatabaseSync } from './sqlite'; +import { nowMs } from './time'; +import { toDbMs } from './query-shared'; const ROLLUP_STATE_KEY = 'last_rollup_sample_ms'; const DAILY_MS = 86_400_000; @@ -25,38 +27,53 @@ interface RawRetentionResult { } export function toMonthKey(timestampMs: number): number { - const monthDate = new Date(timestampMs); - return monthDate.getUTCFullYear() * 100 + monthDate.getUTCMonth() + 1; + const epochDay = Math.floor(timestampMs / DAILY_MS); + const z = epochDay + 719468; + const era = Math.floor(z / 146097); + const doe = z - era * 146097; + const yoe = Math.floor( + (doe - Math.floor(doe / 1460) + Math.floor(doe / 36524) - Math.floor(doe / 146096)) / 365, + ); + let year = yoe + era * 400; + const doy = doe - (365 * yoe + Math.floor(yoe / 4) - Math.floor(yoe / 100)); + const mp = Math.floor((5 * doy + 2) / 153); + const month = mp + (mp < 10 ? 
3 : -9); + if (month <= 2) { + year += 1; + } + return year * 100 + month; } export function pruneRawRetention( db: DatabaseSync, - nowMs: number, + currentMs: number, policy: { eventsRetentionMs: number; telemetryRetentionMs: number; sessionsRetentionMs: number; }, ): RawRetentionResult { - const eventCutoff = nowMs - policy.eventsRetentionMs; - const telemetryCutoff = nowMs - policy.telemetryRetentionMs; - const sessionsCutoff = nowMs - policy.sessionsRetentionMs; - - const deletedSessionEvents = ( - db.prepare(`DELETE FROM imm_session_events WHERE ts_ms < ?`).run(eventCutoff) as { - changes: number; - } - ).changes; - const deletedTelemetryRows = ( - db.prepare(`DELETE FROM imm_session_telemetry WHERE sample_ms < ?`).run(telemetryCutoff) as { - changes: number; - } - ).changes; - const deletedEndedSessions = ( - db - .prepare(`DELETE FROM imm_sessions WHERE ended_at_ms IS NOT NULL AND ended_at_ms < ?`) - .run(sessionsCutoff) as { changes: number } - ).changes; + const deletedSessionEvents = Number.isFinite(policy.eventsRetentionMs) + ? ( + db.prepare(`DELETE FROM imm_session_events WHERE ts_ms < ?`).run( + toDbMs(currentMs - policy.eventsRetentionMs), + ) as { changes: number } + ).changes + : 0; + const deletedTelemetryRows = Number.isFinite(policy.telemetryRetentionMs) + ? ( + db + .prepare(`DELETE FROM imm_session_telemetry WHERE sample_ms < ?`) + .run(toDbMs(currentMs - policy.telemetryRetentionMs)) as { changes: number } + ).changes + : 0; + const deletedEndedSessions = Number.isFinite(policy.sessionsRetentionMs) + ? 
( + db + .prepare(`DELETE FROM imm_sessions WHERE ended_at_ms IS NOT NULL AND ended_at_ms < ?`) + .run(toDbMs(currentMs - policy.sessionsRetentionMs)) as { changes: number } + ).changes + : 0; return { deletedSessionEvents, @@ -67,7 +84,7 @@ export function pruneRawRetention( export function pruneRollupRetention( db: DatabaseSync, - nowMs: number, + currentMs: number, policy: { dailyRollupRetentionMs: number; monthlyRollupRetentionMs: number; @@ -77,7 +94,7 @@ export function pruneRollupRetention( ? ( db .prepare(`DELETE FROM imm_daily_rollups WHERE rollup_day < ?`) - .run(Math.floor((nowMs - policy.dailyRollupRetentionMs) / DAILY_MS)) as { + .run(Math.floor((currentMs - policy.dailyRollupRetentionMs) / DAILY_MS)) as { changes: number; } ).changes @@ -86,7 +103,7 @@ export function pruneRollupRetention( ? ( db .prepare(`DELETE FROM imm_monthly_rollups WHERE rollup_month < ?`) - .run(toMonthKey(nowMs - policy.monthlyRollupRetentionMs)) as { + .run(toMonthKey(currentMs - policy.monthlyRollupRetentionMs)) as { changes: number; } ).changes @@ -105,7 +122,7 @@ function getLastRollupSampleMs(db: DatabaseSync): number { return row ? Number(row.state_value) : ZERO_ID; } -function setLastRollupSampleMs(db: DatabaseSync, sampleMs: number): void { +function setLastRollupSampleMs(db: DatabaseSync, sampleMs: number | bigint): void { db.prepare( `INSERT INTO imm_rollup_state (state_key, state_value) VALUES (?, ?) 
@@ -124,7 +141,7 @@ function resetRollups(db: DatabaseSync): void { function upsertDailyRollupsForGroups( db: DatabaseSync, groups: Array<{ rollupDay: number; videoId: number }>, - rollupNowMs: number, + rollupNowMs: bigint, ): void { if (groups.length === 0) { return; @@ -140,29 +157,32 @@ function upsertDailyRollupsForGroups( CAST(julianday(s.started_at_ms / 1000, 'unixepoch', 'localtime') - 2440587.5 AS INTEGER) AS rollup_day, s.video_id AS video_id, COUNT(DISTINCT s.session_id) AS total_sessions, - COALESCE(SUM(sm.max_active_ms), 0) / 60000.0 AS total_active_min, - COALESCE(SUM(sm.max_lines), 0) AS total_lines_seen, - COALESCE(SUM(sm.max_tokens), 0) AS total_tokens_seen, - COALESCE(SUM(sm.max_cards), 0) AS total_cards, + COALESCE(SUM(COALESCE(sm.max_active_ms, s.active_watched_ms)), 0) / 60000.0 AS total_active_min, + COALESCE(SUM(COALESCE(sm.max_lines, s.lines_seen)), 0) AS total_lines_seen, + COALESCE(SUM(COALESCE(sm.max_tokens, s.tokens_seen)), 0) AS total_tokens_seen, + COALESCE(SUM(COALESCE(sm.max_cards, s.cards_mined)), 0) AS total_cards, CASE - WHEN COALESCE(SUM(sm.max_active_ms), 0) > 0 - THEN (COALESCE(SUM(sm.max_cards), 0) * 60.0) / (COALESCE(SUM(sm.max_active_ms), 0) / 60000.0) + WHEN COALESCE(SUM(COALESCE(sm.max_active_ms, s.active_watched_ms)), 0) > 0 + THEN (COALESCE(SUM(COALESCE(sm.max_cards, s.cards_mined)), 0) * 60.0) + / (COALESCE(SUM(COALESCE(sm.max_active_ms, s.active_watched_ms)), 0) / 60000.0) ELSE NULL END AS cards_per_hour, CASE - WHEN COALESCE(SUM(sm.max_active_ms), 0) > 0 - THEN COALESCE(SUM(sm.max_tokens), 0) / (COALESCE(SUM(sm.max_active_ms), 0) / 60000.0) + WHEN COALESCE(SUM(COALESCE(sm.max_active_ms, s.active_watched_ms)), 0) > 0 + THEN COALESCE(SUM(COALESCE(sm.max_tokens, s.tokens_seen)), 0) + / (COALESCE(SUM(COALESCE(sm.max_active_ms, s.active_watched_ms)), 0) / 60000.0) ELSE NULL END AS tokens_per_min, CASE - WHEN COALESCE(SUM(sm.max_lookups), 0) > 0 - THEN CAST(COALESCE(SUM(sm.max_hits), 0) AS REAL) / CAST(SUM(sm.max_lookups) 
AS REAL) + WHEN COALESCE(SUM(COALESCE(sm.max_lookups, s.lookup_count)), 0) > 0 + THEN CAST(COALESCE(SUM(COALESCE(sm.max_hits, s.lookup_hits)), 0) AS REAL) + / CAST(COALESCE(SUM(COALESCE(sm.max_lookups, s.lookup_count)), 0) AS REAL) ELSE NULL END AS lookup_hit_rate, ? AS CREATED_DATE, ? AS LAST_UPDATE_DATE FROM imm_sessions s - JOIN ( + LEFT JOIN ( SELECT t.session_id, MAX(t.active_watched_ms) AS max_active_ms, @@ -197,7 +217,7 @@ function upsertDailyRollupsForGroups( function upsertMonthlyRollupsForGroups( db: DatabaseSync, groups: Array<{ rollupMonth: number; videoId: number }>, - rollupNowMs: number, + rollupNowMs: bigint, ): void { if (groups.length === 0) { return; @@ -212,14 +232,14 @@ function upsertMonthlyRollupsForGroups( CAST(strftime('%Y%m', s.started_at_ms / 1000, 'unixepoch', 'localtime') AS INTEGER) AS rollup_month, s.video_id AS video_id, COUNT(DISTINCT s.session_id) AS total_sessions, - COALESCE(SUM(sm.max_active_ms), 0) / 60000.0 AS total_active_min, - COALESCE(SUM(sm.max_lines), 0) AS total_lines_seen, - COALESCE(SUM(sm.max_tokens), 0) AS total_tokens_seen, - COALESCE(SUM(sm.max_cards), 0) AS total_cards, + COALESCE(SUM(COALESCE(sm.max_active_ms, s.active_watched_ms)), 0) / 60000.0 AS total_active_min, + COALESCE(SUM(COALESCE(sm.max_lines, s.lines_seen)), 0) AS total_lines_seen, + COALESCE(SUM(COALESCE(sm.max_tokens, s.tokens_seen)), 0) AS total_tokens_seen, + COALESCE(SUM(COALESCE(sm.max_cards, s.cards_mined)), 0) AS total_cards, ? AS CREATED_DATE, ? AS LAST_UPDATE_DATE FROM imm_sessions s - JOIN ( + LEFT JOIN ( SELECT t.session_id, MAX(t.active_watched_ms) AS max_active_ms, @@ -261,7 +281,7 @@ function getAffectedRollupGroups( FROM imm_session_telemetry t JOIN imm_sessions s ON s.session_id = t.session_id - WHERE t.sample_ms > ? + WHERE t.sample_ms >= ? 
`, ) .all(lastRollupSampleMs) as unknown as RollupGroupRow[] @@ -301,7 +321,7 @@ export function runRollupMaintenance(db: DatabaseSync, forceRebuild = false): vo return; } - const rollupNowMs = Date.now(); + const rollupNowMs = toDbMs(nowMs()); const lastRollupSampleMs = getLastRollupSampleMs(db); const maxSampleRow = db @@ -336,7 +356,7 @@ export function runRollupMaintenance(db: DatabaseSync, forceRebuild = false): vo try { upsertDailyRollupsForGroups(db, dailyGroups, rollupNowMs); upsertMonthlyRollupsForGroups(db, monthlyGroups, rollupNowMs); - setLastRollupSampleMs(db, Number(maxSampleRow.maxSampleMs)); + setLastRollupSampleMs(db, toDbMs(maxSampleRow.maxSampleMs ?? ZERO_ID)); db.exec('COMMIT'); } catch (error) { db.exec('ROLLBACK'); @@ -345,7 +365,7 @@ export function runRollupMaintenance(db: DatabaseSync, forceRebuild = false): vo } export function rebuildRollupsInTransaction(db: DatabaseSync): void { - const rollupNowMs = Date.now(); + const rollupNowMs = toDbMs(nowMs()); const maxSampleRow = db .prepare('SELECT MAX(sample_ms) AS maxSampleMs FROM imm_session_telemetry') .get() as unknown as RollupTelemetryResult | null; @@ -357,7 +377,7 @@ export function rebuildRollupsInTransaction(db: DatabaseSync): void { const affectedGroups = getAffectedRollupGroups(db, ZERO_ID); if (affectedGroups.length === 0) { - setLastRollupSampleMs(db, Number(maxSampleRow.maxSampleMs)); + setLastRollupSampleMs(db, toDbMs(maxSampleRow.maxSampleMs ?? ZERO_ID)); return; } @@ -376,7 +396,7 @@ export function rebuildRollupsInTransaction(db: DatabaseSync): void { upsertDailyRollupsForGroups(db, dailyGroups, rollupNowMs); upsertMonthlyRollupsForGroups(db, monthlyGroups, rollupNowMs); - setLastRollupSampleMs(db, Number(maxSampleRow.maxSampleMs)); + setLastRollupSampleMs(db, toDbMs(maxSampleRow.maxSampleMs ?? 
ZERO_ID)); } export function runOptimizeMaintenance(db: DatabaseSync): void { diff --git a/src/core/services/immersion-tracker/query-lexical.ts b/src/core/services/immersion-tracker/query-lexical.ts new file mode 100644 index 0000000..004a713 --- /dev/null +++ b/src/core/services/immersion-tracker/query-lexical.ts @@ -0,0 +1,257 @@ +import type { DatabaseSync } from './sqlite'; +import type { + KanjiAnimeAppearanceRow, + KanjiDetailRow, + KanjiOccurrenceRow, + KanjiStatsRow, + KanjiWordRow, + SessionEventRow, + SimilarWordRow, + VocabularyStatsRow, + WordAnimeAppearanceRow, + WordDetailRow, + WordOccurrenceRow, +} from './types'; + +export function getVocabularyStats( + db: DatabaseSync, + limit = 100, + excludePos?: string[], +): VocabularyStatsRow[] { + const hasExclude = excludePos && excludePos.length > 0; + const placeholders = hasExclude ? excludePos.map(() => '?').join(', ') : ''; + const whereClause = hasExclude + ? `WHERE (part_of_speech IS NULL OR part_of_speech NOT IN (${placeholders}))` + : ''; + const stmt = db.prepare(` + SELECT w.id AS wordId, w.headword, w.word, w.reading, + w.part_of_speech AS partOfSpeech, w.pos1, w.pos2, w.pos3, + w.frequency, w.frequency_rank AS frequencyRank, + w.first_seen AS firstSeen, w.last_seen AS lastSeen, + COUNT(DISTINCT sl.anime_id) AS animeCount + FROM imm_words w + LEFT JOIN imm_word_line_occurrences o ON o.word_id = w.id + LEFT JOIN imm_subtitle_lines sl ON sl.line_id = o.line_id AND sl.anime_id IS NOT NULL + ${whereClause ? whereClause.replace('part_of_speech', 'w.part_of_speech') : ''} + GROUP BY w.id + ORDER BY w.frequency DESC LIMIT ? + `); + const params = hasExclude ? [...excludePos, limit] : [limit]; + return stmt.all(...params) as VocabularyStatsRow[]; +} + +export function getKanjiStats(db: DatabaseSync, limit = 100): KanjiStatsRow[] { + const stmt = db.prepare(` + SELECT id AS kanjiId, kanji, frequency, + first_seen AS firstSeen, last_seen AS lastSeen + FROM imm_kanji ORDER BY frequency DESC LIMIT ? 
+ `); + return stmt.all(limit) as KanjiStatsRow[]; +} + +export function getWordOccurrences( + db: DatabaseSync, + headword: string, + word: string, + reading: string, + limit = 100, + offset = 0, +): WordOccurrenceRow[] { + return db + .prepare( + ` + SELECT + l.anime_id AS animeId, + a.canonical_title AS animeTitle, + l.video_id AS videoId, + v.canonical_title AS videoTitle, + v.source_path AS sourcePath, + l.secondary_text AS secondaryText, + l.session_id AS sessionId, + l.line_index AS lineIndex, + l.segment_start_ms AS segmentStartMs, + l.segment_end_ms AS segmentEndMs, + l.text AS text, + o.occurrence_count AS occurrenceCount + FROM imm_word_line_occurrences o + JOIN imm_words w ON w.id = o.word_id + JOIN imm_subtitle_lines l ON l.line_id = o.line_id + JOIN imm_videos v ON v.video_id = l.video_id + LEFT JOIN imm_anime a ON a.anime_id = l.anime_id + WHERE w.headword = ? AND w.word = ? AND w.reading = ? + ORDER BY l.CREATED_DATE DESC, l.line_id DESC + LIMIT ? + OFFSET ? + `, + ) + .all(headword, word, reading, limit, offset) as unknown as WordOccurrenceRow[]; +} + +export function getKanjiOccurrences( + db: DatabaseSync, + kanji: string, + limit = 100, + offset = 0, +): KanjiOccurrenceRow[] { + return db + .prepare( + ` + SELECT + l.anime_id AS animeId, + a.canonical_title AS animeTitle, + l.video_id AS videoId, + v.canonical_title AS videoTitle, + v.source_path AS sourcePath, + l.secondary_text AS secondaryText, + l.session_id AS sessionId, + l.line_index AS lineIndex, + l.segment_start_ms AS segmentStartMs, + l.segment_end_ms AS segmentEndMs, + l.text AS text, + o.occurrence_count AS occurrenceCount + FROM imm_kanji_line_occurrences o + JOIN imm_kanji k ON k.id = o.kanji_id + JOIN imm_subtitle_lines l ON l.line_id = o.line_id + JOIN imm_videos v ON v.video_id = l.video_id + LEFT JOIN imm_anime a ON a.anime_id = l.anime_id + WHERE k.kanji = ? + ORDER BY l.CREATED_DATE DESC, l.line_id DESC + LIMIT ? + OFFSET ? 
+ `, + ) + .all(kanji, limit, offset) as unknown as KanjiOccurrenceRow[]; +} + +export function getSessionEvents( + db: DatabaseSync, + sessionId: number, + limit = 500, + eventTypes?: number[], +): SessionEventRow[] { + if (!eventTypes || eventTypes.length === 0) { + const stmt = db.prepare(` + SELECT event_type AS eventType, ts_ms AS tsMs, payload_json AS payload + FROM imm_session_events WHERE session_id = ? ORDER BY ts_ms ASC LIMIT ? + `); + return stmt.all(sessionId, limit) as SessionEventRow[]; + } + + const placeholders = eventTypes.map(() => '?').join(', '); + const stmt = db.prepare(` + SELECT event_type AS eventType, ts_ms AS tsMs, payload_json AS payload + FROM imm_session_events + WHERE session_id = ? AND event_type IN (${placeholders}) + ORDER BY ts_ms ASC + LIMIT ? + `); + return stmt.all(sessionId, ...eventTypes, limit) as SessionEventRow[]; +} + +export function getWordDetail(db: DatabaseSync, wordId: number): WordDetailRow | null { + return db + .prepare( + ` + SELECT id AS wordId, headword, word, reading, + part_of_speech AS partOfSpeech, pos1, pos2, pos3, + frequency, first_seen AS firstSeen, last_seen AS lastSeen + FROM imm_words WHERE id = ? + `, + ) + .get(wordId) as WordDetailRow | null; +} + +export function getWordAnimeAppearances( + db: DatabaseSync, + wordId: number, +): WordAnimeAppearanceRow[] { + return db + .prepare( + ` + SELECT a.anime_id AS animeId, a.canonical_title AS animeTitle, + SUM(o.occurrence_count) AS occurrenceCount + FROM imm_word_line_occurrences o + JOIN imm_subtitle_lines sl ON sl.line_id = o.line_id + JOIN imm_anime a ON a.anime_id = sl.anime_id + WHERE o.word_id = ? 
AND sl.anime_id IS NOT NULL + GROUP BY a.anime_id + ORDER BY occurrenceCount DESC + `, + ) + .all(wordId) as WordAnimeAppearanceRow[]; +} + +export function getSimilarWords(db: DatabaseSync, wordId: number, limit = 10): SimilarWordRow[] { + const word = db.prepare('SELECT headword, reading FROM imm_words WHERE id = ?').get(wordId) as { + headword: string; + reading: string; + } | null; + if (!word || word.headword.trim() === '') return []; + return db + .prepare( + ` + SELECT id AS wordId, headword, word, reading, frequency + FROM imm_words + WHERE id != ? + AND (reading = ? OR headword LIKE ? OR headword LIKE ?) + ORDER BY frequency DESC + LIMIT ? + `, + ) + .all( + wordId, + word.reading, + `%${word.headword.charAt(0)}%`, + `%${word.headword.charAt(word.headword.length - 1)}%`, + limit, + ) as SimilarWordRow[]; +} + +export function getKanjiDetail(db: DatabaseSync, kanjiId: number): KanjiDetailRow | null { + return db + .prepare( + ` + SELECT id AS kanjiId, kanji, frequency, first_seen AS firstSeen, last_seen AS lastSeen + FROM imm_kanji WHERE id = ? + `, + ) + .get(kanjiId) as KanjiDetailRow | null; +} + +export function getKanjiAnimeAppearances( + db: DatabaseSync, + kanjiId: number, +): KanjiAnimeAppearanceRow[] { + return db + .prepare( + ` + SELECT a.anime_id AS animeId, a.canonical_title AS animeTitle, + SUM(o.occurrence_count) AS occurrenceCount + FROM imm_kanji_line_occurrences o + JOIN imm_subtitle_lines sl ON sl.line_id = o.line_id + JOIN imm_anime a ON a.anime_id = sl.anime_id + WHERE o.kanji_id = ? 
AND sl.anime_id IS NOT NULL + GROUP BY a.anime_id + ORDER BY occurrenceCount DESC + `, + ) + .all(kanjiId) as KanjiAnimeAppearanceRow[]; +} + +export function getKanjiWords(db: DatabaseSync, kanjiId: number, limit = 20): KanjiWordRow[] { + const kanjiRow = db.prepare('SELECT kanji FROM imm_kanji WHERE id = ?').get(kanjiId) as { + kanji: string; + } | null; + if (!kanjiRow) return []; + return db + .prepare( + ` + SELECT id AS wordId, headword, word, reading, frequency + FROM imm_words + WHERE headword LIKE ? + ORDER BY frequency DESC + LIMIT ? + `, + ) + .all(`%${kanjiRow.kanji}%`, limit) as KanjiWordRow[]; +} diff --git a/src/core/services/immersion-tracker/query-library.ts b/src/core/services/immersion-tracker/query-library.ts new file mode 100644 index 0000000..cd03d6b --- /dev/null +++ b/src/core/services/immersion-tracker/query-library.ts @@ -0,0 +1,576 @@ +import type { DatabaseSync } from './sqlite'; +import type { + AnimeAnilistEntryRow, + AnimeDetailRow, + AnimeEpisodeRow, + AnimeLibraryRow, + AnimeWordRow, + EpisodeCardEventRow, + EpisodesPerDayRow, + ImmersionSessionRollupRow, + MediaArtRow, + MediaDetailRow, + MediaLibraryRow, + NewAnimePerDayRow, + SessionSummaryQueryRow, + StreakCalendarRow, + WatchTimePerAnimeRow, +} from './types'; +import { ACTIVE_SESSION_METRICS_CTE, resolvedCoverBlobExpr } from './query-shared'; + +export function getAnimeLibrary(db: DatabaseSync): AnimeLibraryRow[] { + return db + .prepare( + ` + SELECT + a.anime_id AS animeId, + a.canonical_title AS canonicalTitle, + a.anilist_id AS anilistId, + COALESCE(lm.total_sessions, 0) AS totalSessions, + COALESCE(lm.total_active_ms, 0) AS totalActiveMs, + COALESCE(lm.total_cards, 0) AS totalCards, + COALESCE(lm.total_tokens_seen, 0) AS totalTokensSeen, + COUNT(DISTINCT v.video_id) AS episodeCount, + a.episodes_total AS episodesTotal, + COALESCE(lm.last_watched_ms, 0) AS lastWatchedMs + FROM imm_anime a + JOIN imm_lifetime_anime lm ON lm.anime_id = a.anime_id + JOIN imm_videos v ON 
v.anime_id = a.anime_id + GROUP BY a.anime_id + ORDER BY totalActiveMs DESC, lm.last_watched_ms DESC, canonicalTitle ASC + `, + ) + .all() as unknown as AnimeLibraryRow[]; +} + +export function getAnimeDetail(db: DatabaseSync, animeId: number): AnimeDetailRow | null { + return db + .prepare( + ` + ${ACTIVE_SESSION_METRICS_CTE} + SELECT + a.anime_id AS animeId, + a.canonical_title AS canonicalTitle, + a.anilist_id AS anilistId, + a.title_romaji AS titleRomaji, + a.title_english AS titleEnglish, + a.title_native AS titleNative, + a.description AS description, + COALESCE(lm.total_sessions, 0) AS totalSessions, + COALESCE(lm.total_active_ms, 0) AS totalActiveMs, + COALESCE(lm.total_cards, 0) AS totalCards, + COALESCE(lm.total_tokens_seen, 0) AS totalTokensSeen, + COALESCE(lm.total_lines_seen, 0) AS totalLinesSeen, + COALESCE(SUM(COALESCE(asm.lookupCount, s.lookup_count, 0)), 0) AS totalLookupCount, + COALESCE(SUM(COALESCE(asm.lookupHits, s.lookup_hits, 0)), 0) AS totalLookupHits, + COALESCE(SUM(COALESCE(asm.yomitanLookupCount, s.yomitan_lookup_count, 0)), 0) AS totalYomitanLookupCount, + COUNT(DISTINCT v.video_id) AS episodeCount, + COALESCE(lm.last_watched_ms, 0) AS lastWatchedMs + FROM imm_anime a + JOIN imm_lifetime_anime lm ON lm.anime_id = a.anime_id + JOIN imm_videos v ON v.anime_id = a.anime_id + LEFT JOIN imm_sessions s ON s.video_id = v.video_id + LEFT JOIN active_session_metrics asm ON asm.sessionId = s.session_id + WHERE a.anime_id = ? + GROUP BY a.anime_id + `, + ) + .get(animeId) as unknown as AnimeDetailRow | null; +} + +export function getAnimeAnilistEntries(db: DatabaseSync, animeId: number): AnimeAnilistEntryRow[] { + return db + .prepare( + ` + SELECT DISTINCT + m.anilist_id AS anilistId, + m.title_romaji AS titleRomaji, + m.title_english AS titleEnglish, + v.parsed_season AS season + FROM imm_videos v + JOIN imm_media_art m ON m.video_id = v.video_id + WHERE v.anime_id = ? 
+ AND m.anilist_id IS NOT NULL + ORDER BY v.parsed_season ASC + `, + ) + .all(animeId) as unknown as AnimeAnilistEntryRow[]; +} + +export function getAnimeEpisodes(db: DatabaseSync, animeId: number): AnimeEpisodeRow[] { + return db + .prepare( + ` + ${ACTIVE_SESSION_METRICS_CTE} + SELECT + v.anime_id AS animeId, + v.video_id AS videoId, + v.canonical_title AS canonicalTitle, + v.parsed_title AS parsedTitle, + v.parsed_season AS season, + v.parsed_episode AS episode, + v.duration_ms AS durationMs, + ( + SELECT COALESCE( + NULLIF(s_recent.ended_media_ms, 0), + ( + SELECT MAX(line.segment_end_ms) + FROM imm_subtitle_lines line + WHERE line.session_id = s_recent.session_id + AND line.segment_end_ms IS NOT NULL + ), + ( + SELECT MAX(event.segment_end_ms) + FROM imm_session_events event + WHERE event.session_id = s_recent.session_id + AND event.segment_end_ms IS NOT NULL + ) + ) + FROM imm_sessions s_recent + WHERE s_recent.video_id = v.video_id + AND ( + s_recent.ended_media_ms IS NOT NULL + OR EXISTS ( + SELECT 1 + FROM imm_subtitle_lines line + WHERE line.session_id = s_recent.session_id + AND line.segment_end_ms IS NOT NULL + ) + OR EXISTS ( + SELECT 1 + FROM imm_session_events event + WHERE event.session_id = s_recent.session_id + AND event.segment_end_ms IS NOT NULL + ) + ) + ORDER BY + COALESCE(s_recent.ended_at_ms, s_recent.LAST_UPDATE_DATE, s_recent.started_at_ms) DESC, + s_recent.session_id DESC + LIMIT 1 + ) AS endedMediaMs, + v.watched AS watched, + COUNT(DISTINCT s.session_id) AS totalSessions, + COALESCE(SUM(COALESCE(asm.activeWatchedMs, s.active_watched_ms, 0)), 0) AS totalActiveMs, + COALESCE(SUM(COALESCE(asm.cardsMined, s.cards_mined, 0)), 0) AS totalCards, + COALESCE(SUM(COALESCE(asm.tokensSeen, s.tokens_seen, 0)), 0) AS totalTokensSeen, + COALESCE(SUM(COALESCE(asm.yomitanLookupCount, s.yomitan_lookup_count, 0)), 0) AS totalYomitanLookupCount, + MAX(s.started_at_ms) AS lastWatchedMs + FROM imm_videos v + LEFT JOIN imm_sessions s ON s.video_id = 
v.video_id + LEFT JOIN active_session_metrics asm ON asm.sessionId = s.session_id + WHERE v.anime_id = ? + GROUP BY v.video_id + ORDER BY + CASE WHEN v.parsed_season IS NULL THEN 1 ELSE 0 END, + v.parsed_season ASC, + CASE WHEN v.parsed_episode IS NULL THEN 1 ELSE 0 END, + v.parsed_episode ASC, + v.video_id ASC + `, + ) + .all(animeId) as unknown as AnimeEpisodeRow[]; +} + +export function getMediaLibrary(db: DatabaseSync): MediaLibraryRow[] { + return db + .prepare( + ` + SELECT + v.video_id AS videoId, + v.canonical_title AS canonicalTitle, + COALESCE(lm.total_sessions, 0) AS totalSessions, + COALESCE(lm.total_active_ms, 0) AS totalActiveMs, + COALESCE(lm.total_cards, 0) AS totalCards, + COALESCE(lm.total_tokens_seen, 0) AS totalTokensSeen, + COALESCE(lm.last_watched_ms, 0) AS lastWatchedMs, + yv.youtube_video_id AS youtubeVideoId, + yv.video_url AS videoUrl, + yv.video_title AS videoTitle, + yv.video_thumbnail_url AS videoThumbnailUrl, + yv.channel_id AS channelId, + yv.channel_name AS channelName, + yv.channel_url AS channelUrl, + yv.channel_thumbnail_url AS channelThumbnailUrl, + yv.uploader_id AS uploaderId, + yv.uploader_url AS uploaderUrl, + yv.description AS description, + CASE + WHEN ma.cover_blob_hash IS NOT NULL OR ma.cover_blob IS NOT NULL THEN 1 + ELSE 0 + END AS hasCoverArt + FROM imm_videos v + JOIN imm_lifetime_media lm ON lm.video_id = v.video_id + LEFT JOIN imm_media_art ma ON ma.video_id = v.video_id + LEFT JOIN imm_youtube_videos yv ON yv.video_id = v.video_id + ORDER BY lm.last_watched_ms DESC + `, + ) + .all() as unknown as MediaLibraryRow[]; +} + +export function getMediaDetail(db: DatabaseSync, videoId: number): MediaDetailRow | null { + return db + .prepare( + ` + ${ACTIVE_SESSION_METRICS_CTE} + SELECT + v.video_id AS videoId, + v.canonical_title AS canonicalTitle, + v.anime_id AS animeId, + COALESCE(lm.total_sessions, 0) AS totalSessions, + COALESCE(lm.total_active_ms, 0) AS totalActiveMs, + COALESCE(lm.total_cards, 0) AS totalCards, + 
COALESCE(lm.total_tokens_seen, 0) AS totalTokensSeen, + COALESCE(lm.total_lines_seen, 0) AS totalLinesSeen, + COALESCE(SUM(COALESCE(asm.lookupCount, s.lookup_count, 0)), 0) AS totalLookupCount, + COALESCE(SUM(COALESCE(asm.lookupHits, s.lookup_hits, 0)), 0) AS totalLookupHits, + COALESCE(SUM(COALESCE(asm.yomitanLookupCount, s.yomitan_lookup_count, 0)), 0) AS totalYomitanLookupCount, + yv.youtube_video_id AS youtubeVideoId, + yv.video_url AS videoUrl, + yv.video_title AS videoTitle, + yv.video_thumbnail_url AS videoThumbnailUrl, + yv.channel_id AS channelId, + yv.channel_name AS channelName, + yv.channel_url AS channelUrl, + yv.channel_thumbnail_url AS channelThumbnailUrl, + yv.uploader_id AS uploaderId, + yv.uploader_url AS uploaderUrl, + yv.description AS description + FROM imm_videos v + JOIN imm_lifetime_media lm ON lm.video_id = v.video_id + LEFT JOIN imm_youtube_videos yv ON yv.video_id = v.video_id + LEFT JOIN imm_sessions s ON s.video_id = v.video_id + LEFT JOIN active_session_metrics asm ON asm.sessionId = s.session_id + WHERE v.video_id = ? 
+ GROUP BY v.video_id + `, + ) + .get(videoId) as unknown as MediaDetailRow | null; +} + +export function getMediaSessions( + db: DatabaseSync, + videoId: number, + limit = 100, +): SessionSummaryQueryRow[] { + return db + .prepare( + ` + ${ACTIVE_SESSION_METRICS_CTE} + SELECT + s.session_id AS sessionId, + s.video_id AS videoId, + v.canonical_title AS canonicalTitle, + s.started_at_ms AS startedAtMs, + s.ended_at_ms AS endedAtMs, + COALESCE(asm.totalWatchedMs, s.total_watched_ms, 0) AS totalWatchedMs, + COALESCE(asm.activeWatchedMs, s.active_watched_ms, 0) AS activeWatchedMs, + COALESCE(asm.linesSeen, s.lines_seen, 0) AS linesSeen, + COALESCE(asm.tokensSeen, s.tokens_seen, 0) AS tokensSeen, + COALESCE(asm.cardsMined, s.cards_mined, 0) AS cardsMined, + COALESCE(asm.lookupCount, s.lookup_count, 0) AS lookupCount, + COALESCE(asm.lookupHits, s.lookup_hits, 0) AS lookupHits, + COALESCE(asm.yomitanLookupCount, s.yomitan_lookup_count, 0) AS yomitanLookupCount + FROM imm_sessions s + LEFT JOIN active_session_metrics asm ON asm.sessionId = s.session_id + LEFT JOIN imm_videos v ON v.video_id = s.video_id + WHERE s.video_id = ? + ORDER BY s.started_at_ms DESC + LIMIT ? + `, + ) + .all(videoId, limit) as unknown as SessionSummaryQueryRow[]; +} + +export function getMediaDailyRollups( + db: DatabaseSync, + videoId: number, + limit = 90, +): ImmersionSessionRollupRow[] { + return db + .prepare( + ` + WITH recent_days AS ( + SELECT DISTINCT rollup_day + FROM imm_daily_rollups + WHERE video_id = ? + ORDER BY rollup_day DESC + LIMIT ? + ) + SELECT + rollup_day AS rollupDayOrMonth, + video_id AS videoId, + total_sessions AS totalSessions, + total_active_min AS totalActiveMin, + total_lines_seen AS totalLinesSeen, + total_tokens_seen AS totalTokensSeen, + total_cards AS totalCards, + cards_per_hour AS cardsPerHour, + tokens_per_min AS tokensPerMin, + lookup_hit_rate AS lookupHitRate + FROM imm_daily_rollups + WHERE video_id = ? 
+ AND rollup_day IN (SELECT rollup_day FROM recent_days) + ORDER BY rollup_day DESC, video_id DESC + `, + ) + .all(videoId, limit, videoId) as unknown as ImmersionSessionRollupRow[]; +} + +export function getAnimeDailyRollups( + db: DatabaseSync, + animeId: number, + limit = 90, +): ImmersionSessionRollupRow[] { + return db + .prepare( + ` + WITH recent_days AS ( + SELECT DISTINCT r.rollup_day + FROM imm_daily_rollups r + JOIN imm_videos v ON v.video_id = r.video_id + WHERE v.anime_id = ? + ORDER BY r.rollup_day DESC + LIMIT ? + ) + SELECT r.rollup_day AS rollupDayOrMonth, r.video_id AS videoId, + r.total_sessions AS totalSessions, r.total_active_min AS totalActiveMin, + r.total_lines_seen AS totalLinesSeen, + r.total_tokens_seen AS totalTokensSeen, r.total_cards AS totalCards, + r.cards_per_hour AS cardsPerHour, r.tokens_per_min AS tokensPerMin, + r.lookup_hit_rate AS lookupHitRate + FROM imm_daily_rollups r + JOIN imm_videos v ON v.video_id = r.video_id + WHERE v.anime_id = ? + AND r.rollup_day IN (SELECT rollup_day FROM recent_days) + ORDER BY r.rollup_day DESC, r.video_id DESC + `, + ) + .all(animeId, limit, animeId) as unknown as ImmersionSessionRollupRow[]; +} + +export function getAnimeCoverArt(db: DatabaseSync, animeId: number): MediaArtRow | null { + const resolvedCoverBlob = resolvedCoverBlobExpr('a', 'cab'); + return db + .prepare( + ` + SELECT + a.video_id AS videoId, + a.anilist_id AS anilistId, + a.cover_url AS coverUrl, + ${resolvedCoverBlob} AS coverBlob, + a.title_romaji AS titleRomaji, + a.title_english AS titleEnglish, + a.episodes_total AS episodesTotal, + a.fetched_at_ms AS fetchedAtMs + FROM imm_media_art a + JOIN imm_videos v ON v.video_id = a.video_id + LEFT JOIN imm_cover_art_blobs cab ON cab.blob_hash = a.cover_blob_hash + WHERE v.anime_id = ? 
+ AND ${resolvedCoverBlob} IS NOT NULL + ORDER BY a.fetched_at_ms DESC, a.video_id DESC + LIMIT 1 + `, + ) + .get(animeId) as unknown as MediaArtRow | null; +} + +export function getCoverArt(db: DatabaseSync, videoId: number): MediaArtRow | null { + const resolvedCoverBlob = resolvedCoverBlobExpr('a', 'cab'); + return db + .prepare( + ` + SELECT + a.video_id AS videoId, + a.anilist_id AS anilistId, + a.cover_url AS coverUrl, + ${resolvedCoverBlob} AS coverBlob, + a.title_romaji AS titleRomaji, + a.title_english AS titleEnglish, + a.episodes_total AS episodesTotal, + a.fetched_at_ms AS fetchedAtMs + FROM imm_media_art a + LEFT JOIN imm_cover_art_blobs cab ON cab.blob_hash = a.cover_blob_hash + WHERE a.video_id = ? + `, + ) + .get(videoId) as unknown as MediaArtRow | null; +} + +export function getStreakCalendar(db: DatabaseSync, days = 90): StreakCalendarRow[] { + const now = new Date(); + const localMidnight = new Date(now.getFullYear(), now.getMonth(), now.getDate()).getTime(); + const todayLocalDay = Math.floor(localMidnight / 86_400_000); + const cutoffDay = todayLocalDay - days; + return db + .prepare( + ` + SELECT rollup_day AS epochDay, SUM(total_active_min) AS totalActiveMin + FROM imm_daily_rollups + WHERE rollup_day >= ? + GROUP BY rollup_day + ORDER BY rollup_day ASC + `, + ) + .all(cutoffDay) as StreakCalendarRow[]; +} + +export function getAnimeWords(db: DatabaseSync, animeId: number, limit = 50): AnimeWordRow[] { + return db + .prepare( + ` + SELECT w.id AS wordId, w.headword, w.word, w.reading, w.part_of_speech AS partOfSpeech, + SUM(o.occurrence_count) AS frequency + FROM imm_word_line_occurrences o + JOIN imm_subtitle_lines sl ON sl.line_id = o.line_id + JOIN imm_words w ON w.id = o.word_id + WHERE sl.anime_id = ? + GROUP BY w.id + ORDER BY frequency DESC + LIMIT ? 
+ `, + ) + .all(animeId, limit) as unknown as AnimeWordRow[]; +} + +export function getEpisodesPerDay(db: DatabaseSync, limit = 90): EpisodesPerDayRow[] { + return db + .prepare( + ` + SELECT CAST(julianday(s.started_at_ms / 1000, 'unixepoch', 'localtime') - 2440587.5 AS INTEGER) AS epochDay, + COUNT(DISTINCT s.video_id) AS episodeCount + FROM imm_sessions s + GROUP BY epochDay + ORDER BY epochDay DESC + LIMIT ? + `, + ) + .all(limit) as EpisodesPerDayRow[]; +} + +export function getNewAnimePerDay(db: DatabaseSync, limit = 90): NewAnimePerDayRow[] { + return db + .prepare( + ` + SELECT first_day AS epochDay, COUNT(*) AS newAnimeCount + FROM ( + SELECT CAST(julianday(MIN(s.started_at_ms) / 1000, 'unixepoch', 'localtime') - 2440587.5 AS INTEGER) AS first_day + FROM imm_sessions s + JOIN imm_videos v ON v.video_id = s.video_id + WHERE v.anime_id IS NOT NULL + GROUP BY v.anime_id + ) + GROUP BY first_day + ORDER BY first_day DESC + LIMIT ? + `, + ) + .all(limit) as NewAnimePerDayRow[]; +} + +export function getWatchTimePerAnime(db: DatabaseSync, limit = 90): WatchTimePerAnimeRow[] { + const nowD = new Date(); + const cutoffDay = + Math.floor( + new Date(nowD.getFullYear(), nowD.getMonth(), nowD.getDate()).getTime() / 86_400_000, + ) - limit; + return db + .prepare( + ` + SELECT r.rollup_day AS epochDay, a.anime_id AS animeId, + a.canonical_title AS animeTitle, + SUM(r.total_active_min) AS totalActiveMin + FROM imm_daily_rollups r + JOIN imm_videos v ON v.video_id = r.video_id + JOIN imm_anime a ON a.anime_id = v.anime_id + WHERE r.rollup_day >= ? 
+ GROUP BY r.rollup_day, a.anime_id + ORDER BY r.rollup_day ASC + `, + ) + .all(cutoffDay) as WatchTimePerAnimeRow[]; +} + +export function getEpisodeWords(db: DatabaseSync, videoId: number, limit = 50): AnimeWordRow[] { + return db + .prepare( + ` + SELECT w.id AS wordId, w.headword, w.word, w.reading, w.part_of_speech AS partOfSpeech, + SUM(o.occurrence_count) AS frequency + FROM imm_word_line_occurrences o + JOIN imm_subtitle_lines sl ON sl.line_id = o.line_id + JOIN imm_words w ON w.id = o.word_id + WHERE sl.video_id = ? + GROUP BY w.id + ORDER BY frequency DESC + LIMIT ? + `, + ) + .all(videoId, limit) as unknown as AnimeWordRow[]; +} + +export function getEpisodeSessions(db: DatabaseSync, videoId: number): SessionSummaryQueryRow[] { + return db + .prepare( + ` + ${ACTIVE_SESSION_METRICS_CTE} + SELECT + s.session_id AS sessionId, s.video_id AS videoId, + v.canonical_title AS canonicalTitle, + s.started_at_ms AS startedAtMs, s.ended_at_ms AS endedAtMs, + COALESCE(asm.totalWatchedMs, s.total_watched_ms, 0) AS totalWatchedMs, + COALESCE(asm.activeWatchedMs, s.active_watched_ms, 0) AS activeWatchedMs, + COALESCE(asm.linesSeen, s.lines_seen, 0) AS linesSeen, + COALESCE(asm.tokensSeen, s.tokens_seen, 0) AS tokensSeen, + COALESCE(asm.cardsMined, s.cards_mined, 0) AS cardsMined, + COALESCE(asm.lookupCount, s.lookup_count, 0) AS lookupCount, + COALESCE(asm.lookupHits, s.lookup_hits, 0) AS lookupHits, + COALESCE(asm.yomitanLookupCount, s.yomitan_lookup_count, 0) AS yomitanLookupCount + FROM imm_sessions s + JOIN imm_videos v ON v.video_id = s.video_id + LEFT JOIN active_session_metrics asm ON asm.sessionId = s.session_id + WHERE s.video_id = ? 
+ ORDER BY s.started_at_ms DESC + `, + ) + .all(videoId) as SessionSummaryQueryRow[]; +} + +export function getEpisodeCardEvents(db: DatabaseSync, videoId: number): EpisodeCardEventRow[] { + const rows = db + .prepare( + ` + SELECT e.event_id AS eventId, e.session_id AS sessionId, + e.ts_ms AS tsMs, e.cards_delta AS cardsDelta, + e.payload_json AS payloadJson + FROM imm_session_events e + JOIN imm_sessions s ON s.session_id = e.session_id + WHERE s.video_id = ? AND e.event_type = 4 + ORDER BY e.ts_ms DESC + `, + ) + .all(videoId) as Array<{ + eventId: number; + sessionId: number; + tsMs: number; + cardsDelta: number; + payloadJson: string | null; + }>; + + return rows.map((row) => { + let noteIds: number[] = []; + if (row.payloadJson) { + try { + const parsed = JSON.parse(row.payloadJson); + if (Array.isArray(parsed.noteIds)) noteIds = parsed.noteIds; + } catch {} + } + return { + eventId: row.eventId, + sessionId: row.sessionId, + tsMs: row.tsMs, + cardsDelta: row.cardsDelta, + noteIds, + }; + }); +} diff --git a/src/core/services/immersion-tracker/query-maintenance.ts b/src/core/services/immersion-tracker/query-maintenance.ts new file mode 100644 index 0000000..022386f --- /dev/null +++ b/src/core/services/immersion-tracker/query-maintenance.ts @@ -0,0 +1,546 @@ +import { createHash } from 'node:crypto'; +import type { DatabaseSync } from './sqlite'; +import { buildCoverBlobReference, normalizeCoverBlobBytes } from './storage'; +import { rebuildLifetimeSummariesInTransaction } from './lifetime'; +import { rebuildRollupsInTransaction } from './maintenance'; +import { nowMs } from './time'; +import { PartOfSpeech, type MergedToken } from '../../../types'; +import { shouldExcludeTokenFromVocabularyPersistence } from '../tokenizer/annotation-stage'; +import { deriveStoredPartOfSpeech } from '../tokenizer/part-of-speech'; +import { + cleanupUnusedCoverArtBlobHash, + deleteSessionsByIds, + findSharedCoverBlobHash, + getAffectedKanjiIdsForSessions, + 
getAffectedKanjiIdsForVideo, + getAffectedWordIdsForSessions, + getAffectedWordIdsForVideo, + refreshLexicalAggregates, + toDbMs, +} from './query-shared'; + +type CleanupVocabularyRow = { + id: number; + word: string; + headword: string; + reading: string | null; + part_of_speech: string | null; + pos1: string | null; + pos2: string | null; + pos3: string | null; + first_seen: number | null; + last_seen: number | null; + frequency: number | null; +}; + +type ResolvedVocabularyPos = { + headword: string; + reading: string; + hasPosMetadata: boolean; + partOfSpeech: PartOfSpeech; + pos1: string; + pos2: string; + pos3: string; +}; + +type CleanupVocabularyStatsOptions = { + resolveLegacyPos?: (row: CleanupVocabularyRow) => Promise<{ + headword: string; + reading: string; + partOfSpeech: string; + pos1: string; + pos2: string; + pos3: string; + } | null>; +}; + +function toStoredWordToken(row: { + word: string; + headword: string; + part_of_speech: string | null; + pos1: string | null; + pos2: string | null; + pos3: string | null; +}): MergedToken { + return { + surface: row.word || row.headword || '', + reading: '', + headword: row.headword || row.word || '', + startPos: 0, + endPos: 0, + partOfSpeech: deriveStoredPartOfSpeech({ + partOfSpeech: row.part_of_speech, + pos1: row.pos1, + }), + pos1: row.pos1 ?? '', + pos2: row.pos2 ?? '', + pos3: row.pos3 ?? '', + isMerged: true, + isKnown: false, + isNPlusOneTarget: false, + }; +} + +function normalizePosField(value: string | null | undefined): string { + return typeof value === 'string' ? value.trim() : ''; +} + +function resolveStoredVocabularyPos(row: CleanupVocabularyRow): ResolvedVocabularyPos | null { + const headword = normalizePosField(row.headword); + const reading = normalizePosField(row.reading); + const partOfSpeechRaw = typeof row.part_of_speech === 'string' ? 
row.part_of_speech.trim() : ''; + const pos1 = normalizePosField(row.pos1); + const pos2 = normalizePosField(row.pos2); + const pos3 = normalizePosField(row.pos3); + + if (!headword && !reading && !partOfSpeechRaw && !pos1 && !pos2 && !pos3) { + return null; + } + + return { + headword: headword || normalizePosField(row.word), + reading, + hasPosMetadata: Boolean(partOfSpeechRaw || pos1 || pos2 || pos3), + partOfSpeech: deriveStoredPartOfSpeech({ + partOfSpeech: partOfSpeechRaw, + pos1, + }), + pos1, + pos2, + pos3, + }; +} + +function hasStructuredPos(pos: ResolvedVocabularyPos | null): boolean { + return Boolean(pos?.hasPosMetadata && (pos.pos1 || pos.pos2 || pos.pos3 || pos.partOfSpeech)); +} + +function needsLegacyVocabularyMetadataRepair( + row: CleanupVocabularyRow, + stored: ResolvedVocabularyPos | null, +): boolean { + if (!stored) { + return true; + } + + if (!hasStructuredPos(stored)) { + return true; + } + + if (!stored.reading) { + return true; + } + + if (!stored.headword) { + return true; + } + + return stored.headword === normalizePosField(row.word); +} + +function shouldUpdateStoredVocabularyPos( + row: CleanupVocabularyRow, + next: ResolvedVocabularyPos, +): boolean { + return ( + normalizePosField(row.headword) !== next.headword || + normalizePosField(row.reading) !== next.reading || + (next.hasPosMetadata && + (normalizePosField(row.part_of_speech) !== next.partOfSpeech || + normalizePosField(row.pos1) !== next.pos1 || + normalizePosField(row.pos2) !== next.pos2 || + normalizePosField(row.pos3) !== next.pos3)) + ); +} + +function chooseMergedPartOfSpeech( + current: string | null | undefined, + incoming: ResolvedVocabularyPos, +): string { + const normalizedCurrent = normalizePosField(current); + if ( + normalizedCurrent && + normalizedCurrent !== PartOfSpeech.other && + incoming.partOfSpeech === PartOfSpeech.other + ) { + return normalizedCurrent; + } + return incoming.partOfSpeech; +} + +async function maybeResolveLegacyVocabularyPos( + row: 
CleanupVocabularyRow, + options: CleanupVocabularyStatsOptions, +): Promise { + const stored = resolveStoredVocabularyPos(row); + if (!needsLegacyVocabularyMetadataRepair(row, stored) || !options.resolveLegacyPos) { + return stored; + } + + const resolved = await options.resolveLegacyPos(row); + if (resolved) { + return { + headword: normalizePosField(resolved.headword) || normalizePosField(row.word), + reading: normalizePosField(resolved.reading), + hasPosMetadata: true, + partOfSpeech: deriveStoredPartOfSpeech({ + partOfSpeech: resolved.partOfSpeech, + pos1: resolved.pos1, + }), + pos1: normalizePosField(resolved.pos1), + pos2: normalizePosField(resolved.pos2), + pos3: normalizePosField(resolved.pos3), + }; + } + + return stored; +} + +export async function cleanupVocabularyStats( + db: DatabaseSync, + options: CleanupVocabularyStatsOptions = {}, +): Promise<{ scanned: number; kept: number; deleted: number; repaired: number }> { + const rows = db + .prepare( + `SELECT id, word, headword, reading, part_of_speech, pos1, pos2, pos3, first_seen, last_seen, frequency + FROM imm_words`, + ) + .all() as CleanupVocabularyRow[]; + const findDuplicateStmt = db.prepare( + `SELECT id, part_of_speech, pos1, pos2, pos3, first_seen, last_seen, frequency + FROM imm_words + WHERE headword = ? AND word = ? AND reading = ? AND id != ?`, + ); + const deleteStmt = db.prepare('DELETE FROM imm_words WHERE id = ?'); + const updateStmt = db.prepare( + `UPDATE imm_words + SET headword = ?, reading = ?, part_of_speech = ?, pos1 = ?, pos2 = ?, pos3 = ? + WHERE id = ?`, + ); + const mergeWordStmt = db.prepare( + `UPDATE imm_words + SET + frequency = COALESCE(frequency, 0) + ?, + part_of_speech = ?, + pos1 = ?, + pos2 = ?, + pos3 = ?, + first_seen = MIN(COALESCE(first_seen, ?), ?), + last_seen = MAX(COALESCE(last_seen, ?), ?) 
+ WHERE id = ?`, + ); + const moveOccurrencesStmt = db.prepare( + `INSERT INTO imm_word_line_occurrences (line_id, word_id, occurrence_count) + SELECT line_id, ?, occurrence_count + FROM imm_word_line_occurrences + WHERE word_id = ? + ON CONFLICT(line_id, word_id) DO UPDATE SET + occurrence_count = imm_word_line_occurrences.occurrence_count + excluded.occurrence_count`, + ); + const deleteOccurrencesStmt = db.prepare( + 'DELETE FROM imm_word_line_occurrences WHERE word_id = ?', + ); + let kept = 0; + let deleted = 0; + let repaired = 0; + + for (const row of rows) { + const resolvedPos = await maybeResolveLegacyVocabularyPos(row, options); + const shouldRepair = Boolean(resolvedPos && shouldUpdateStoredVocabularyPos(row, resolvedPos)); + if (resolvedPos && shouldRepair) { + const duplicate = findDuplicateStmt.get( + resolvedPos.headword, + row.word, + resolvedPos.reading, + row.id, + ) as { + id: number; + part_of_speech: string | null; + pos1: string | null; + pos2: string | null; + pos3: string | null; + first_seen: number | null; + last_seen: number | null; + frequency: number | null; + } | null; + if (duplicate) { + moveOccurrencesStmt.run(duplicate.id, row.id); + deleteOccurrencesStmt.run(row.id); + mergeWordStmt.run( + row.frequency ?? 0, + chooseMergedPartOfSpeech(duplicate.part_of_speech, resolvedPos), + normalizePosField(duplicate.pos1) || resolvedPos.pos1, + normalizePosField(duplicate.pos2) || resolvedPos.pos2, + normalizePosField(duplicate.pos3) || resolvedPos.pos3, + row.first_seen ?? duplicate.first_seen ?? 0, + row.first_seen ?? duplicate.first_seen ?? 0, + row.last_seen ?? duplicate.last_seen ?? 0, + row.last_seen ?? duplicate.last_seen ?? 
0, + duplicate.id, + ); + deleteStmt.run(row.id); + repaired += 1; + deleted += 1; + continue; + } + + updateStmt.run( + resolvedPos.headword, + resolvedPos.reading, + resolvedPos.partOfSpeech, + resolvedPos.pos1, + resolvedPos.pos2, + resolvedPos.pos3, + row.id, + ); + repaired += 1; + } + + const effectiveRow = { + ...row, + headword: resolvedPos?.headword ?? row.headword, + reading: resolvedPos?.reading ?? row.reading, + part_of_speech: resolvedPos?.hasPosMetadata ? resolvedPos.partOfSpeech : row.part_of_speech, + pos1: resolvedPos?.pos1 ?? row.pos1, + pos2: resolvedPos?.pos2 ?? row.pos2, + pos3: resolvedPos?.pos3 ?? row.pos3, + }; + const missingPos = + !normalizePosField(effectiveRow.part_of_speech) && + !normalizePosField(effectiveRow.pos1) && + !normalizePosField(effectiveRow.pos2) && + !normalizePosField(effectiveRow.pos3); + if ( + missingPos || + shouldExcludeTokenFromVocabularyPersistence(toStoredWordToken(effectiveRow)) + ) { + deleteStmt.run(row.id); + deleted += 1; + continue; + } + kept += 1; + } + + return { + scanned: rows.length, + kept, + deleted, + repaired, + }; +} + +export function upsertCoverArt( + db: DatabaseSync, + videoId: number, + art: { + anilistId: number | null; + coverUrl: string | null; + coverBlob: ArrayBuffer | Uint8Array | Buffer | null; + titleRomaji: string | null; + titleEnglish: string | null; + episodesTotal: number | null; + }, +): void { + const existing = db + .prepare( + ` + SELECT cover_blob_hash AS coverBlobHash + FROM imm_media_art + WHERE video_id = ? + `, + ) + .get(videoId) as { coverBlobHash: string | null } | undefined; + const sharedCoverBlobHash = findSharedCoverBlobHash(db, videoId, art.anilistId, art.coverUrl); + const fetchedAtMs = toDbMs(nowMs()); + const coverBlob = normalizeCoverBlobBytes(art.coverBlob); + const computedCoverBlobHash = + coverBlob && coverBlob.length > 0 + ? createHash('sha256').update(coverBlob).digest('hex') + : null; + let coverBlobHash = computedCoverBlobHash ?? 
sharedCoverBlobHash ?? null; + if (!coverBlobHash && (!coverBlob || coverBlob.length === 0)) { + coverBlobHash = existing?.coverBlobHash ?? null; + } + + if (computedCoverBlobHash && coverBlob && coverBlob.length > 0) { + db.prepare( + ` + INSERT INTO imm_cover_art_blobs (blob_hash, cover_blob, CREATED_DATE, LAST_UPDATE_DATE) + VALUES (?, ?, ?, ?) + ON CONFLICT(blob_hash) DO UPDATE SET + LAST_UPDATE_DATE = excluded.LAST_UPDATE_DATE + `, + ).run(computedCoverBlobHash, coverBlob, fetchedAtMs, fetchedAtMs); + } + + db.prepare( + ` + INSERT INTO imm_media_art ( + video_id, anilist_id, cover_url, cover_blob, cover_blob_hash, + title_romaji, title_english, episodes_total, + fetched_at_ms, CREATED_DATE, LAST_UPDATE_DATE + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + ON CONFLICT(video_id) DO UPDATE SET + anilist_id = excluded.anilist_id, + cover_url = excluded.cover_url, + cover_blob = excluded.cover_blob, + cover_blob_hash = excluded.cover_blob_hash, + title_romaji = excluded.title_romaji, + title_english = excluded.title_english, + episodes_total = excluded.episodes_total, + fetched_at_ms = excluded.fetched_at_ms, + LAST_UPDATE_DATE = excluded.LAST_UPDATE_DATE + `, + ).run( + videoId, + art.anilistId, + art.coverUrl, + coverBlobHash ? buildCoverBlobReference(coverBlobHash) : coverBlob, + coverBlobHash, + art.titleRomaji, + art.titleEnglish, + art.episodesTotal, + fetchedAtMs, + fetchedAtMs, + fetchedAtMs, + ); + + if (existing?.coverBlobHash !== coverBlobHash) { + cleanupUnusedCoverArtBlobHash(db, existing?.coverBlobHash ?? 
null); + } +} + +export function updateAnimeAnilistInfo( + db: DatabaseSync, + videoId: number, + info: { + anilistId: number; + titleRomaji: string | null; + titleEnglish: string | null; + titleNative: string | null; + episodesTotal: number | null; + }, +): void { + const row = db.prepare('SELECT anime_id FROM imm_videos WHERE video_id = ?').get(videoId) as { + anime_id: number | null; + } | null; + if (!row?.anime_id) return; + + db.prepare( + ` + UPDATE imm_anime + SET + anilist_id = COALESCE(?, anilist_id), + title_romaji = COALESCE(?, title_romaji), + title_english = COALESCE(?, title_english), + title_native = COALESCE(?, title_native), + episodes_total = COALESCE(?, episodes_total), + LAST_UPDATE_DATE = ? + WHERE anime_id = ? + `, + ).run( + info.anilistId, + info.titleRomaji, + info.titleEnglish, + info.titleNative, + info.episodesTotal, + toDbMs(nowMs()), + row.anime_id, + ); +} + +export function markVideoWatched(db: DatabaseSync, videoId: number, watched: boolean): void { + db.prepare('UPDATE imm_videos SET watched = ?, LAST_UPDATE_DATE = ? WHERE video_id = ?').run( + watched ? 1 : 0, + toDbMs(nowMs()), + videoId, + ); +} + +export function getVideoDurationMs(db: DatabaseSync, videoId: number): number { + const row = db.prepare('SELECT duration_ms FROM imm_videos WHERE video_id = ?').get(videoId) as { + duration_ms: number; + } | null; + return row?.duration_ms ?? 
0; +} + +export function isVideoWatched(db: DatabaseSync, videoId: number): boolean { + const row = db.prepare('SELECT watched FROM imm_videos WHERE video_id = ?').get(videoId) as { + watched: number; + } | null; + return row?.watched === 1; +} + +export function deleteSession(db: DatabaseSync, sessionId: number): void { + const sessionIds = [sessionId]; + const affectedWordIds = getAffectedWordIdsForSessions(db, sessionIds); + const affectedKanjiIds = getAffectedKanjiIdsForSessions(db, sessionIds); + + db.exec('BEGIN IMMEDIATE'); + try { + deleteSessionsByIds(db, sessionIds); + refreshLexicalAggregates(db, affectedWordIds, affectedKanjiIds); + rebuildLifetimeSummariesInTransaction(db); + rebuildRollupsInTransaction(db); + db.exec('COMMIT'); + } catch (error) { + db.exec('ROLLBACK'); + throw error; + } +} + +export function deleteSessions(db: DatabaseSync, sessionIds: number[]): void { + if (sessionIds.length === 0) return; + const affectedWordIds = getAffectedWordIdsForSessions(db, sessionIds); + const affectedKanjiIds = getAffectedKanjiIdsForSessions(db, sessionIds); + + db.exec('BEGIN IMMEDIATE'); + try { + deleteSessionsByIds(db, sessionIds); + refreshLexicalAggregates(db, affectedWordIds, affectedKanjiIds); + rebuildLifetimeSummariesInTransaction(db); + rebuildRollupsInTransaction(db); + db.exec('COMMIT'); + } catch (error) { + db.exec('ROLLBACK'); + throw error; + } +} + +export function deleteVideo(db: DatabaseSync, videoId: number): void { + const artRow = db + .prepare( + ` + SELECT cover_blob_hash AS coverBlobHash + FROM imm_media_art + WHERE video_id = ? 
+ `, + ) + .get(videoId) as { coverBlobHash: string | null } | undefined; + const affectedWordIds = getAffectedWordIdsForVideo(db, videoId); + const affectedKanjiIds = getAffectedKanjiIdsForVideo(db, videoId); + const sessions = db + .prepare('SELECT session_id FROM imm_sessions WHERE video_id = ?') + .all(videoId) as Array<{ session_id: number }>; + + db.exec('BEGIN IMMEDIATE'); + try { + deleteSessionsByIds( + db, + sessions.map((session) => session.session_id), + ); + db.prepare('DELETE FROM imm_subtitle_lines WHERE video_id = ?').run(videoId); + db.prepare('DELETE FROM imm_daily_rollups WHERE video_id = ?').run(videoId); + db.prepare('DELETE FROM imm_monthly_rollups WHERE video_id = ?').run(videoId); + db.prepare('DELETE FROM imm_media_art WHERE video_id = ?').run(videoId); + cleanupUnusedCoverArtBlobHash(db, artRow?.coverBlobHash ?? null); + db.prepare('DELETE FROM imm_videos WHERE video_id = ?').run(videoId); + refreshLexicalAggregates(db, affectedWordIds, affectedKanjiIds); + rebuildLifetimeSummariesInTransaction(db); + rebuildRollupsInTransaction(db); + db.exec('COMMIT'); + } catch (error) { + db.exec('ROLLBACK'); + throw error; + } +} diff --git a/src/core/services/immersion-tracker/query-sessions.ts b/src/core/services/immersion-tracker/query-sessions.ts new file mode 100644 index 0000000..50224bd --- /dev/null +++ b/src/core/services/immersion-tracker/query-sessions.ts @@ -0,0 +1,351 @@ +import type { DatabaseSync } from './sqlite'; +import { nowMs } from './time'; +import type { + ImmersionSessionRollupRow, + SessionSummaryQueryRow, + SessionTimelineRow, +} from './types'; +import { ACTIVE_SESSION_METRICS_CTE } from './query-shared'; + +export function getSessionSummaries(db: DatabaseSync, limit = 50): SessionSummaryQueryRow[] { + const prepared = db.prepare(` + ${ACTIVE_SESSION_METRICS_CTE} + SELECT + s.session_id AS sessionId, + s.video_id AS videoId, + v.canonical_title AS canonicalTitle, + v.anime_id AS animeId, + a.canonical_title AS animeTitle, + 
s.started_at_ms AS startedAtMs, + s.ended_at_ms AS endedAtMs, + COALESCE(asm.totalWatchedMs, s.total_watched_ms, 0) AS totalWatchedMs, + COALESCE(asm.activeWatchedMs, s.active_watched_ms, 0) AS activeWatchedMs, + COALESCE(asm.linesSeen, s.lines_seen, 0) AS linesSeen, + COALESCE(asm.tokensSeen, s.tokens_seen, 0) AS tokensSeen, + COALESCE(asm.cardsMined, s.cards_mined, 0) AS cardsMined, + COALESCE(asm.lookupCount, s.lookup_count, 0) AS lookupCount, + COALESCE(asm.lookupHits, s.lookup_hits, 0) AS lookupHits, + COALESCE(asm.yomitanLookupCount, s.yomitan_lookup_count, 0) AS yomitanLookupCount + FROM imm_sessions s + LEFT JOIN active_session_metrics asm ON asm.sessionId = s.session_id + LEFT JOIN imm_videos v ON v.video_id = s.video_id + LEFT JOIN imm_anime a ON a.anime_id = v.anime_id + ORDER BY s.started_at_ms DESC + LIMIT ? + `); + return prepared.all(limit) as unknown as SessionSummaryQueryRow[]; +} + +export function getSessionTimeline( + db: DatabaseSync, + sessionId: number, + limit?: number, +): SessionTimelineRow[] { + const select = ` + SELECT + sample_ms AS sampleMs, + total_watched_ms AS totalWatchedMs, + active_watched_ms AS activeWatchedMs, + lines_seen AS linesSeen, + tokens_seen AS tokensSeen, + cards_mined AS cardsMined + FROM imm_session_telemetry + WHERE session_id = ? + ORDER BY sample_ms DESC, telemetry_id DESC + `; + + if (limit === undefined) { + return db.prepare(select).all(sessionId) as unknown as SessionTimelineRow[]; + } + return db + .prepare(`${select}\n LIMIT ?`) + .all(sessionId, limit) as unknown as SessionTimelineRow[]; +} + +/** Returns all distinct headwords in the vocabulary table (global). */ +export function getAllDistinctHeadwords(db: DatabaseSync): string[] { + const rows = db.prepare('SELECT DISTINCT headword FROM imm_words').all() as Array<{ + headword: string; + }>; + return rows.map((r) => r.headword); +} + +/** Returns distinct headwords seen for a specific anime. 
*/ +export function getAnimeDistinctHeadwords(db: DatabaseSync, animeId: number): string[] { + const rows = db + .prepare( + ` + SELECT DISTINCT w.headword + FROM imm_word_line_occurrences o + JOIN imm_subtitle_lines sl ON sl.line_id = o.line_id + JOIN imm_words w ON w.id = o.word_id + WHERE sl.anime_id = ? + `, + ) + .all(animeId) as Array<{ headword: string }>; + return rows.map((r) => r.headword); +} + +/** Returns distinct headwords seen for a specific video/media. */ +export function getMediaDistinctHeadwords(db: DatabaseSync, videoId: number): string[] { + const rows = db + .prepare( + ` + SELECT DISTINCT w.headword + FROM imm_word_line_occurrences o + JOIN imm_subtitle_lines sl ON sl.line_id = o.line_id + JOIN imm_words w ON w.id = o.word_id + WHERE sl.video_id = ? + `, + ) + .all(videoId) as Array<{ headword: string }>; + return rows.map((r) => r.headword); +} + +/** + * Returns the headword for each word seen in a session, grouped by line_index. + * Used to compute cumulative known-words counts for the session timeline chart. + */ +export function getSessionWordsByLine( + db: DatabaseSync, + sessionId: number, +): Array<{ lineIndex: number; headword: string; occurrenceCount: number }> { + const stmt = db.prepare(` + SELECT + sl.line_index AS lineIndex, + w.headword AS headword, + wlo.occurrence_count AS occurrenceCount + FROM imm_subtitle_lines sl + JOIN imm_word_line_occurrences wlo ON wlo.line_id = sl.line_id + JOIN imm_words w ON w.id = wlo.word_id + WHERE sl.session_id = ? 
+ ORDER BY sl.line_index ASC + `); + return stmt.all(sessionId) as Array<{ + lineIndex: number; + headword: string; + occurrenceCount: number; + }>; +} + +function getNewWordCounts(db: DatabaseSync): { newWordsToday: number; newWordsThisWeek: number } { + const now = new Date(); + const todayStartSec = new Date(now.getFullYear(), now.getMonth(), now.getDate()).getTime() / 1000; + const weekAgoSec = + new Date(now.getFullYear(), now.getMonth(), now.getDate() - 7).getTime() / 1000; + + const row = db + .prepare( + ` + WITH headword_first_seen AS ( + SELECT + headword, + MIN(first_seen) AS first_seen + FROM imm_words + WHERE first_seen IS NOT NULL + AND headword IS NOT NULL + AND headword != '' + GROUP BY headword + ) + SELECT + COALESCE(SUM(CASE WHEN first_seen >= ? THEN 1 ELSE 0 END), 0) AS today, + COALESCE(SUM(CASE WHEN first_seen >= ? THEN 1 ELSE 0 END), 0) AS week + FROM headword_first_seen + `, + ) + .get(todayStartSec, weekAgoSec) as { today: number; week: number } | null; + + return { + newWordsToday: Number(row?.today ?? 0), + newWordsThisWeek: Number(row?.week ?? 0), + }; +} + +export function getQueryHints(db: DatabaseSync): { + totalSessions: number; + activeSessions: number; + episodesToday: number; + activeAnimeCount: number; + totalEpisodesWatched: number; + totalAnimeCompleted: number; + totalActiveMin: number; + totalCards: number; + activeDays: number; + totalTokensSeen: number; + totalLookupCount: number; + totalLookupHits: number; + totalYomitanLookupCount: number; + newWordsToday: number; + newWordsThisWeek: number; +} { + const active = db.prepare('SELECT COUNT(*) AS total FROM imm_sessions WHERE ended_at_ms IS NULL'); + const activeSessions = Number((active.get() as { total?: number } | null)?.total ?? 
0); + const lifetime = db + .prepare( + ` + SELECT + total_sessions AS totalSessions, + total_active_ms AS totalActiveMs, + total_cards AS totalCards, + active_days AS activeDays, + episodes_completed AS episodesCompleted, + anime_completed AS animeCompleted + FROM imm_lifetime_global + WHERE global_id = 1 + `, + ) + .get() as { + totalSessions: number; + totalActiveMs: number; + totalCards: number; + activeDays: number; + episodesCompleted: number; + animeCompleted: number; + } | null; + + const now = new Date(); + const todayLocal = Math.floor( + new Date(now.getFullYear(), now.getMonth(), now.getDate()).getTime() / 86_400_000, + ); + + const episodesToday = + ( + db + .prepare( + ` + SELECT COUNT(DISTINCT s.video_id) AS count + FROM imm_sessions s + WHERE CAST(julianday(s.started_at_ms / 1000, 'unixepoch', 'localtime') - 2440587.5 AS INTEGER) = ? + `, + ) + .get(todayLocal) as { count: number } + )?.count ?? 0; + + const thirtyDaysAgoMs = nowMs() - 30 * 86400000; + const activeAnimeCount = + ( + db + .prepare( + ` + SELECT COUNT(DISTINCT v.anime_id) AS count + FROM imm_sessions s + JOIN imm_videos v ON v.video_id = s.video_id + WHERE v.anime_id IS NOT NULL + AND s.started_at_ms >= ? + `, + ) + .get(thirtyDaysAgoMs) as { count: number } + )?.count ?? 0; + + const totalEpisodesWatched = Number(lifetime?.episodesCompleted ?? 0); + const totalAnimeCompleted = Number(lifetime?.animeCompleted ?? 0); + const totalSessions = Number(lifetime?.totalSessions ?? 0); + const totalActiveMin = Math.floor(Math.max(0, lifetime?.totalActiveMs ?? 0) / 60000); + const totalCards = Number(lifetime?.totalCards ?? 0); + const activeDays = Number(lifetime?.activeDays ?? 
0); + + const lookupTotals = db + .prepare( + ` + SELECT + COALESCE(SUM(COALESCE(t.tokens_seen, s.tokens_seen, 0)), 0) AS totalTokensSeen, + COALESCE(SUM(COALESCE(t.lookup_count, s.lookup_count, 0)), 0) AS totalLookupCount, + COALESCE(SUM(COALESCE(t.lookup_hits, s.lookup_hits, 0)), 0) AS totalLookupHits, + COALESCE(SUM(COALESCE(t.yomitan_lookup_count, s.yomitan_lookup_count, 0)), 0) AS totalYomitanLookupCount + FROM imm_sessions s + LEFT JOIN ( + SELECT + session_id, + MAX(tokens_seen) AS tokens_seen, + MAX(lookup_count) AS lookup_count, + MAX(lookup_hits) AS lookup_hits, + MAX(yomitan_lookup_count) AS yomitan_lookup_count + FROM imm_session_telemetry + GROUP BY session_id + ) t ON t.session_id = s.session_id + WHERE s.ended_at_ms IS NOT NULL + `, + ) + .get() as { + totalTokensSeen: number; + totalLookupCount: number; + totalLookupHits: number; + totalYomitanLookupCount: number; + } | null; + + return { + totalSessions, + activeSessions, + episodesToday, + activeAnimeCount, + totalEpisodesWatched, + totalAnimeCompleted, + totalActiveMin, + totalCards, + activeDays, + totalTokensSeen: Number(lookupTotals?.totalTokensSeen ?? 0), + totalLookupCount: Number(lookupTotals?.totalLookupCount ?? 0), + totalLookupHits: Number(lookupTotals?.totalLookupHits ?? 0), + totalYomitanLookupCount: Number(lookupTotals?.totalYomitanLookupCount ?? 0), + ...getNewWordCounts(db), + }; +} + +export function getDailyRollups(db: DatabaseSync, limit = 60): ImmersionSessionRollupRow[] { + const prepared = db.prepare(` + WITH recent_days AS ( + SELECT DISTINCT rollup_day + FROM imm_daily_rollups + ORDER BY rollup_day DESC + LIMIT ? 
+ ) + SELECT + r.rollup_day AS rollupDayOrMonth, + r.video_id AS videoId, + r.total_sessions AS totalSessions, + r.total_active_min AS totalActiveMin, + r.total_lines_seen AS totalLinesSeen, + r.total_tokens_seen AS totalTokensSeen, + r.total_cards AS totalCards, + r.cards_per_hour AS cardsPerHour, + r.tokens_per_min AS tokensPerMin, + r.lookup_hit_rate AS lookupHitRate + FROM imm_daily_rollups r + WHERE r.rollup_day IN (SELECT rollup_day FROM recent_days) + ORDER BY r.rollup_day DESC, r.video_id DESC + `); + + return prepared.all(limit) as unknown as ImmersionSessionRollupRow[]; +} + +export function getMonthlyRollups(db: DatabaseSync, limit = 24): ImmersionSessionRollupRow[] { + const prepared = db.prepare(` + WITH recent_months AS ( + SELECT DISTINCT rollup_month + FROM imm_monthly_rollups + ORDER BY rollup_month DESC + LIMIT ? + ) + SELECT + rollup_month AS rollupDayOrMonth, + video_id AS videoId, + total_sessions AS totalSessions, + total_active_min AS totalActiveMin, + total_lines_seen AS totalLinesSeen, + total_tokens_seen AS totalTokensSeen, + total_cards AS totalCards, + CASE + WHEN total_active_min > 0 THEN (total_cards * 60.0) / total_active_min + ELSE NULL + END AS cardsPerHour, + CASE + WHEN total_active_min > 0 THEN total_tokens_seen * 1.0 / total_active_min + ELSE NULL + END AS tokensPerMin, + NULL AS lookupHitRate + FROM imm_monthly_rollups + WHERE rollup_month IN (SELECT rollup_month FROM recent_months) + ORDER BY rollup_month DESC, video_id DESC + `); + return prepared.all(limit) as unknown as ImmersionSessionRollupRow[]; +} diff --git a/src/core/services/immersion-tracker/query-shared.ts b/src/core/services/immersion-tracker/query-shared.ts new file mode 100644 index 0000000..c5d8312 --- /dev/null +++ b/src/core/services/immersion-tracker/query-shared.ts @@ -0,0 +1,282 @@ +import type { DatabaseSync } from './sqlite'; + +export const ACTIVE_SESSION_METRICS_CTE = ` + WITH active_session_metrics AS ( + SELECT + t.session_id AS sessionId, + 
MAX(t.total_watched_ms) AS totalWatchedMs, + MAX(t.active_watched_ms) AS activeWatchedMs, + MAX(t.lines_seen) AS linesSeen, + MAX(t.tokens_seen) AS tokensSeen, + MAX(t.cards_mined) AS cardsMined, + MAX(t.lookup_count) AS lookupCount, + MAX(t.lookup_hits) AS lookupHits, + MAX(t.yomitan_lookup_count) AS yomitanLookupCount + FROM imm_session_telemetry t + JOIN imm_sessions s ON s.session_id = t.session_id + WHERE s.ended_at_ms IS NULL + GROUP BY t.session_id + ) +`; + +export function makePlaceholders(values: number[]): string { + return values.map(() => '?').join(','); +} + +export function resolvedCoverBlobExpr(mediaAlias: string, blobStoreAlias: string): string { + return `COALESCE(${blobStoreAlias}.cover_blob, CASE WHEN ${mediaAlias}.cover_blob_hash IS NULL THEN ${mediaAlias}.cover_blob ELSE NULL END)`; +} + +export function cleanupUnusedCoverArtBlobHash(db: DatabaseSync, blobHash: string | null): void { + if (!blobHash) { + return; + } + db.prepare( + ` + DELETE FROM imm_cover_art_blobs + WHERE blob_hash = ? + AND NOT EXISTS ( + SELECT 1 + FROM imm_media_art + WHERE cover_blob_hash = ? + ) + `, + ).run(blobHash, blobHash); +} + +export function findSharedCoverBlobHash( + db: DatabaseSync, + videoId: number, + anilistId: number | null, + coverUrl: string | null, +): string | null { + if (anilistId !== null) { + const byAnilist = db + .prepare( + ` + SELECT cover_blob_hash AS coverBlobHash + FROM imm_media_art + WHERE video_id != ? + AND anilist_id = ? + AND cover_blob_hash IS NOT NULL + ORDER BY fetched_at_ms DESC, video_id DESC + LIMIT 1 + `, + ) + .get(videoId, anilistId) as { coverBlobHash: string | null } | undefined; + if (byAnilist?.coverBlobHash) { + return byAnilist.coverBlobHash; + } + } + + if (coverUrl) { + const byUrl = db + .prepare( + ` + SELECT cover_blob_hash AS coverBlobHash + FROM imm_media_art + WHERE video_id != ? + AND cover_url = ? 
+ AND cover_blob_hash IS NOT NULL + ORDER BY fetched_at_ms DESC, video_id DESC + LIMIT 1 + `, + ) + .get(videoId, coverUrl) as { coverBlobHash: string | null } | undefined; + return byUrl?.coverBlobHash ?? null; + } + + return null; +} + +type LexicalEntity = 'word' | 'kanji'; + +function getAffectedIdsForSessions( + db: DatabaseSync, + entity: LexicalEntity, + sessionIds: number[], +): number[] { + if (sessionIds.length === 0) return []; + const table = entity === 'word' ? 'imm_word_line_occurrences' : 'imm_kanji_line_occurrences'; + const col = `${entity}_id`; + return ( + db + .prepare( + `SELECT DISTINCT o.${col} AS id + FROM ${table} o + JOIN imm_subtitle_lines sl ON sl.line_id = o.line_id + WHERE sl.session_id IN (${makePlaceholders(sessionIds)})`, + ) + .all(...sessionIds) as Array<{ id: number }> + ).map((row) => row.id); +} + +function getAffectedIdsForVideo( + db: DatabaseSync, + entity: LexicalEntity, + videoId: number, +): number[] { + const table = entity === 'word' ? 'imm_word_line_occurrences' : 'imm_kanji_line_occurrences'; + const col = `${entity}_id`; + return ( + db + .prepare( + `SELECT DISTINCT o.${col} AS id + FROM ${table} o + JOIN imm_subtitle_lines sl ON sl.line_id = o.line_id + WHERE sl.video_id = ?`, + ) + .all(videoId) as Array<{ id: number }> + ).map((row) => row.id); +} + +export function getAffectedWordIdsForSessions(db: DatabaseSync, sessionIds: number[]): number[] { + return getAffectedIdsForSessions(db, 'word', sessionIds); +} + +export function getAffectedKanjiIdsForSessions(db: DatabaseSync, sessionIds: number[]): number[] { + return getAffectedIdsForSessions(db, 'kanji', sessionIds); +} + +export function getAffectedWordIdsForVideo(db: DatabaseSync, videoId: number): number[] { + return getAffectedIdsForVideo(db, 'word', videoId); +} + +export function getAffectedKanjiIdsForVideo(db: DatabaseSync, videoId: number): number[] { + return getAffectedIdsForVideo(db, 'kanji', videoId); +} + +function refreshWordAggregates(db: 
DatabaseSync, wordIds: number[]): void { + if (wordIds.length === 0) { + return; + } + + const rows = db + .prepare( + ` + SELECT + w.id AS wordId, + COALESCE(SUM(o.occurrence_count), 0) AS frequency, + MIN(COALESCE(sl.CREATED_DATE, sl.LAST_UPDATE_DATE)) AS firstSeen, + MAX(COALESCE(sl.LAST_UPDATE_DATE, sl.CREATED_DATE)) AS lastSeen + FROM imm_words w + LEFT JOIN imm_word_line_occurrences o ON o.word_id = w.id + LEFT JOIN imm_subtitle_lines sl ON sl.line_id = o.line_id + WHERE w.id IN (${makePlaceholders(wordIds)}) + GROUP BY w.id + `, + ) + .all(...wordIds) as Array<{ + wordId: number; + frequency: number; + firstSeen: number | null; + lastSeen: number | null; + }>; + const updateStmt = db.prepare( + ` + UPDATE imm_words + SET frequency = ?, first_seen = ?, last_seen = ? + WHERE id = ? + `, + ); + const deleteStmt = db.prepare('DELETE FROM imm_words WHERE id = ?'); + + for (const row of rows) { + if (row.frequency <= 0 || row.firstSeen === null || row.lastSeen === null) { + deleteStmt.run(row.wordId); + continue; + } + updateStmt.run( + row.frequency, + Math.floor(row.firstSeen / 1000), + Math.floor(row.lastSeen / 1000), + row.wordId, + ); + } +} + +function refreshKanjiAggregates(db: DatabaseSync, kanjiIds: number[]): void { + if (kanjiIds.length === 0) { + return; + } + + const rows = db + .prepare( + ` + SELECT + k.id AS kanjiId, + COALESCE(SUM(o.occurrence_count), 0) AS frequency, + MIN(COALESCE(sl.CREATED_DATE, sl.LAST_UPDATE_DATE)) AS firstSeen, + MAX(COALESCE(sl.LAST_UPDATE_DATE, sl.CREATED_DATE)) AS lastSeen + FROM imm_kanji k + LEFT JOIN imm_kanji_line_occurrences o ON o.kanji_id = k.id + LEFT JOIN imm_subtitle_lines sl ON sl.line_id = o.line_id + WHERE k.id IN (${makePlaceholders(kanjiIds)}) + GROUP BY k.id + `, + ) + .all(...kanjiIds) as Array<{ + kanjiId: number; + frequency: number; + firstSeen: number | null; + lastSeen: number | null; + }>; + const updateStmt = db.prepare( + ` + UPDATE imm_kanji + SET frequency = ?, first_seen = ?, last_seen = ? 
+ WHERE id = ? + `, + ); + const deleteStmt = db.prepare('DELETE FROM imm_kanji WHERE id = ?'); + + for (const row of rows) { + if (row.frequency <= 0 || row.firstSeen === null || row.lastSeen === null) { + deleteStmt.run(row.kanjiId); + continue; + } + updateStmt.run( + row.frequency, + Math.floor(row.firstSeen / 1000), + Math.floor(row.lastSeen / 1000), + row.kanjiId, + ); + } +} + +export function refreshLexicalAggregates( + db: DatabaseSync, + wordIds: number[], + kanjiIds: number[], +): void { + refreshWordAggregates(db, [...new Set(wordIds)]); + refreshKanjiAggregates(db, [...new Set(kanjiIds)]); +} + +export function deleteSessionsByIds(db: DatabaseSync, sessionIds: number[]): void { + if (sessionIds.length === 0) { + return; + } + + const placeholders = makePlaceholders(sessionIds); + db.prepare(`DELETE FROM imm_subtitle_lines WHERE session_id IN (${placeholders})`).run( + ...sessionIds, + ); + db.prepare(`DELETE FROM imm_session_telemetry WHERE session_id IN (${placeholders})`).run( + ...sessionIds, + ); + db.prepare(`DELETE FROM imm_session_events WHERE session_id IN (${placeholders})`).run( + ...sessionIds, + ); + db.prepare(`DELETE FROM imm_sessions WHERE session_id IN (${placeholders})`).run(...sessionIds); +} + +export function toDbMs(ms: number | bigint): bigint { + if (typeof ms === 'bigint') { + return ms; + } + if (!Number.isFinite(ms)) { + throw new TypeError(`Invalid database timestamp: ${ms}`); + } + return BigInt(Math.trunc(ms)); +} diff --git a/src/core/services/immersion-tracker/query-trends.ts b/src/core/services/immersion-tracker/query-trends.ts new file mode 100644 index 0000000..c72fae4 --- /dev/null +++ b/src/core/services/immersion-tracker/query-trends.ts @@ -0,0 +1,660 @@ +import type { DatabaseSync } from './sqlite'; +import type { ImmersionSessionRollupRow } from './types'; +import { ACTIVE_SESSION_METRICS_CTE, makePlaceholders } from './query-shared'; +import { getDailyRollups, getMonthlyRollups } from './query-sessions'; + +type 
TrendRange = '7d' | '30d' | '90d' | 'all'; +type TrendGroupBy = 'day' | 'month'; + +interface TrendChartPoint { + label: string; + value: number; +} + +interface TrendPerAnimePoint { + epochDay: number; + animeTitle: string; + value: number; +} + +interface TrendSessionMetricRow { + startedAtMs: number; + videoId: number | null; + canonicalTitle: string | null; + animeTitle: string | null; + activeWatchedMs: number; + tokensSeen: number; + cardsMined: number; + yomitanLookupCount: number; +} + +export interface TrendsDashboardQueryResult { + activity: { + watchTime: TrendChartPoint[]; + cards: TrendChartPoint[]; + words: TrendChartPoint[]; + sessions: TrendChartPoint[]; + }; + progress: { + watchTime: TrendChartPoint[]; + sessions: TrendChartPoint[]; + words: TrendChartPoint[]; + newWords: TrendChartPoint[]; + cards: TrendChartPoint[]; + episodes: TrendChartPoint[]; + lookups: TrendChartPoint[]; + }; + ratios: { + lookupsPerHundred: TrendChartPoint[]; + }; + animePerDay: { + episodes: TrendPerAnimePoint[]; + watchTime: TrendPerAnimePoint[]; + cards: TrendPerAnimePoint[]; + words: TrendPerAnimePoint[]; + lookups: TrendPerAnimePoint[]; + lookupsPerHundred: TrendPerAnimePoint[]; + }; + animeCumulative: { + watchTime: TrendPerAnimePoint[]; + episodes: TrendPerAnimePoint[]; + cards: TrendPerAnimePoint[]; + words: TrendPerAnimePoint[]; + }; + patterns: { + watchTimeByDayOfWeek: TrendChartPoint[]; + watchTimeByHour: TrendChartPoint[]; + }; +} + +const TREND_DAY_LIMITS: Record, number> = { + '7d': 7, + '30d': 30, + '90d': 90, +}; + +const DAY_NAMES = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat']; + +function getTrendDayLimit(range: TrendRange): number { + return range === 'all' ? 
365 : TREND_DAY_LIMITS[range]; +} + +function getTrendMonthlyLimit(range: TrendRange): number { + if (range === 'all') { + return 120; + } + const now = new Date(); + const cutoff = new Date( + now.getFullYear(), + now.getMonth(), + now.getDate() - (TREND_DAY_LIMITS[range] - 1), + ); + return Math.max(1, (now.getFullYear() - cutoff.getFullYear()) * 12 + now.getMonth() - cutoff.getMonth() + 1); +} + +function getTrendCutoffMs(range: TrendRange): number | null { + if (range === 'all') { + return null; + } + const dayLimit = getTrendDayLimit(range); + const now = new Date(); + const localMidnight = new Date(now.getFullYear(), now.getMonth(), now.getDate()).getTime(); + return localMidnight - (dayLimit - 1) * 86_400_000; +} + +function makeTrendLabel(value: number): string { + if (value > 100_000) { + const year = Math.floor(value / 100); + const month = value % 100; + return new Date(Date.UTC(year, month - 1, 1)).toLocaleDateString(undefined, { + month: 'short', + year: '2-digit', + }); + } + + return new Date(value * 86_400_000).toLocaleDateString(undefined, { + month: 'short', + day: 'numeric', + }); +} + +function getLocalEpochDay(timestampMs: number): number { + const date = new Date(timestampMs); + return Math.floor((timestampMs - date.getTimezoneOffset() * 60_000) / 86_400_000); +} + +function getLocalDateForEpochDay(epochDay: number): Date { + const utcDate = new Date(epochDay * 86_400_000); + return new Date(utcDate.getTime() + utcDate.getTimezoneOffset() * 60_000); +} + +function getLocalMonthKey(timestampMs: number): number { + const date = new Date(timestampMs); + return date.getFullYear() * 100 + date.getMonth() + 1; +} + +function getTrendSessionWordCount(session: Pick): number { + return session.tokensSeen; +} + +function resolveTrendAnimeTitle(value: { + animeTitle: string | null; + canonicalTitle: string | null; +}): string { + return value.animeTitle ?? value.canonicalTitle ?? 
'Unknown'; +} + +function accumulatePoints(points: TrendChartPoint[]): TrendChartPoint[] { + let sum = 0; + return points.map((point) => { + sum += point.value; + return { + label: point.label, + value: sum, + }; + }); +} + +function buildAggregatedTrendRows(rollups: ImmersionSessionRollupRow[]) { + const byKey = new Map< + number, + { activeMin: number; cards: number; words: number; sessions: number } + >(); + + for (const rollup of rollups) { + const existing = byKey.get(rollup.rollupDayOrMonth) ?? { + activeMin: 0, + cards: 0, + words: 0, + sessions: 0, + }; + existing.activeMin += rollup.totalActiveMin; + existing.cards += rollup.totalCards; + existing.words += rollup.totalTokensSeen; + existing.sessions += rollup.totalSessions; + byKey.set(rollup.rollupDayOrMonth, existing); + } + + return Array.from(byKey.entries()) + .sort(([left], [right]) => left - right) + .map(([key, value]) => ({ + label: makeTrendLabel(key), + activeMin: Math.round(value.activeMin), + cards: value.cards, + words: value.words, + sessions: value.sessions, + })); +} + +function buildWatchTimeByDayOfWeek(sessions: TrendSessionMetricRow[]): TrendChartPoint[] { + const totals = new Array(7).fill(0); + for (const session of sessions) { + totals[new Date(session.startedAtMs).getDay()] += session.activeWatchedMs; + } + return DAY_NAMES.map((name, index) => ({ + label: name, + value: Math.round(totals[index] / 60_000), + })); +} + +function buildWatchTimeByHour(sessions: TrendSessionMetricRow[]): TrendChartPoint[] { + const totals = new Array(24).fill(0); + for (const session of sessions) { + totals[new Date(session.startedAtMs).getHours()] += session.activeWatchedMs; + } + return totals.map((ms, index) => ({ + label: `${String(index).padStart(2, '0')}:00`, + value: Math.round(ms / 60_000), + })); +} + +function dayLabel(epochDay: number): string { + return getLocalDateForEpochDay(epochDay).toLocaleDateString(undefined, { + month: 'short', + day: 'numeric', + }); +} + +function 
buildSessionSeriesByDay( + sessions: TrendSessionMetricRow[], + getValue: (session: TrendSessionMetricRow) => number, +): TrendChartPoint[] { + const byDay = new Map(); + for (const session of sessions) { + const epochDay = getLocalEpochDay(session.startedAtMs); + byDay.set(epochDay, (byDay.get(epochDay) ?? 0) + getValue(session)); + } + return Array.from(byDay.entries()) + .sort(([left], [right]) => left - right) + .map(([epochDay, value]) => ({ label: dayLabel(epochDay), value })); +} + +function buildSessionSeriesByMonth( + sessions: TrendSessionMetricRow[], + getValue: (session: TrendSessionMetricRow) => number, +): TrendChartPoint[] { + const byMonth = new Map(); + for (const session of sessions) { + const monthKey = getLocalMonthKey(session.startedAtMs); + byMonth.set(monthKey, (byMonth.get(monthKey) ?? 0) + getValue(session)); + } + return Array.from(byMonth.entries()) + .sort(([left], [right]) => left - right) + .map(([monthKey, value]) => ({ label: makeTrendLabel(monthKey), value })); +} + +function buildLookupsPerHundredWords( + sessions: TrendSessionMetricRow[], + groupBy: TrendGroupBy, +): TrendChartPoint[] { + const lookupsByBucket = new Map(); + const wordsByBucket = new Map(); + + for (const session of sessions) { + const bucketKey = + groupBy === 'month' ? getLocalMonthKey(session.startedAtMs) : getLocalEpochDay(session.startedAtMs); + lookupsByBucket.set( + bucketKey, + (lookupsByBucket.get(bucketKey) ?? 0) + session.yomitanLookupCount, + ); + wordsByBucket.set( + bucketKey, + (wordsByBucket.get(bucketKey) ?? 0) + getTrendSessionWordCount(session), + ); + } + + return Array.from(lookupsByBucket.entries()) + .sort(([left], [right]) => left - right) + .map(([bucketKey, lookups]) => { + const words = wordsByBucket.get(bucketKey) ?? 0; + return { + label: groupBy === 'month' ? makeTrendLabel(bucketKey) : dayLabel(bucketKey), + value: words > 0 ? 
+((lookups / words) * 100).toFixed(1) : 0, + }; + }); +} + +function buildPerAnimeFromSessions( + sessions: TrendSessionMetricRow[], + getValue: (session: TrendSessionMetricRow) => number, +): TrendPerAnimePoint[] { + const byAnime = new Map>(); + + for (const session of sessions) { + const animeTitle = resolveTrendAnimeTitle(session); + const epochDay = getLocalEpochDay(session.startedAtMs); + const dayMap = byAnime.get(animeTitle) ?? new Map(); + dayMap.set(epochDay, (dayMap.get(epochDay) ?? 0) + getValue(session)); + byAnime.set(animeTitle, dayMap); + } + + const result: TrendPerAnimePoint[] = []; + for (const [animeTitle, dayMap] of byAnime) { + for (const [epochDay, value] of dayMap) { + result.push({ epochDay, animeTitle, value }); + } + } + return result; +} + +function buildLookupsPerHundredPerAnime(sessions: TrendSessionMetricRow[]): TrendPerAnimePoint[] { + const lookups = new Map>(); + const words = new Map>(); + + for (const session of sessions) { + const animeTitle = resolveTrendAnimeTitle(session); + const epochDay = getLocalEpochDay(session.startedAtMs); + + const lookupMap = lookups.get(animeTitle) ?? new Map(); + lookupMap.set(epochDay, (lookupMap.get(epochDay) ?? 0) + session.yomitanLookupCount); + lookups.set(animeTitle, lookupMap); + + const wordMap = words.get(animeTitle) ?? new Map(); + wordMap.set(epochDay, (wordMap.get(epochDay) ?? 0) + getTrendSessionWordCount(session)); + words.set(animeTitle, wordMap); + } + + const result: TrendPerAnimePoint[] = []; + for (const [animeTitle, dayMap] of lookups) { + const wordMap = words.get(animeTitle) ?? new Map(); + for (const [epochDay, lookupCount] of dayMap) { + const wordCount = wordMap.get(epochDay) ?? 0; + result.push({ + epochDay, + animeTitle, + value: wordCount > 0 ? 
+((lookupCount / wordCount) * 100).toFixed(1) : 0, + }); + } + } + return result; +} + +function buildCumulativePerAnime(points: TrendPerAnimePoint[]): TrendPerAnimePoint[] { + const byAnime = new Map>(); + const allDays = new Set(); + + for (const point of points) { + const dayMap = byAnime.get(point.animeTitle) ?? new Map(); + dayMap.set(point.epochDay, (dayMap.get(point.epochDay) ?? 0) + point.value); + byAnime.set(point.animeTitle, dayMap); + allDays.add(point.epochDay); + } + + const sortedDays = [...allDays].sort((left, right) => left - right); + if (sortedDays.length === 0) { + return []; + } + + const minDay = sortedDays[0]!; + const maxDay = sortedDays[sortedDays.length - 1]!; + const result: TrendPerAnimePoint[] = []; + + for (const [animeTitle, dayMap] of byAnime) { + const firstDay = Math.min(...dayMap.keys()); + let cumulative = 0; + for (let epochDay = minDay; epochDay <= maxDay; epochDay += 1) { + if (epochDay < firstDay) { + continue; + } + cumulative += dayMap.get(epochDay) ?? 0; + result.push({ epochDay, animeTitle, value: cumulative }); + } + } + + return result; +} + +function getVideoAnimeTitleMap( + db: DatabaseSync, + videoIds: Array, +): Map { + const uniqueIds = [ + ...new Set(videoIds.filter((value): value is number => typeof value === 'number')), + ]; + if (uniqueIds.length === 0) { + return new Map(); + } + + const rows = db + .prepare( + ` + SELECT + v.video_id AS videoId, + COALESCE(a.canonical_title, v.canonical_title, 'Unknown') AS animeTitle + FROM imm_videos v + LEFT JOIN imm_anime a ON a.anime_id = v.anime_id + WHERE v.video_id IN (${makePlaceholders(uniqueIds)}) + `, + ) + .all(...uniqueIds) as Array<{ videoId: number; animeTitle: string }>; + + return new Map(rows.map((row) => [row.videoId, row.animeTitle])); +} + +function resolveVideoAnimeTitle( + videoId: number | null, + titlesByVideoId: Map, +): string { + if (videoId === null) { + return 'Unknown'; + } + return titlesByVideoId.get(videoId) ?? 
'Unknown'; +} + +function buildPerAnimeFromDailyRollups( + rollups: ImmersionSessionRollupRow[], + titlesByVideoId: Map, + getValue: (rollup: ImmersionSessionRollupRow) => number, +): TrendPerAnimePoint[] { + const byAnime = new Map>(); + + for (const rollup of rollups) { + const animeTitle = resolveVideoAnimeTitle(rollup.videoId, titlesByVideoId); + const dayMap = byAnime.get(animeTitle) ?? new Map(); + dayMap.set( + rollup.rollupDayOrMonth, + (dayMap.get(rollup.rollupDayOrMonth) ?? 0) + getValue(rollup), + ); + byAnime.set(animeTitle, dayMap); + } + + const result: TrendPerAnimePoint[] = []; + for (const [animeTitle, dayMap] of byAnime) { + for (const [epochDay, value] of dayMap) { + result.push({ epochDay, animeTitle, value }); + } + } + return result; +} + +function buildEpisodesPerAnimeFromDailyRollups( + rollups: ImmersionSessionRollupRow[], + titlesByVideoId: Map, +): TrendPerAnimePoint[] { + const byAnime = new Map>>(); + + for (const rollup of rollups) { + if (rollup.videoId === null) { + continue; + } + const animeTitle = resolveVideoAnimeTitle(rollup.videoId, titlesByVideoId); + const dayMap = byAnime.get(animeTitle) ?? new Map(); + const videoIds = dayMap.get(rollup.rollupDayOrMonth) ?? new Set(); + videoIds.add(rollup.videoId); + dayMap.set(rollup.rollupDayOrMonth, videoIds); + byAnime.set(animeTitle, dayMap); + } + + const result: TrendPerAnimePoint[] = []; + for (const [animeTitle, dayMap] of byAnime) { + for (const [epochDay, videoIds] of dayMap) { + result.push({ epochDay, animeTitle, value: videoIds.size }); + } + } + return result; +} + +function buildEpisodesPerDayFromDailyRollups( + rollups: ImmersionSessionRollupRow[], +): TrendChartPoint[] { + const byDay = new Map>(); + + for (const rollup of rollups) { + if (rollup.videoId === null) { + continue; + } + const videoIds = byDay.get(rollup.rollupDayOrMonth) ?? 
new Set(); + videoIds.add(rollup.videoId); + byDay.set(rollup.rollupDayOrMonth, videoIds); + } + + return Array.from(byDay.entries()) + .sort(([left], [right]) => left - right) + .map(([epochDay, videoIds]) => ({ + label: dayLabel(epochDay), + value: videoIds.size, + })); +} + +function buildEpisodesPerMonthFromRollups(rollups: ImmersionSessionRollupRow[]): TrendChartPoint[] { + const byMonth = new Map>(); + + for (const rollup of rollups) { + if (rollup.videoId === null) { + continue; + } + const videoIds = byMonth.get(rollup.rollupDayOrMonth) ?? new Set(); + videoIds.add(rollup.videoId); + byMonth.set(rollup.rollupDayOrMonth, videoIds); + } + + return Array.from(byMonth.entries()) + .sort(([left], [right]) => left - right) + .map(([monthKey, videoIds]) => ({ + label: makeTrendLabel(monthKey), + value: videoIds.size, + })); +} + +function getTrendSessionMetrics( + db: DatabaseSync, + cutoffMs: number | null, +): TrendSessionMetricRow[] { + const whereClause = cutoffMs === null ? '' : 'WHERE s.started_at_ms >= ?'; + const prepared = db.prepare(` + ${ACTIVE_SESSION_METRICS_CTE} + SELECT + s.started_at_ms AS startedAtMs, + s.video_id AS videoId, + v.canonical_title AS canonicalTitle, + a.canonical_title AS animeTitle, + COALESCE(asm.activeWatchedMs, s.active_watched_ms, 0) AS activeWatchedMs, + COALESCE(asm.tokensSeen, s.tokens_seen, 0) AS tokensSeen, + COALESCE(asm.cardsMined, s.cards_mined, 0) AS cardsMined, + COALESCE(asm.yomitanLookupCount, s.yomitan_lookup_count, 0) AS yomitanLookupCount + FROM imm_sessions s + LEFT JOIN active_session_metrics asm ON asm.sessionId = s.session_id + LEFT JOIN imm_videos v ON v.video_id = s.video_id + LEFT JOIN imm_anime a ON a.anime_id = v.anime_id + ${whereClause} + ORDER BY s.started_at_ms ASC + `); + + return (cutoffMs === null ? 
prepared.all() : prepared.all(cutoffMs)) as TrendSessionMetricRow[]; +} + +function buildNewWordsPerDay(db: DatabaseSync, cutoffMs: number | null): TrendChartPoint[] { + const whereClause = cutoffMs === null ? '' : 'AND first_seen >= ?'; + const prepared = db.prepare(` + SELECT + CAST(julianday(first_seen, 'unixepoch', 'localtime') - 2440587.5 AS INTEGER) AS epochDay, + COUNT(*) AS wordCount + FROM imm_words + WHERE first_seen IS NOT NULL + ${whereClause} + GROUP BY epochDay + ORDER BY epochDay ASC + `); + + const rows = ( + cutoffMs === null ? prepared.all() : prepared.all(Math.floor(cutoffMs / 1000)) + ) as Array<{ + epochDay: number; + wordCount: number; + }>; + + return rows.map((row) => ({ + label: dayLabel(row.epochDay), + value: row.wordCount, + })); +} + +function buildNewWordsPerMonth(db: DatabaseSync, cutoffMs: number | null): TrendChartPoint[] { + const whereClause = cutoffMs === null ? '' : 'AND first_seen >= ?'; + const prepared = db.prepare(` + SELECT + CAST(strftime('%Y%m', first_seen, 'unixepoch', 'localtime') AS INTEGER) AS monthKey, + COUNT(*) AS wordCount + FROM imm_words + WHERE first_seen IS NOT NULL + ${whereClause} + GROUP BY monthKey + ORDER BY monthKey ASC + `); + + const rows = ( + cutoffMs === null ? prepared.all() : prepared.all(Math.floor(cutoffMs / 1000)) + ) as Array<{ + monthKey: number; + wordCount: number; + }>; + + return rows.map((row) => ({ + label: makeTrendLabel(row.monthKey), + value: row.wordCount, + })); +} + +export function getTrendsDashboard( + db: DatabaseSync, + range: TrendRange = '30d', + groupBy: TrendGroupBy = 'day', +): TrendsDashboardQueryResult { + const dayLimit = getTrendDayLimit(range); + const monthlyLimit = getTrendMonthlyLimit(range); + const cutoffMs = getTrendCutoffMs(range); + const useMonthlyBuckets = groupBy === 'month'; + const dailyRollups = getDailyRollups(db, dayLimit); + const monthlyRollups = getMonthlyRollups(db, monthlyLimit); + + const chartRollups = useMonthlyBuckets ? 
monthlyRollups : dailyRollups; + const sessions = getTrendSessionMetrics(db, cutoffMs); + const titlesByVideoId = getVideoAnimeTitleMap( + db, + dailyRollups.map((rollup) => rollup.videoId), + ); + + const aggregatedRows = buildAggregatedTrendRows(chartRollups); + const activity = { + watchTime: aggregatedRows.map((row) => ({ label: row.label, value: row.activeMin })), + cards: aggregatedRows.map((row) => ({ label: row.label, value: row.cards })), + words: aggregatedRows.map((row) => ({ label: row.label, value: row.words })), + sessions: aggregatedRows.map((row) => ({ label: row.label, value: row.sessions })), + }; + + const animePerDay = { + episodes: buildEpisodesPerAnimeFromDailyRollups(dailyRollups, titlesByVideoId), + watchTime: buildPerAnimeFromDailyRollups(dailyRollups, titlesByVideoId, (rollup) => + rollup.totalActiveMin, + ), + cards: buildPerAnimeFromDailyRollups( + dailyRollups, + titlesByVideoId, + (rollup) => rollup.totalCards, + ), + words: buildPerAnimeFromDailyRollups( + dailyRollups, + titlesByVideoId, + (rollup) => rollup.totalTokensSeen, + ), + lookups: buildPerAnimeFromSessions(sessions, (session) => session.yomitanLookupCount), + lookupsPerHundred: buildLookupsPerHundredPerAnime(sessions), + }; + + return { + activity, + progress: { + watchTime: accumulatePoints(activity.watchTime), + sessions: accumulatePoints(activity.sessions), + words: accumulatePoints(activity.words), + newWords: accumulatePoints( + useMonthlyBuckets ? buildNewWordsPerMonth(db, cutoffMs) : buildNewWordsPerDay(db, cutoffMs), + ), + cards: accumulatePoints(activity.cards), + episodes: accumulatePoints( + useMonthlyBuckets + ? buildEpisodesPerMonthFromRollups(monthlyRollups) + : buildEpisodesPerDayFromDailyRollups(dailyRollups), + ), + lookups: accumulatePoints( + useMonthlyBuckets + ? 
buildSessionSeriesByMonth(sessions, (session) => session.yomitanLookupCount) + : buildSessionSeriesByDay(sessions, (session) => session.yomitanLookupCount), + ), + }, + ratios: { + lookupsPerHundred: buildLookupsPerHundredWords(sessions, groupBy), + }, + animePerDay, + animeCumulative: { + watchTime: buildCumulativePerAnime(animePerDay.watchTime), + episodes: buildCumulativePerAnime(animePerDay.episodes), + cards: buildCumulativePerAnime(animePerDay.cards), + words: buildCumulativePerAnime(animePerDay.words), + }, + patterns: { + watchTimeByDayOfWeek: buildWatchTimeByDayOfWeek(sessions), + watchTimeByHour: buildWatchTimeByHour(sessions), + }, + }; +} diff --git a/src/core/services/immersion-tracker/query.ts b/src/core/services/immersion-tracker/query.ts index 8ca52ae..e45ca2e 100644 --- a/src/core/services/immersion-tracker/query.ts +++ b/src/core/services/immersion-tracker/query.ts @@ -1,2535 +1,5 @@ -import { createHash } from 'node:crypto'; -import type { DatabaseSync } from './sqlite'; -import type { - AnimeAnilistEntryRow, - AnimeDetailRow, - AnimeEpisodeRow, - AnimeLibraryRow, - AnimeWordRow, - EpisodeCardEventRow, - EpisodesPerDayRow, - ImmersionSessionRollupRow, - KanjiAnimeAppearanceRow, - KanjiDetailRow, - KanjiOccurrenceRow, - KanjiStatsRow, - KanjiWordRow, - MediaArtRow, - MediaDetailRow, - MediaLibraryRow, - NewAnimePerDayRow, - SessionEventRow, - SessionSummaryQueryRow, - SessionTimelineRow, - SimilarWordRow, - StreakCalendarRow, - VocabularyCleanupSummary, - WatchTimePerAnimeRow, - WordAnimeAppearanceRow, - WordDetailRow, - WordOccurrenceRow, - VocabularyStatsRow, -} from './types'; -import { buildCoverBlobReference, normalizeCoverBlobBytes } from './storage'; -import { rebuildLifetimeSummariesInTransaction } from './lifetime'; -import { rebuildRollupsInTransaction } from './maintenance'; -import { PartOfSpeech, type MergedToken } from '../../../types'; -import { shouldExcludeTokenFromVocabularyPersistence } from '../tokenizer/annotation-stage'; 
-import { deriveStoredPartOfSpeech } from '../tokenizer/part-of-speech'; - -type CleanupVocabularyRow = { - id: number; - word: string; - headword: string; - reading: string | null; - part_of_speech: string | null; - pos1: string | null; - pos2: string | null; - pos3: string | null; - first_seen: number | null; - last_seen: number | null; - frequency: number | null; -}; - -type ResolvedVocabularyPos = { - headword: string; - reading: string; - hasPosMetadata: boolean; - partOfSpeech: PartOfSpeech; - pos1: string; - pos2: string; - pos3: string; -}; - -type CleanupVocabularyStatsOptions = { - resolveLegacyPos?: (row: CleanupVocabularyRow) => Promise<{ - headword: string; - reading: string; - partOfSpeech: string; - pos1: string; - pos2: string; - pos3: string; - } | null>; -}; - -const ACTIVE_SESSION_METRICS_CTE = ` - WITH active_session_metrics AS ( - SELECT - t.session_id AS sessionId, - MAX(t.total_watched_ms) AS totalWatchedMs, - MAX(t.active_watched_ms) AS activeWatchedMs, - MAX(t.lines_seen) AS linesSeen, - MAX(t.tokens_seen) AS tokensSeen, - MAX(t.cards_mined) AS cardsMined, - MAX(t.lookup_count) AS lookupCount, - MAX(t.lookup_hits) AS lookupHits, - MAX(t.yomitan_lookup_count) AS yomitanLookupCount - FROM imm_session_telemetry t - JOIN imm_sessions s ON s.session_id = t.session_id - WHERE s.ended_at_ms IS NULL - GROUP BY t.session_id - ) -`; - -function resolvedCoverBlobExpr(mediaAlias: string, blobStoreAlias: string): string { - return `COALESCE(${blobStoreAlias}.cover_blob, CASE WHEN ${mediaAlias}.cover_blob_hash IS NULL THEN ${mediaAlias}.cover_blob ELSE NULL END)`; -} - -function cleanupUnusedCoverArtBlobHash(db: DatabaseSync, blobHash: string | null): void { - if (!blobHash) { - return; - } - db.prepare( - ` - DELETE FROM imm_cover_art_blobs - WHERE blob_hash = ? - AND NOT EXISTS ( - SELECT 1 - FROM imm_media_art - WHERE cover_blob_hash = ? 
- ) - `, - ).run(blobHash, blobHash); -} - -function findSharedCoverBlobHash( - db: DatabaseSync, - videoId: number, - anilistId: number | null, - coverUrl: string | null, -): string | null { - if (anilistId !== null) { - const byAnilist = db - .prepare( - ` - SELECT cover_blob_hash AS coverBlobHash - FROM imm_media_art - WHERE video_id != ? - AND anilist_id = ? - AND cover_blob_hash IS NOT NULL - ORDER BY fetched_at_ms DESC, video_id DESC - LIMIT 1 - `, - ) - .get(videoId, anilistId) as { coverBlobHash: string | null } | undefined; - if (byAnilist?.coverBlobHash) { - return byAnilist.coverBlobHash; - } - } - - if (coverUrl) { - const byUrl = db - .prepare( - ` - SELECT cover_blob_hash AS coverBlobHash - FROM imm_media_art - WHERE video_id != ? - AND cover_url = ? - AND cover_blob_hash IS NOT NULL - ORDER BY fetched_at_ms DESC, video_id DESC - LIMIT 1 - `, - ) - .get(videoId, coverUrl) as { coverBlobHash: string | null } | undefined; - return byUrl?.coverBlobHash ?? null; - } - - return null; -} - -function makePlaceholders(values: number[]): string { - return values.map(() => '?').join(','); -} - -function getAffectedWordIdsForSessions(db: DatabaseSync, sessionIds: number[]): number[] { - if (sessionIds.length === 0) { - return []; - } - - return ( - db - .prepare( - ` - SELECT DISTINCT o.word_id AS wordId - FROM imm_word_line_occurrences o - JOIN imm_subtitle_lines sl ON sl.line_id = o.line_id - WHERE sl.session_id IN (${makePlaceholders(sessionIds)}) - `, - ) - .all(...sessionIds) as Array<{ wordId: number }> - ).map((row) => row.wordId); -} - -function getAffectedKanjiIdsForSessions(db: DatabaseSync, sessionIds: number[]): number[] { - if (sessionIds.length === 0) { - return []; - } - - return ( - db - .prepare( - ` - SELECT DISTINCT o.kanji_id AS kanjiId - FROM imm_kanji_line_occurrences o - JOIN imm_subtitle_lines sl ON sl.line_id = o.line_id - WHERE sl.session_id IN (${makePlaceholders(sessionIds)}) - `, - ) - .all(...sessionIds) as Array<{ kanjiId: number 
}> - ).map((row) => row.kanjiId); -} - -function getAffectedWordIdsForVideo(db: DatabaseSync, videoId: number): number[] { - return ( - db - .prepare( - ` - SELECT DISTINCT o.word_id AS wordId - FROM imm_word_line_occurrences o - JOIN imm_subtitle_lines sl ON sl.line_id = o.line_id - WHERE sl.video_id = ? - `, - ) - .all(videoId) as Array<{ wordId: number }> - ).map((row) => row.wordId); -} - -function getAffectedKanjiIdsForVideo(db: DatabaseSync, videoId: number): number[] { - return ( - db - .prepare( - ` - SELECT DISTINCT o.kanji_id AS kanjiId - FROM imm_kanji_line_occurrences o - JOIN imm_subtitle_lines sl ON sl.line_id = o.line_id - WHERE sl.video_id = ? - `, - ) - .all(videoId) as Array<{ kanjiId: number }> - ).map((row) => row.kanjiId); -} - -function refreshWordAggregates(db: DatabaseSync, wordIds: number[]): void { - if (wordIds.length === 0) { - return; - } - - const rows = db - .prepare( - ` - SELECT - w.id AS wordId, - COALESCE(SUM(o.occurrence_count), 0) AS frequency, - MIN(COALESCE(sl.CREATED_DATE, sl.LAST_UPDATE_DATE)) AS firstSeen, - MAX(COALESCE(sl.LAST_UPDATE_DATE, sl.CREATED_DATE)) AS lastSeen - FROM imm_words w - LEFT JOIN imm_word_line_occurrences o ON o.word_id = w.id - LEFT JOIN imm_subtitle_lines sl ON sl.line_id = o.line_id - WHERE w.id IN (${makePlaceholders(wordIds)}) - GROUP BY w.id - `, - ) - .all(...wordIds) as Array<{ - wordId: number; - frequency: number; - firstSeen: number | null; - lastSeen: number | null; - }>; - const updateStmt = db.prepare( - ` - UPDATE imm_words - SET frequency = ?, first_seen = ?, last_seen = ? - WHERE id = ? 
- `, - ); - const deleteStmt = db.prepare('DELETE FROM imm_words WHERE id = ?'); - - for (const row of rows) { - if (row.frequency <= 0 || row.firstSeen === null || row.lastSeen === null) { - deleteStmt.run(row.wordId); - continue; - } - updateStmt.run(row.frequency, row.firstSeen, row.lastSeen, row.wordId); - } -} - -function refreshKanjiAggregates(db: DatabaseSync, kanjiIds: number[]): void { - if (kanjiIds.length === 0) { - return; - } - - const rows = db - .prepare( - ` - SELECT - k.id AS kanjiId, - COALESCE(SUM(o.occurrence_count), 0) AS frequency, - MIN(COALESCE(sl.CREATED_DATE, sl.LAST_UPDATE_DATE)) AS firstSeen, - MAX(COALESCE(sl.LAST_UPDATE_DATE, sl.CREATED_DATE)) AS lastSeen - FROM imm_kanji k - LEFT JOIN imm_kanji_line_occurrences o ON o.kanji_id = k.id - LEFT JOIN imm_subtitle_lines sl ON sl.line_id = o.line_id - WHERE k.id IN (${makePlaceholders(kanjiIds)}) - GROUP BY k.id - `, - ) - .all(...kanjiIds) as Array<{ - kanjiId: number; - frequency: number; - firstSeen: number | null; - lastSeen: number | null; - }>; - const updateStmt = db.prepare( - ` - UPDATE imm_kanji - SET frequency = ?, first_seen = ?, last_seen = ? - WHERE id = ? 
- `, - ); - const deleteStmt = db.prepare('DELETE FROM imm_kanji WHERE id = ?'); - - for (const row of rows) { - if (row.frequency <= 0 || row.firstSeen === null || row.lastSeen === null) { - deleteStmt.run(row.kanjiId); - continue; - } - updateStmt.run(row.frequency, row.firstSeen, row.lastSeen, row.kanjiId); - } -} - -function refreshLexicalAggregates(db: DatabaseSync, wordIds: number[], kanjiIds: number[]): void { - refreshWordAggregates(db, [...new Set(wordIds)]); - refreshKanjiAggregates(db, [...new Set(kanjiIds)]); -} - -function deleteSessionsByIds(db: DatabaseSync, sessionIds: number[]): void { - if (sessionIds.length === 0) { - return; - } - - const placeholders = makePlaceholders(sessionIds); - db.prepare(`DELETE FROM imm_subtitle_lines WHERE session_id IN (${placeholders})`).run( - ...sessionIds, - ); - db.prepare(`DELETE FROM imm_session_telemetry WHERE session_id IN (${placeholders})`).run( - ...sessionIds, - ); - db.prepare(`DELETE FROM imm_session_events WHERE session_id IN (${placeholders})`).run( - ...sessionIds, - ); - db.prepare(`DELETE FROM imm_sessions WHERE session_id IN (${placeholders})`).run(...sessionIds); -} - -export function getSessionSummaries(db: DatabaseSync, limit = 50): SessionSummaryQueryRow[] { - const prepared = db.prepare(` - ${ACTIVE_SESSION_METRICS_CTE} - SELECT - s.session_id AS sessionId, - s.video_id AS videoId, - v.canonical_title AS canonicalTitle, - v.anime_id AS animeId, - a.canonical_title AS animeTitle, - s.started_at_ms AS startedAtMs, - s.ended_at_ms AS endedAtMs, - COALESCE(asm.totalWatchedMs, s.total_watched_ms, 0) AS totalWatchedMs, - COALESCE(asm.activeWatchedMs, s.active_watched_ms, 0) AS activeWatchedMs, - COALESCE(asm.linesSeen, s.lines_seen, 0) AS linesSeen, - COALESCE(asm.tokensSeen, s.tokens_seen, 0) AS tokensSeen, - COALESCE(asm.cardsMined, s.cards_mined, 0) AS cardsMined, - COALESCE(asm.lookupCount, s.lookup_count, 0) AS lookupCount, - COALESCE(asm.lookupHits, s.lookup_hits, 0) AS lookupHits, - 
COALESCE(asm.yomitanLookupCount, s.yomitan_lookup_count, 0) AS yomitanLookupCount - FROM imm_sessions s - LEFT JOIN active_session_metrics asm ON asm.sessionId = s.session_id - LEFT JOIN imm_videos v ON v.video_id = s.video_id - LEFT JOIN imm_anime a ON a.anime_id = v.anime_id - ORDER BY s.started_at_ms DESC - LIMIT ? - `); - return prepared.all(limit) as unknown as SessionSummaryQueryRow[]; -} - -export function getSessionTimeline( - db: DatabaseSync, - sessionId: number, - limit?: number, -): SessionTimelineRow[] { - if (limit === undefined) { - const prepared = db.prepare(` - SELECT - sample_ms AS sampleMs, - total_watched_ms AS totalWatchedMs, - active_watched_ms AS activeWatchedMs, - lines_seen AS linesSeen, - tokens_seen AS tokensSeen, - cards_mined AS cardsMined - FROM imm_session_telemetry - WHERE session_id = ? - ORDER BY sample_ms DESC, telemetry_id DESC - `); - return prepared.all(sessionId) as unknown as SessionTimelineRow[]; - } - - const prepared = db.prepare(` - SELECT - sample_ms AS sampleMs, - total_watched_ms AS totalWatchedMs, - active_watched_ms AS activeWatchedMs, - lines_seen AS linesSeen, - tokens_seen AS tokensSeen, - cards_mined AS cardsMined - FROM imm_session_telemetry - WHERE session_id = ? - ORDER BY sample_ms DESC, telemetry_id DESC - LIMIT ? - `); - return prepared.all(sessionId, limit) as unknown as SessionTimelineRow[]; -} - -/** Returns all distinct headwords in the vocabulary table (global). */ -export function getAllDistinctHeadwords(db: DatabaseSync): string[] { - const rows = db.prepare('SELECT DISTINCT headword FROM imm_words').all() as Array<{ - headword: string; - }>; - return rows.map((r) => r.headword); -} - -/** Returns distinct headwords seen for a specific anime. 
*/ -export function getAnimeDistinctHeadwords(db: DatabaseSync, animeId: number): string[] { - const rows = db - .prepare( - ` - SELECT DISTINCT w.headword - FROM imm_word_line_occurrences o - JOIN imm_subtitle_lines sl ON sl.line_id = o.line_id - JOIN imm_words w ON w.id = o.word_id - WHERE sl.anime_id = ? - `, - ) - .all(animeId) as Array<{ headword: string }>; - return rows.map((r) => r.headword); -} - -/** Returns distinct headwords seen for a specific video/media. */ -export function getMediaDistinctHeadwords(db: DatabaseSync, videoId: number): string[] { - const rows = db - .prepare( - ` - SELECT DISTINCT w.headword - FROM imm_word_line_occurrences o - JOIN imm_subtitle_lines sl ON sl.line_id = o.line_id - JOIN imm_words w ON w.id = o.word_id - WHERE sl.video_id = ? - `, - ) - .all(videoId) as Array<{ headword: string }>; - return rows.map((r) => r.headword); -} - -/** - * Returns the headword for each word seen in a session, grouped by line_index. - * Used to compute cumulative known-words counts for the session timeline chart. - */ -export function getSessionWordsByLine( - db: DatabaseSync, - sessionId: number, -): Array<{ lineIndex: number; headword: string; occurrenceCount: number }> { - const stmt = db.prepare(` - SELECT - sl.line_index AS lineIndex, - w.headword AS headword, - wlo.occurrence_count AS occurrenceCount - FROM imm_subtitle_lines sl - JOIN imm_word_line_occurrences wlo ON wlo.line_id = sl.line_id - JOIN imm_words w ON w.id = wlo.word_id - WHERE sl.session_id = ? 
- ORDER BY sl.line_index ASC - `); - return stmt.all(sessionId) as Array<{ - lineIndex: number; - headword: string; - occurrenceCount: number; - }>; -} - -export function getQueryHints(db: DatabaseSync): { - totalSessions: number; - activeSessions: number; - episodesToday: number; - activeAnimeCount: number; - totalEpisodesWatched: number; - totalAnimeCompleted: number; - totalActiveMin: number; - totalCards: number; - activeDays: number; - totalTokensSeen: number; - totalLookupCount: number; - totalLookupHits: number; - totalYomitanLookupCount: number; - newWordsToday: number; - newWordsThisWeek: number; -} { - const active = db.prepare('SELECT COUNT(*) AS total FROM imm_sessions WHERE ended_at_ms IS NULL'); - const activeSessions = Number((active.get() as { total?: number } | null)?.total ?? 0); - const lifetime = db - .prepare( - ` - SELECT - total_sessions AS totalSessions, - total_active_ms AS totalActiveMs, - total_cards AS totalCards, - active_days AS activeDays, - episodes_completed AS episodesCompleted, - anime_completed AS animeCompleted - FROM imm_lifetime_global - WHERE global_id = 1 - `, - ) - .get() as { - totalSessions: number; - totalActiveMs: number; - totalCards: number; - activeDays: number; - episodesCompleted: number; - animeCompleted: number; - } | null; - - const now = new Date(); - const todayLocal = Math.floor( - new Date(now.getFullYear(), now.getMonth(), now.getDate()).getTime() / 86_400_000, - ); - const episodesToday = - ( - db - .prepare( - ` - SELECT COUNT(DISTINCT s.video_id) AS count - FROM imm_sessions s - WHERE CAST(julianday(s.started_at_ms / 1000, 'unixepoch', 'localtime') - 2440587.5 AS INTEGER) = ? - `, - ) - .get(todayLocal) as { count: number } - )?.count ?? 
0; - - const thirtyDaysAgoMs = Date.now() - 30 * 86400000; - const activeAnimeCount = - ( - db - .prepare( - ` - SELECT COUNT(DISTINCT v.anime_id) AS count - FROM imm_sessions s - JOIN imm_videos v ON v.video_id = s.video_id - WHERE v.anime_id IS NOT NULL - AND s.started_at_ms >= ? - `, - ) - .get(thirtyDaysAgoMs) as { count: number } - )?.count ?? 0; - - const totalEpisodesWatched = Number(lifetime?.episodesCompleted ?? 0); - - const totalAnimeCompleted = Number(lifetime?.animeCompleted ?? 0); - - const totalSessions = Number(lifetime?.totalSessions ?? 0); - const totalActiveMin = Math.floor(Math.max(0, lifetime?.totalActiveMs ?? 0) / 60000); - const totalCards = Number(lifetime?.totalCards ?? 0); - const activeDays = Number(lifetime?.activeDays ?? 0); - - const lookupTotals = db - .prepare( - ` - SELECT - COALESCE(SUM(COALESCE(t.tokens_seen, s.tokens_seen, 0)), 0) AS totalTokensSeen, - COALESCE(SUM(COALESCE(t.lookup_count, s.lookup_count, 0)), 0) AS totalLookupCount, - COALESCE(SUM(COALESCE(t.lookup_hits, s.lookup_hits, 0)), 0) AS totalLookupHits, - COALESCE(SUM(COALESCE(t.yomitan_lookup_count, s.yomitan_lookup_count, 0)), 0) AS totalYomitanLookupCount - FROM imm_sessions s - LEFT JOIN ( - SELECT - session_id, - MAX(tokens_seen) AS tokens_seen, - MAX(lookup_count) AS lookup_count, - MAX(lookup_hits) AS lookup_hits, - MAX(yomitan_lookup_count) AS yomitan_lookup_count - FROM imm_session_telemetry - GROUP BY session_id - ) t ON t.session_id = s.session_id - WHERE s.ended_at_ms IS NOT NULL - `, - ) - .get() as { - totalTokensSeen: number; - totalLookupCount: number; - totalLookupHits: number; - totalYomitanLookupCount: number; - } | null; - - return { - totalSessions, - activeSessions, - episodesToday, - activeAnimeCount, - totalEpisodesWatched, - totalAnimeCompleted, - totalActiveMin, - totalCards, - activeDays, - totalTokensSeen: Number(lookupTotals?.totalTokensSeen ?? 0), - totalLookupCount: Number(lookupTotals?.totalLookupCount ?? 
0), - totalLookupHits: Number(lookupTotals?.totalLookupHits ?? 0), - totalYomitanLookupCount: Number(lookupTotals?.totalYomitanLookupCount ?? 0), - ...getNewWordCounts(db), - }; -} - -function getNewWordCounts(db: DatabaseSync): { newWordsToday: number; newWordsThisWeek: number } { - const now = new Date(); - const todayStartSec = new Date(now.getFullYear(), now.getMonth(), now.getDate()).getTime() / 1000; - const weekAgoSec = todayStartSec - 7 * 86_400; - - const row = db - .prepare( - ` - WITH headword_first_seen AS ( - SELECT - headword, - MIN(first_seen) AS first_seen - FROM imm_words - WHERE first_seen IS NOT NULL - AND headword IS NOT NULL - AND headword != '' - GROUP BY headword - ) - SELECT - COALESCE(SUM(CASE WHEN first_seen >= ? THEN 1 ELSE 0 END), 0) AS today, - COALESCE(SUM(CASE WHEN first_seen >= ? THEN 1 ELSE 0 END), 0) AS week - FROM headword_first_seen - `, - ) - .get(todayStartSec, weekAgoSec) as { today: number; week: number } | null; - - return { - newWordsToday: Number(row?.today ?? 0), - newWordsThisWeek: Number(row?.week ?? 0), - }; -} - -export function getDailyRollups(db: DatabaseSync, limit = 60): ImmersionSessionRollupRow[] { - const prepared = db.prepare(` - WITH recent_days AS ( - SELECT DISTINCT rollup_day - FROM imm_daily_rollups - ORDER BY rollup_day DESC - LIMIT ? 
- ) - SELECT - r.rollup_day AS rollupDayOrMonth, - r.video_id AS videoId, - r.total_sessions AS totalSessions, - r.total_active_min AS totalActiveMin, - r.total_lines_seen AS totalLinesSeen, - r.total_tokens_seen AS totalTokensSeen, - r.total_cards AS totalCards, - r.cards_per_hour AS cardsPerHour, - r.tokens_per_min AS tokensPerMin, - r.lookup_hit_rate AS lookupHitRate - FROM imm_daily_rollups r - WHERE r.rollup_day IN (SELECT rollup_day FROM recent_days) - ORDER BY r.rollup_day DESC, r.video_id DESC - `); - - return prepared.all(limit) as unknown as ImmersionSessionRollupRow[]; -} - -export function getMonthlyRollups(db: DatabaseSync, limit = 24): ImmersionSessionRollupRow[] { - const prepared = db.prepare(` - WITH recent_months AS ( - SELECT DISTINCT rollup_month - FROM imm_monthly_rollups - ORDER BY rollup_month DESC - LIMIT ? - ) - SELECT - rollup_month AS rollupDayOrMonth, - video_id AS videoId, - total_sessions AS totalSessions, - total_active_min AS totalActiveMin, - total_lines_seen AS totalLinesSeen, - total_tokens_seen AS totalTokensSeen, - total_cards AS totalCards, - 0 AS cardsPerHour, - 0 AS tokensPerMin, - 0 AS lookupHitRate - FROM imm_monthly_rollups - WHERE rollup_month IN (SELECT rollup_month FROM recent_months) - ORDER BY rollup_month DESC, video_id DESC - `); - return prepared.all(limit) as unknown as ImmersionSessionRollupRow[]; -} - -type TrendRange = '7d' | '30d' | '90d' | 'all'; -type TrendGroupBy = 'day' | 'month'; - -interface TrendChartPoint { - label: string; - value: number; -} - -interface TrendPerAnimePoint { - epochDay: number; - animeTitle: string; - value: number; -} - -interface TrendSessionMetricRow { - startedAtMs: number; - videoId: number | null; - canonicalTitle: string | null; - animeTitle: string | null; - activeWatchedMs: number; - tokensSeen: number; - cardsMined: number; - yomitanLookupCount: number; -} - -export interface TrendsDashboardQueryResult { - activity: { - watchTime: TrendChartPoint[]; - cards: 
TrendChartPoint[]; - words: TrendChartPoint[]; - sessions: TrendChartPoint[]; - }; - progress: { - watchTime: TrendChartPoint[]; - sessions: TrendChartPoint[]; - words: TrendChartPoint[]; - newWords: TrendChartPoint[]; - cards: TrendChartPoint[]; - episodes: TrendChartPoint[]; - lookups: TrendChartPoint[]; - }; - ratios: { - lookupsPerHundred: TrendChartPoint[]; - }; - animePerDay: { - episodes: TrendPerAnimePoint[]; - watchTime: TrendPerAnimePoint[]; - cards: TrendPerAnimePoint[]; - words: TrendPerAnimePoint[]; - lookups: TrendPerAnimePoint[]; - lookupsPerHundred: TrendPerAnimePoint[]; - }; - animeCumulative: { - watchTime: TrendPerAnimePoint[]; - episodes: TrendPerAnimePoint[]; - cards: TrendPerAnimePoint[]; - words: TrendPerAnimePoint[]; - }; - patterns: { - watchTimeByDayOfWeek: TrendChartPoint[]; - watchTimeByHour: TrendChartPoint[]; - }; -} - -const TREND_DAY_LIMITS: Record, number> = { - '7d': 7, - '30d': 30, - '90d': 90, -}; - -const DAY_NAMES = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat']; - -function getTrendDayLimit(range: TrendRange): number { - return range === 'all' ? 
365 : TREND_DAY_LIMITS[range]; -} - -function getTrendMonthlyLimit(range: TrendRange): number { - if (range === 'all') { - return 120; - } - return Math.max(1, Math.ceil(TREND_DAY_LIMITS[range] / 30)); -} - -function getTrendCutoffMs(range: TrendRange): number | null { - if (range === 'all') { - return null; - } - const dayLimit = getTrendDayLimit(range); - const now = new Date(); - const localMidnight = new Date(now.getFullYear(), now.getMonth(), now.getDate()).getTime(); - return localMidnight - (dayLimit - 1) * 86_400_000; -} - -function makeTrendLabel(value: number): string { - if (value > 100_000) { - const year = Math.floor(value / 100); - const month = value % 100; - return new Date(Date.UTC(year, month - 1, 1)).toLocaleDateString(undefined, { - month: 'short', - year: '2-digit', - }); - } - - return new Date(value * 86_400_000).toLocaleDateString(undefined, { - month: 'short', - day: 'numeric', - }); -} - -function getTrendSessionWordCount(session: Pick): number { - return session.tokensSeen; -} - -function resolveTrendAnimeTitle(value: { - animeTitle: string | null; - canonicalTitle: string | null; -}): string { - return value.animeTitle ?? value.canonicalTitle ?? 'Unknown'; -} - -function accumulatePoints(points: TrendChartPoint[]): TrendChartPoint[] { - let sum = 0; - return points.map((point) => { - sum += point.value; - return { - label: point.label, - value: sum, - }; - }); -} - -function buildAggregatedTrendRows(rollups: ImmersionSessionRollupRow[]) { - const byKey = new Map< - number, - { activeMin: number; cards: number; words: number; sessions: number } - >(); - - for (const rollup of rollups) { - const existing = byKey.get(rollup.rollupDayOrMonth) ?? 
{ - activeMin: 0, - cards: 0, - words: 0, - sessions: 0, - }; - existing.activeMin += Math.round(rollup.totalActiveMin); - existing.cards += rollup.totalCards; - existing.words += rollup.totalTokensSeen; - existing.sessions += rollup.totalSessions; - byKey.set(rollup.rollupDayOrMonth, existing); - } - - return Array.from(byKey.entries()) - .sort(([left], [right]) => left - right) - .map(([key, value]) => ({ - label: makeTrendLabel(key), - activeMin: value.activeMin, - cards: value.cards, - words: value.words, - sessions: value.sessions, - })); -} - -function buildWatchTimeByDayOfWeek(sessions: TrendSessionMetricRow[]): TrendChartPoint[] { - const totals = new Array(7).fill(0); - for (const session of sessions) { - totals[new Date(session.startedAtMs).getDay()] += session.activeWatchedMs; - } - return DAY_NAMES.map((name, index) => ({ - label: name, - value: Math.round(totals[index] / 60_000), - })); -} - -function buildWatchTimeByHour(sessions: TrendSessionMetricRow[]): TrendChartPoint[] { - const totals = new Array(24).fill(0); - for (const session of sessions) { - totals[new Date(session.startedAtMs).getHours()] += session.activeWatchedMs; - } - return totals.map((ms, index) => ({ - label: `${String(index).padStart(2, '0')}:00`, - value: Math.round(ms / 60_000), - })); -} - -function dayLabel(epochDay: number): string { - return new Date(epochDay * 86_400_000).toLocaleDateString(undefined, { - month: 'short', - day: 'numeric', - }); -} - -function buildSessionSeriesByDay( - sessions: TrendSessionMetricRow[], - getValue: (session: TrendSessionMetricRow) => number, -): TrendChartPoint[] { - const byDay = new Map(); - for (const session of sessions) { - const epochDay = Math.floor(session.startedAtMs / 86_400_000); - byDay.set(epochDay, (byDay.get(epochDay) ?? 
0) + getValue(session)); - } - return Array.from(byDay.entries()) - .sort(([left], [right]) => left - right) - .map(([epochDay, value]) => ({ label: dayLabel(epochDay), value })); -} - -function buildLookupsPerHundredWords(sessions: TrendSessionMetricRow[]): TrendChartPoint[] { - const lookupsByDay = new Map(); - const wordsByDay = new Map(); - - for (const session of sessions) { - const epochDay = Math.floor(session.startedAtMs / 86_400_000); - lookupsByDay.set(epochDay, (lookupsByDay.get(epochDay) ?? 0) + session.yomitanLookupCount); - wordsByDay.set(epochDay, (wordsByDay.get(epochDay) ?? 0) + getTrendSessionWordCount(session)); - } - - return Array.from(lookupsByDay.entries()) - .sort(([left], [right]) => left - right) - .map(([epochDay, lookups]) => { - const words = wordsByDay.get(epochDay) ?? 0; - return { - label: dayLabel(epochDay), - value: words > 0 ? +((lookups / words) * 100).toFixed(1) : 0, - }; - }); -} - -function buildPerAnimeFromSessions( - sessions: TrendSessionMetricRow[], - getValue: (session: TrendSessionMetricRow) => number, -): TrendPerAnimePoint[] { - const byAnime = new Map>(); - - for (const session of sessions) { - const animeTitle = resolveTrendAnimeTitle(session); - const epochDay = Math.floor(session.startedAtMs / 86_400_000); - const dayMap = byAnime.get(animeTitle) ?? new Map(); - dayMap.set(epochDay, (dayMap.get(epochDay) ?? 
0) + getValue(session)); - byAnime.set(animeTitle, dayMap); - } - - const result: TrendPerAnimePoint[] = []; - for (const [animeTitle, dayMap] of byAnime) { - for (const [epochDay, value] of dayMap) { - result.push({ epochDay, animeTitle, value }); - } - } - return result; -} - -function buildLookupsPerHundredPerAnime(sessions: TrendSessionMetricRow[]): TrendPerAnimePoint[] { - const lookups = new Map>(); - const words = new Map>(); - - for (const session of sessions) { - const animeTitle = resolveTrendAnimeTitle(session); - const epochDay = Math.floor(session.startedAtMs / 86_400_000); - - const lookupMap = lookups.get(animeTitle) ?? new Map(); - lookupMap.set(epochDay, (lookupMap.get(epochDay) ?? 0) + session.yomitanLookupCount); - lookups.set(animeTitle, lookupMap); - - const wordMap = words.get(animeTitle) ?? new Map(); - wordMap.set(epochDay, (wordMap.get(epochDay) ?? 0) + getTrendSessionWordCount(session)); - words.set(animeTitle, wordMap); - } - - const result: TrendPerAnimePoint[] = []; - for (const [animeTitle, dayMap] of lookups) { - const wordMap = words.get(animeTitle) ?? new Map(); - for (const [epochDay, lookupCount] of dayMap) { - const wordCount = wordMap.get(epochDay) ?? 0; - result.push({ - epochDay, - animeTitle, - value: wordCount > 0 ? +((lookupCount / wordCount) * 100).toFixed(1) : 0, - }); - } - } - return result; -} - -function buildCumulativePerAnime(points: TrendPerAnimePoint[]): TrendPerAnimePoint[] { - const byAnime = new Map>(); - const allDays = new Set(); - - for (const point of points) { - const dayMap = byAnime.get(point.animeTitle) ?? new Map(); - dayMap.set(point.epochDay, (dayMap.get(point.epochDay) ?? 
0) + point.value); - byAnime.set(point.animeTitle, dayMap); - allDays.add(point.epochDay); - } - - const sortedDays = [...allDays].sort((left, right) => left - right); - if (sortedDays.length === 0) { - return []; - } - - const minDay = sortedDays[0]!; - const maxDay = sortedDays[sortedDays.length - 1]!; - const result: TrendPerAnimePoint[] = []; - - for (const [animeTitle, dayMap] of byAnime) { - const firstDay = Math.min(...dayMap.keys()); - let cumulative = 0; - for (let epochDay = minDay; epochDay <= maxDay; epochDay += 1) { - if (epochDay < firstDay) { - continue; - } - cumulative += dayMap.get(epochDay) ?? 0; - result.push({ epochDay, animeTitle, value: cumulative }); - } - } - - return result; -} - -function getVideoAnimeTitleMap( - db: DatabaseSync, - videoIds: Array, -): Map { - const uniqueIds = [ - ...new Set(videoIds.filter((value): value is number => typeof value === 'number')), - ]; - if (uniqueIds.length === 0) { - return new Map(); - } - - const rows = db - .prepare( - ` - SELECT - v.video_id AS videoId, - COALESCE(a.canonical_title, v.canonical_title, 'Unknown') AS animeTitle - FROM imm_videos v - LEFT JOIN imm_anime a ON a.anime_id = v.anime_id - WHERE v.video_id IN (${makePlaceholders(uniqueIds)}) - `, - ) - .all(...uniqueIds) as Array<{ videoId: number; animeTitle: string }>; - - return new Map(rows.map((row) => [row.videoId, row.animeTitle])); -} - -function resolveVideoAnimeTitle( - videoId: number | null, - titlesByVideoId: Map, -): string { - if (videoId === null) { - return 'Unknown'; - } - return titlesByVideoId.get(videoId) ?? 'Unknown'; -} - -function buildPerAnimeFromDailyRollups( - rollups: ImmersionSessionRollupRow[], - titlesByVideoId: Map, - getValue: (rollup: ImmersionSessionRollupRow) => number, -): TrendPerAnimePoint[] { - const byAnime = new Map>(); - - for (const rollup of rollups) { - const animeTitle = resolveVideoAnimeTitle(rollup.videoId, titlesByVideoId); - const dayMap = byAnime.get(animeTitle) ?? 
new Map(); - dayMap.set( - rollup.rollupDayOrMonth, - (dayMap.get(rollup.rollupDayOrMonth) ?? 0) + getValue(rollup), - ); - byAnime.set(animeTitle, dayMap); - } - - const result: TrendPerAnimePoint[] = []; - for (const [animeTitle, dayMap] of byAnime) { - for (const [epochDay, value] of dayMap) { - result.push({ epochDay, animeTitle, value }); - } - } - return result; -} - -function buildEpisodesPerAnimeFromDailyRollups( - rollups: ImmersionSessionRollupRow[], - titlesByVideoId: Map, -): TrendPerAnimePoint[] { - const byAnime = new Map>>(); - - for (const rollup of rollups) { - if (rollup.videoId === null) { - continue; - } - const animeTitle = resolveVideoAnimeTitle(rollup.videoId, titlesByVideoId); - const dayMap = byAnime.get(animeTitle) ?? new Map(); - const videoIds = dayMap.get(rollup.rollupDayOrMonth) ?? new Set(); - videoIds.add(rollup.videoId); - dayMap.set(rollup.rollupDayOrMonth, videoIds); - byAnime.set(animeTitle, dayMap); - } - - const result: TrendPerAnimePoint[] = []; - for (const [animeTitle, dayMap] of byAnime) { - for (const [epochDay, videoIds] of dayMap) { - result.push({ epochDay, animeTitle, value: videoIds.size }); - } - } - return result; -} - -function buildEpisodesPerDayFromDailyRollups( - rollups: ImmersionSessionRollupRow[], -): TrendChartPoint[] { - const byDay = new Map>(); - - for (const rollup of rollups) { - if (rollup.videoId === null) { - continue; - } - const videoIds = byDay.get(rollup.rollupDayOrMonth) ?? new Set(); - videoIds.add(rollup.videoId); - byDay.set(rollup.rollupDayOrMonth, videoIds); - } - - return Array.from(byDay.entries()) - .sort(([left], [right]) => left - right) - .map(([epochDay, videoIds]) => ({ - label: dayLabel(epochDay), - value: videoIds.size, - })); -} - -function getTrendSessionMetrics( - db: DatabaseSync, - cutoffMs: number | null, -): TrendSessionMetricRow[] { - const whereClause = cutoffMs === null ? 
'' : 'WHERE s.started_at_ms >= ?'; - const prepared = db.prepare(` - ${ACTIVE_SESSION_METRICS_CTE} - SELECT - s.started_at_ms AS startedAtMs, - s.video_id AS videoId, - v.canonical_title AS canonicalTitle, - a.canonical_title AS animeTitle, - COALESCE(asm.activeWatchedMs, s.active_watched_ms, 0) AS activeWatchedMs, - COALESCE(asm.tokensSeen, s.tokens_seen, 0) AS tokensSeen, - COALESCE(asm.cardsMined, s.cards_mined, 0) AS cardsMined, - COALESCE(asm.yomitanLookupCount, s.yomitan_lookup_count, 0) AS yomitanLookupCount - FROM imm_sessions s - LEFT JOIN active_session_metrics asm ON asm.sessionId = s.session_id - LEFT JOIN imm_videos v ON v.video_id = s.video_id - LEFT JOIN imm_anime a ON a.anime_id = v.anime_id - ${whereClause} - ORDER BY s.started_at_ms ASC - `); - - return (cutoffMs === null ? prepared.all() : prepared.all(cutoffMs)) as TrendSessionMetricRow[]; -} - -function buildNewWordsPerDay(db: DatabaseSync, cutoffMs: number | null): TrendChartPoint[] { - const whereClause = cutoffMs === null ? '' : 'AND first_seen >= ?'; - const prepared = db.prepare(` - SELECT - CAST(first_seen / 86400 AS INTEGER) AS epochDay, - COUNT(*) AS wordCount - FROM imm_words - WHERE first_seen IS NOT NULL - ${whereClause} - GROUP BY epochDay - ORDER BY epochDay ASC - `); - - const rows = ( - cutoffMs === null ? prepared.all() : prepared.all(Math.floor(cutoffMs / 1000)) - ) as Array<{ - epochDay: number; - wordCount: number; - }>; - - return rows.map((row) => ({ - label: dayLabel(row.epochDay), - value: row.wordCount, - })); -} - -export function getTrendsDashboard( - db: DatabaseSync, - range: TrendRange = '30d', - groupBy: TrendGroupBy = 'day', -): TrendsDashboardQueryResult { - const dayLimit = getTrendDayLimit(range); - const monthlyLimit = getTrendMonthlyLimit(range); - const cutoffMs = getTrendCutoffMs(range); - - const chartRollups = - groupBy === 'month' ? 
getMonthlyRollups(db, monthlyLimit) : getDailyRollups(db, dayLimit); - const dailyRollups = getDailyRollups(db, dayLimit); - const sessions = getTrendSessionMetrics(db, cutoffMs); - const titlesByVideoId = getVideoAnimeTitleMap( - db, - dailyRollups.map((rollup) => rollup.videoId), - ); - - const aggregatedRows = buildAggregatedTrendRows(chartRollups); - const activity = { - watchTime: aggregatedRows.map((row) => ({ label: row.label, value: row.activeMin })), - cards: aggregatedRows.map((row) => ({ label: row.label, value: row.cards })), - words: aggregatedRows.map((row) => ({ label: row.label, value: row.words })), - sessions: aggregatedRows.map((row) => ({ label: row.label, value: row.sessions })), - }; - - const animePerDay = { - episodes: buildEpisodesPerAnimeFromDailyRollups(dailyRollups, titlesByVideoId), - watchTime: buildPerAnimeFromDailyRollups(dailyRollups, titlesByVideoId, (rollup) => - Math.round(rollup.totalActiveMin), - ), - cards: buildPerAnimeFromDailyRollups( - dailyRollups, - titlesByVideoId, - (rollup) => rollup.totalCards, - ), - words: buildPerAnimeFromDailyRollups( - dailyRollups, - titlesByVideoId, - (rollup) => rollup.totalTokensSeen, - ), - lookups: buildPerAnimeFromSessions(sessions, (session) => session.yomitanLookupCount), - lookupsPerHundred: buildLookupsPerHundredPerAnime(sessions), - }; - - return { - activity, - progress: { - watchTime: accumulatePoints(activity.watchTime), - sessions: accumulatePoints(activity.sessions), - words: accumulatePoints(activity.words), - newWords: accumulatePoints(buildNewWordsPerDay(db, cutoffMs)), - cards: accumulatePoints(activity.cards), - episodes: accumulatePoints(buildEpisodesPerDayFromDailyRollups(dailyRollups)), - lookups: accumulatePoints( - buildSessionSeriesByDay(sessions, (session) => session.yomitanLookupCount), - ), - }, - ratios: { - lookupsPerHundred: buildLookupsPerHundredWords(sessions), - }, - animePerDay, - animeCumulative: { - watchTime: 
buildCumulativePerAnime(animePerDay.watchTime), - episodes: buildCumulativePerAnime(animePerDay.episodes), - cards: buildCumulativePerAnime(animePerDay.cards), - words: buildCumulativePerAnime(animePerDay.words), - }, - patterns: { - watchTimeByDayOfWeek: buildWatchTimeByDayOfWeek(sessions), - watchTimeByHour: buildWatchTimeByHour(sessions), - }, - }; -} - -export function getVocabularyStats( - db: DatabaseSync, - limit = 100, - excludePos?: string[], -): VocabularyStatsRow[] { - const hasExclude = excludePos && excludePos.length > 0; - const placeholders = hasExclude ? excludePos.map(() => '?').join(', ') : ''; - const whereClause = hasExclude - ? `WHERE (part_of_speech IS NULL OR part_of_speech NOT IN (${placeholders}))` - : ''; - const stmt = db.prepare(` - SELECT w.id AS wordId, w.headword, w.word, w.reading, - w.part_of_speech AS partOfSpeech, w.pos1, w.pos2, w.pos3, - w.frequency, w.frequency_rank AS frequencyRank, - w.first_seen AS firstSeen, w.last_seen AS lastSeen, - COUNT(DISTINCT sl.anime_id) AS animeCount - FROM imm_words w - LEFT JOIN imm_word_line_occurrences o ON o.word_id = w.id - LEFT JOIN imm_subtitle_lines sl ON sl.line_id = o.line_id AND sl.anime_id IS NOT NULL - ${whereClause ? whereClause.replace('part_of_speech', 'w.part_of_speech') : ''} - GROUP BY w.id - ORDER BY w.frequency DESC LIMIT ? - `); - const params = hasExclude ? [...excludePos, limit] : [limit]; - return stmt.all(...params) as VocabularyStatsRow[]; -} - -function toStoredWordToken(row: { - word: string; - headword: string; - part_of_speech: string | null; - pos1: string | null; - pos2: string | null; - pos3: string | null; -}): MergedToken { - return { - surface: row.word || row.headword || '', - reading: '', - headword: row.headword || row.word || '', - startPos: 0, - endPos: 0, - partOfSpeech: deriveStoredPartOfSpeech({ - partOfSpeech: row.part_of_speech, - pos1: row.pos1, - }), - pos1: row.pos1 ?? '', - pos2: row.pos2 ?? '', - pos3: row.pos3 ?? 
'', - isMerged: true, - isKnown: false, - isNPlusOneTarget: false, - }; -} - -function normalizePosField(value: string | null | undefined): string { - return typeof value === 'string' ? value.trim() : ''; -} - -function resolveStoredVocabularyPos(row: CleanupVocabularyRow): ResolvedVocabularyPos | null { - const headword = normalizePosField(row.headword); - const reading = normalizePosField(row.reading); - const partOfSpeechRaw = typeof row.part_of_speech === 'string' ? row.part_of_speech.trim() : ''; - const pos1 = normalizePosField(row.pos1); - const pos2 = normalizePosField(row.pos2); - const pos3 = normalizePosField(row.pos3); - - if (!headword && !reading && !partOfSpeechRaw && !pos1 && !pos2 && !pos3) { - return null; - } - - return { - headword: headword || normalizePosField(row.word), - reading, - hasPosMetadata: Boolean(partOfSpeechRaw || pos1 || pos2 || pos3), - partOfSpeech: deriveStoredPartOfSpeech({ - partOfSpeech: partOfSpeechRaw, - pos1, - }), - pos1, - pos2, - pos3, - }; -} - -function hasStructuredPos(pos: ResolvedVocabularyPos | null): boolean { - return Boolean(pos?.hasPosMetadata && (pos.pos1 || pos.pos2 || pos.pos3 || pos.partOfSpeech)); -} - -function needsLegacyVocabularyMetadataRepair( - row: CleanupVocabularyRow, - stored: ResolvedVocabularyPos | null, -): boolean { - if (!stored) { - return true; - } - - if (!hasStructuredPos(stored)) { - return true; - } - - if (!stored.reading) { - return true; - } - - if (!stored.headword) { - return true; - } - - return stored.headword === normalizePosField(row.word); -} - -function shouldUpdateStoredVocabularyPos( - row: CleanupVocabularyRow, - next: ResolvedVocabularyPos, -): boolean { - return ( - normalizePosField(row.headword) !== next.headword || - normalizePosField(row.reading) !== next.reading || - (next.hasPosMetadata && - (normalizePosField(row.part_of_speech) !== next.partOfSpeech || - normalizePosField(row.pos1) !== next.pos1 || - normalizePosField(row.pos2) !== next.pos2 || - 
normalizePosField(row.pos3) !== next.pos3)) - ); -} - -function chooseMergedPartOfSpeech( - current: string | null | undefined, - incoming: ResolvedVocabularyPos, -): string { - const normalizedCurrent = normalizePosField(current); - if ( - normalizedCurrent && - normalizedCurrent !== PartOfSpeech.other && - incoming.partOfSpeech === PartOfSpeech.other - ) { - return normalizedCurrent; - } - return incoming.partOfSpeech; -} - -async function maybeResolveLegacyVocabularyPos( - row: CleanupVocabularyRow, - options: CleanupVocabularyStatsOptions, -): Promise { - const stored = resolveStoredVocabularyPos(row); - if (!needsLegacyVocabularyMetadataRepair(row, stored) || !options.resolveLegacyPos) { - return stored; - } - - const resolved = await options.resolveLegacyPos(row); - if (resolved) { - return { - headword: normalizePosField(resolved.headword) || normalizePosField(row.word), - reading: normalizePosField(resolved.reading), - hasPosMetadata: true, - partOfSpeech: deriveStoredPartOfSpeech({ - partOfSpeech: resolved.partOfSpeech, - pos1: resolved.pos1, - }), - pos1: normalizePosField(resolved.pos1), - pos2: normalizePosField(resolved.pos2), - pos3: normalizePosField(resolved.pos3), - }; - } - - return stored; -} - -export async function cleanupVocabularyStats( - db: DatabaseSync, - options: CleanupVocabularyStatsOptions = {}, -): Promise { - const rows = db - .prepare( - `SELECT id, word, headword, reading, part_of_speech, pos1, pos2, pos3, first_seen, last_seen, frequency - FROM imm_words`, - ) - .all() as CleanupVocabularyRow[]; - const findDuplicateStmt = db.prepare( - `SELECT id, part_of_speech, pos1, pos2, pos3, first_seen, last_seen, frequency - FROM imm_words - WHERE headword = ? AND word = ? AND reading = ? AND id != ?`, - ); - const deleteStmt = db.prepare('DELETE FROM imm_words WHERE id = ?'); - const updateStmt = db.prepare( - `UPDATE imm_words - SET headword = ?, reading = ?, part_of_speech = ?, pos1 = ?, pos2 = ?, pos3 = ? 
- WHERE id = ?`, - ); - const mergeWordStmt = db.prepare( - `UPDATE imm_words - SET - frequency = COALESCE(frequency, 0) + ?, - part_of_speech = ?, - pos1 = ?, - pos2 = ?, - pos3 = ?, - first_seen = MIN(COALESCE(first_seen, ?), ?), - last_seen = MAX(COALESCE(last_seen, ?), ?) - WHERE id = ?`, - ); - const moveOccurrencesStmt = db.prepare( - `INSERT INTO imm_word_line_occurrences (line_id, word_id, occurrence_count) - SELECT line_id, ?, occurrence_count - FROM imm_word_line_occurrences - WHERE word_id = ? - ON CONFLICT(line_id, word_id) DO UPDATE SET - occurrence_count = imm_word_line_occurrences.occurrence_count + excluded.occurrence_count`, - ); - const deleteOccurrencesStmt = db.prepare( - 'DELETE FROM imm_word_line_occurrences WHERE word_id = ?', - ); - let kept = 0; - let deleted = 0; - let repaired = 0; - - for (const row of rows) { - const resolvedPos = await maybeResolveLegacyVocabularyPos(row, options); - const shouldRepair = Boolean(resolvedPos && shouldUpdateStoredVocabularyPos(row, resolvedPos)); - if (resolvedPos && shouldRepair) { - const duplicate = findDuplicateStmt.get( - resolvedPos.headword, - row.word, - resolvedPos.reading, - row.id, - ) as { - id: number; - part_of_speech: string | null; - pos1: string | null; - pos2: string | null; - pos3: string | null; - first_seen: number | null; - last_seen: number | null; - frequency: number | null; - } | null; - if (duplicate) { - moveOccurrencesStmt.run(duplicate.id, row.id); - deleteOccurrencesStmt.run(row.id); - mergeWordStmt.run( - row.frequency ?? 0, - chooseMergedPartOfSpeech(duplicate.part_of_speech, resolvedPos), - normalizePosField(duplicate.pos1) || resolvedPos.pos1, - normalizePosField(duplicate.pos2) || resolvedPos.pos2, - normalizePosField(duplicate.pos3) || resolvedPos.pos3, - row.first_seen ?? duplicate.first_seen ?? 0, - row.first_seen ?? duplicate.first_seen ?? 0, - row.last_seen ?? duplicate.last_seen ?? 0, - row.last_seen ?? duplicate.last_seen ?? 
0, - duplicate.id, - ); - deleteStmt.run(row.id); - repaired += 1; - deleted += 1; - continue; - } - - updateStmt.run( - resolvedPos.headword, - resolvedPos.reading, - resolvedPos.partOfSpeech, - resolvedPos.pos1, - resolvedPos.pos2, - resolvedPos.pos3, - row.id, - ); - repaired += 1; - } - - const effectiveRow = { - ...row, - headword: resolvedPos?.headword ?? row.headword, - reading: resolvedPos?.reading ?? row.reading, - part_of_speech: resolvedPos?.hasPosMetadata ? resolvedPos.partOfSpeech : row.part_of_speech, - pos1: resolvedPos?.pos1 ?? row.pos1, - pos2: resolvedPos?.pos2 ?? row.pos2, - pos3: resolvedPos?.pos3 ?? row.pos3, - }; - const missingPos = - !normalizePosField(effectiveRow.part_of_speech) && - !normalizePosField(effectiveRow.pos1) && - !normalizePosField(effectiveRow.pos2) && - !normalizePosField(effectiveRow.pos3); - if ( - missingPos || - shouldExcludeTokenFromVocabularyPersistence(toStoredWordToken(effectiveRow)) - ) { - deleteStmt.run(row.id); - deleted += 1; - continue; - } - kept += 1; - } - - return { - scanned: rows.length, - kept, - deleted, - repaired, - }; -} - -export function getKanjiStats(db: DatabaseSync, limit = 100): KanjiStatsRow[] { - const stmt = db.prepare(` - SELECT id AS kanjiId, kanji, frequency, - first_seen AS firstSeen, last_seen AS lastSeen - FROM imm_kanji ORDER BY frequency DESC LIMIT ? 
- `); - return stmt.all(limit) as KanjiStatsRow[]; -} - -export function getWordOccurrences( - db: DatabaseSync, - headword: string, - word: string, - reading: string, - limit = 100, - offset = 0, -): WordOccurrenceRow[] { - return db - .prepare( - ` - SELECT - l.anime_id AS animeId, - a.canonical_title AS animeTitle, - l.video_id AS videoId, - v.canonical_title AS videoTitle, - v.source_path AS sourcePath, - l.secondary_text AS secondaryText, - l.session_id AS sessionId, - l.line_index AS lineIndex, - l.segment_start_ms AS segmentStartMs, - l.segment_end_ms AS segmentEndMs, - l.text AS text, - o.occurrence_count AS occurrenceCount - FROM imm_word_line_occurrences o - JOIN imm_words w ON w.id = o.word_id - JOIN imm_subtitle_lines l ON l.line_id = o.line_id - JOIN imm_videos v ON v.video_id = l.video_id - LEFT JOIN imm_anime a ON a.anime_id = l.anime_id - WHERE w.headword = ? AND w.word = ? AND w.reading = ? - ORDER BY l.CREATED_DATE DESC, l.line_id DESC - LIMIT ? - OFFSET ? - `, - ) - .all(headword, word, reading, limit, offset) as unknown as WordOccurrenceRow[]; -} - -export function getKanjiOccurrences( - db: DatabaseSync, - kanji: string, - limit = 100, - offset = 0, -): KanjiOccurrenceRow[] { - return db - .prepare( - ` - SELECT - l.anime_id AS animeId, - a.canonical_title AS animeTitle, - l.video_id AS videoId, - v.canonical_title AS videoTitle, - v.source_path AS sourcePath, - l.secondary_text AS secondaryText, - l.session_id AS sessionId, - l.line_index AS lineIndex, - l.segment_start_ms AS segmentStartMs, - l.segment_end_ms AS segmentEndMs, - l.text AS text, - o.occurrence_count AS occurrenceCount - FROM imm_kanji_line_occurrences o - JOIN imm_kanji k ON k.id = o.kanji_id - JOIN imm_subtitle_lines l ON l.line_id = o.line_id - JOIN imm_videos v ON v.video_id = l.video_id - LEFT JOIN imm_anime a ON a.anime_id = l.anime_id - WHERE k.kanji = ? - ORDER BY l.CREATED_DATE DESC, l.line_id DESC - LIMIT ? - OFFSET ? 
- `, - ) - .all(kanji, limit, offset) as unknown as KanjiOccurrenceRow[]; -} - -export function getSessionEvents( - db: DatabaseSync, - sessionId: number, - limit = 500, - eventTypes?: number[], -): SessionEventRow[] { - if (!eventTypes || eventTypes.length === 0) { - const stmt = db.prepare(` - SELECT event_type AS eventType, ts_ms AS tsMs, payload_json AS payload - FROM imm_session_events WHERE session_id = ? ORDER BY ts_ms ASC LIMIT ? - `); - return stmt.all(sessionId, limit) as SessionEventRow[]; - } - - const placeholders = eventTypes.map(() => '?').join(', '); - const stmt = db.prepare(` - SELECT event_type AS eventType, ts_ms AS tsMs, payload_json AS payload - FROM imm_session_events - WHERE session_id = ? AND event_type IN (${placeholders}) - ORDER BY ts_ms ASC - LIMIT ? - `); - return stmt.all(sessionId, ...eventTypes, limit) as SessionEventRow[]; -} - -export function getAnimeLibrary(db: DatabaseSync): AnimeLibraryRow[] { - return db - .prepare( - ` - SELECT - a.anime_id AS animeId, - a.canonical_title AS canonicalTitle, - a.anilist_id AS anilistId, - COALESCE(lm.total_sessions, 0) AS totalSessions, - COALESCE(lm.total_active_ms, 0) AS totalActiveMs, - COALESCE(lm.total_cards, 0) AS totalCards, - COALESCE(lm.total_tokens_seen, 0) AS totalTokensSeen, - COUNT(DISTINCT v.video_id) AS episodeCount, - a.episodes_total AS episodesTotal, - COALESCE(lm.last_watched_ms, 0) AS lastWatchedMs - FROM imm_anime a - JOIN imm_lifetime_anime lm ON lm.anime_id = a.anime_id - JOIN imm_videos v ON v.anime_id = a.anime_id - GROUP BY a.anime_id - ORDER BY totalActiveMs DESC, lm.last_watched_ms DESC, canonicalTitle ASC - `, - ) - .all() as unknown as AnimeLibraryRow[]; -} - -export function getAnimeDetail(db: DatabaseSync, animeId: number): AnimeDetailRow | null { - return db - .prepare( - ` - ${ACTIVE_SESSION_METRICS_CTE} - SELECT - a.anime_id AS animeId, - a.canonical_title AS canonicalTitle, - a.anilist_id AS anilistId, - a.title_romaji AS titleRomaji, - a.title_english AS 
titleEnglish, - a.title_native AS titleNative, - a.description AS description, - COALESCE(lm.total_sessions, 0) AS totalSessions, - COALESCE(lm.total_active_ms, 0) AS totalActiveMs, - COALESCE(lm.total_cards, 0) AS totalCards, - COALESCE(lm.total_tokens_seen, 0) AS totalTokensSeen, - COALESCE(lm.total_lines_seen, 0) AS totalLinesSeen, - COALESCE(SUM(COALESCE(asm.lookupCount, s.lookup_count, 0)), 0) AS totalLookupCount, - COALESCE(SUM(COALESCE(asm.lookupHits, s.lookup_hits, 0)), 0) AS totalLookupHits, - COALESCE(SUM(COALESCE(asm.yomitanLookupCount, s.yomitan_lookup_count, 0)), 0) AS totalYomitanLookupCount, - COUNT(DISTINCT v.video_id) AS episodeCount, - COALESCE(lm.last_watched_ms, 0) AS lastWatchedMs - FROM imm_anime a - JOIN imm_lifetime_anime lm ON lm.anime_id = a.anime_id - JOIN imm_videos v ON v.anime_id = a.anime_id - LEFT JOIN imm_sessions s ON s.video_id = v.video_id - LEFT JOIN active_session_metrics asm ON asm.sessionId = s.session_id - WHERE a.anime_id = ? - GROUP BY a.anime_id - `, - ) - .get(animeId) as unknown as AnimeDetailRow | null; -} - -export function getAnimeAnilistEntries(db: DatabaseSync, animeId: number): AnimeAnilistEntryRow[] { - return db - .prepare( - ` - SELECT DISTINCT - m.anilist_id AS anilistId, - m.title_romaji AS titleRomaji, - m.title_english AS titleEnglish, - v.parsed_season AS season - FROM imm_videos v - JOIN imm_media_art m ON m.video_id = v.video_id - WHERE v.anime_id = ? 
- AND m.anilist_id IS NOT NULL - ORDER BY v.parsed_season ASC - `, - ) - .all(animeId) as unknown as AnimeAnilistEntryRow[]; -} - -export function getAnimeEpisodes(db: DatabaseSync, animeId: number): AnimeEpisodeRow[] { - return db - .prepare( - ` - ${ACTIVE_SESSION_METRICS_CTE} - SELECT - v.anime_id AS animeId, - v.video_id AS videoId, - v.canonical_title AS canonicalTitle, - v.parsed_title AS parsedTitle, - v.parsed_season AS season, - v.parsed_episode AS episode, - v.duration_ms AS durationMs, - ( - SELECT COALESCE( - NULLIF(s_recent.ended_media_ms, 0), - ( - SELECT MAX(line.segment_end_ms) - FROM imm_subtitle_lines line - WHERE line.session_id = s_recent.session_id - AND line.segment_end_ms IS NOT NULL - ), - ( - SELECT MAX(event.segment_end_ms) - FROM imm_session_events event - WHERE event.session_id = s_recent.session_id - AND event.segment_end_ms IS NOT NULL - ) - ) - FROM imm_sessions s_recent - WHERE s_recent.video_id = v.video_id - AND ( - s_recent.ended_media_ms IS NOT NULL - OR EXISTS ( - SELECT 1 - FROM imm_subtitle_lines line - WHERE line.session_id = s_recent.session_id - AND line.segment_end_ms IS NOT NULL - ) - OR EXISTS ( - SELECT 1 - FROM imm_session_events event - WHERE event.session_id = s_recent.session_id - AND event.segment_end_ms IS NOT NULL - ) - ) - ORDER BY - COALESCE(s_recent.ended_at_ms, s_recent.LAST_UPDATE_DATE, s_recent.started_at_ms) DESC, - s_recent.session_id DESC - LIMIT 1 - ) AS endedMediaMs, - v.watched AS watched, - COUNT(DISTINCT s.session_id) AS totalSessions, - COALESCE(SUM(COALESCE(asm.activeWatchedMs, s.active_watched_ms, 0)), 0) AS totalActiveMs, - COALESCE(SUM(COALESCE(asm.cardsMined, s.cards_mined, 0)), 0) AS totalCards, - COALESCE(SUM(COALESCE(asm.tokensSeen, s.tokens_seen, 0)), 0) AS totalTokensSeen, - COALESCE(SUM(COALESCE(asm.yomitanLookupCount, s.yomitan_lookup_count, 0)), 0) AS totalYomitanLookupCount, - MAX(s.started_at_ms) AS lastWatchedMs - FROM imm_videos v - JOIN imm_sessions s ON s.video_id = v.video_id - 
LEFT JOIN active_session_metrics asm ON asm.sessionId = s.session_id - WHERE v.anime_id = ? - GROUP BY v.video_id - ORDER BY - CASE WHEN v.parsed_season IS NULL THEN 1 ELSE 0 END, - v.parsed_season ASC, - CASE WHEN v.parsed_episode IS NULL THEN 1 ELSE 0 END, - v.parsed_episode ASC, - v.video_id ASC - `, - ) - .all(animeId) as unknown as AnimeEpisodeRow[]; -} - -export function getMediaLibrary(db: DatabaseSync): MediaLibraryRow[] { - return db - .prepare( - ` - SELECT - v.video_id AS videoId, - v.canonical_title AS canonicalTitle, - COALESCE(lm.total_sessions, 0) AS totalSessions, - COALESCE(lm.total_active_ms, 0) AS totalActiveMs, - COALESCE(lm.total_cards, 0) AS totalCards, - COALESCE(lm.total_tokens_seen, 0) AS totalTokensSeen, - COALESCE(lm.last_watched_ms, 0) AS lastWatchedMs, - yv.youtube_video_id AS youtubeVideoId, - yv.video_url AS videoUrl, - yv.video_title AS videoTitle, - yv.video_thumbnail_url AS videoThumbnailUrl, - yv.channel_id AS channelId, - yv.channel_name AS channelName, - yv.channel_url AS channelUrl, - yv.channel_thumbnail_url AS channelThumbnailUrl, - yv.uploader_id AS uploaderId, - yv.uploader_url AS uploaderUrl, - yv.description AS description, - CASE - WHEN ma.cover_blob_hash IS NOT NULL OR ma.cover_blob IS NOT NULL THEN 1 - ELSE 0 - END AS hasCoverArt - FROM imm_videos v - JOIN imm_lifetime_media lm ON lm.video_id = v.video_id - LEFT JOIN imm_media_art ma ON ma.video_id = v.video_id - LEFT JOIN imm_youtube_videos yv ON yv.video_id = v.video_id - ORDER BY lm.last_watched_ms DESC - `, - ) - .all() as unknown as MediaLibraryRow[]; -} - -export function getMediaDetail(db: DatabaseSync, videoId: number): MediaDetailRow | null { - return db - .prepare( - ` - ${ACTIVE_SESSION_METRICS_CTE} - SELECT - v.video_id AS videoId, - v.canonical_title AS canonicalTitle, - v.anime_id AS animeId, - COALESCE(lm.total_sessions, 0) AS totalSessions, - COALESCE(lm.total_active_ms, 0) AS totalActiveMs, - COALESCE(lm.total_cards, 0) AS totalCards, - 
COALESCE(lm.total_tokens_seen, 0) AS totalTokensSeen, - COALESCE(lm.total_lines_seen, 0) AS totalLinesSeen, - COALESCE(SUM(COALESCE(asm.lookupCount, s.lookup_count, 0)), 0) AS totalLookupCount, - COALESCE(SUM(COALESCE(asm.lookupHits, s.lookup_hits, 0)), 0) AS totalLookupHits, - COALESCE(SUM(COALESCE(asm.yomitanLookupCount, s.yomitan_lookup_count, 0)), 0) AS totalYomitanLookupCount, - yv.youtube_video_id AS youtubeVideoId, - yv.video_url AS videoUrl, - yv.video_title AS videoTitle, - yv.video_thumbnail_url AS videoThumbnailUrl, - yv.channel_id AS channelId, - yv.channel_name AS channelName, - yv.channel_url AS channelUrl, - yv.channel_thumbnail_url AS channelThumbnailUrl, - yv.uploader_id AS uploaderId, - yv.uploader_url AS uploaderUrl, - yv.description AS description - FROM imm_videos v - JOIN imm_lifetime_media lm ON lm.video_id = v.video_id - LEFT JOIN imm_youtube_videos yv ON yv.video_id = v.video_id - LEFT JOIN imm_sessions s ON s.video_id = v.video_id - LEFT JOIN active_session_metrics asm ON asm.sessionId = s.session_id - WHERE v.video_id = ? 
- GROUP BY v.video_id - `, - ) - .get(videoId) as unknown as MediaDetailRow | null; -} - -export function getMediaSessions( - db: DatabaseSync, - videoId: number, - limit = 100, -): SessionSummaryQueryRow[] { - return db - .prepare( - ` - ${ACTIVE_SESSION_METRICS_CTE} - SELECT - s.session_id AS sessionId, - s.video_id AS videoId, - v.canonical_title AS canonicalTitle, - s.started_at_ms AS startedAtMs, - s.ended_at_ms AS endedAtMs, - COALESCE(asm.totalWatchedMs, s.total_watched_ms, 0) AS totalWatchedMs, - COALESCE(asm.activeWatchedMs, s.active_watched_ms, 0) AS activeWatchedMs, - COALESCE(asm.linesSeen, s.lines_seen, 0) AS linesSeen, - COALESCE(asm.tokensSeen, s.tokens_seen, 0) AS tokensSeen, - COALESCE(asm.cardsMined, s.cards_mined, 0) AS cardsMined, - COALESCE(asm.lookupCount, s.lookup_count, 0) AS lookupCount, - COALESCE(asm.lookupHits, s.lookup_hits, 0) AS lookupHits, - COALESCE(asm.yomitanLookupCount, s.yomitan_lookup_count, 0) AS yomitanLookupCount - FROM imm_sessions s - LEFT JOIN active_session_metrics asm ON asm.sessionId = s.session_id - LEFT JOIN imm_videos v ON v.video_id = s.video_id - WHERE s.video_id = ? - ORDER BY s.started_at_ms DESC - LIMIT ? - `, - ) - .all(videoId, limit) as unknown as SessionSummaryQueryRow[]; -} - -export function getMediaDailyRollups( - db: DatabaseSync, - videoId: number, - limit = 90, -): ImmersionSessionRollupRow[] { - return db - .prepare( - ` - WITH recent_days AS ( - SELECT DISTINCT rollup_day - FROM imm_daily_rollups - WHERE video_id = ? - ORDER BY rollup_day DESC - LIMIT ? - ) - SELECT - rollup_day AS rollupDayOrMonth, - video_id AS videoId, - total_sessions AS totalSessions, - total_active_min AS totalActiveMin, - total_lines_seen AS totalLinesSeen, - total_tokens_seen AS totalTokensSeen, - total_cards AS totalCards, - cards_per_hour AS cardsPerHour, - tokens_per_min AS tokensPerMin, - lookup_hit_rate AS lookupHitRate - FROM imm_daily_rollups - WHERE video_id = ? 
- AND rollup_day IN (SELECT rollup_day FROM recent_days) - ORDER BY rollup_day DESC, video_id DESC - `, - ) - .all(videoId, limit, videoId) as unknown as ImmersionSessionRollupRow[]; -} - -export function getAnimeDailyRollups( - db: DatabaseSync, - animeId: number, - limit = 90, -): ImmersionSessionRollupRow[] { - return db - .prepare( - ` - WITH recent_days AS ( - SELECT DISTINCT r.rollup_day - FROM imm_daily_rollups r - JOIN imm_videos v ON v.video_id = r.video_id - WHERE v.anime_id = ? - ORDER BY r.rollup_day DESC - LIMIT ? - ) - SELECT r.rollup_day AS rollupDayOrMonth, r.video_id AS videoId, - r.total_sessions AS totalSessions, r.total_active_min AS totalActiveMin, - r.total_lines_seen AS totalLinesSeen, - r.total_tokens_seen AS totalTokensSeen, r.total_cards AS totalCards, - r.cards_per_hour AS cardsPerHour, r.tokens_per_min AS tokensPerMin, - r.lookup_hit_rate AS lookupHitRate - FROM imm_daily_rollups r - JOIN imm_videos v ON v.video_id = r.video_id - WHERE v.anime_id = ? - AND r.rollup_day IN (SELECT rollup_day FROM recent_days) - ORDER BY r.rollup_day DESC, r.video_id DESC - `, - ) - .all(animeId, limit, animeId) as unknown as ImmersionSessionRollupRow[]; -} - -export function getAnimeCoverArt(db: DatabaseSync, animeId: number): MediaArtRow | null { - const resolvedCoverBlob = resolvedCoverBlobExpr('a', 'cab'); - return db - .prepare( - ` - SELECT - a.video_id AS videoId, - a.anilist_id AS anilistId, - a.cover_url AS coverUrl, - ${resolvedCoverBlob} AS coverBlob, - a.title_romaji AS titleRomaji, - a.title_english AS titleEnglish, - a.episodes_total AS episodesTotal, - a.fetched_at_ms AS fetchedAtMs - FROM imm_media_art a - JOIN imm_videos v ON v.video_id = a.video_id - LEFT JOIN imm_cover_art_blobs cab ON cab.blob_hash = a.cover_blob_hash - WHERE v.anime_id = ? 
- AND ${resolvedCoverBlob} IS NOT NULL - ORDER BY a.fetched_at_ms DESC, a.video_id DESC - LIMIT 1 - `, - ) - .get(animeId) as unknown as MediaArtRow | null; -} - -export function getCoverArt(db: DatabaseSync, videoId: number): MediaArtRow | null { - const resolvedCoverBlob = resolvedCoverBlobExpr('a', 'cab'); - return db - .prepare( - ` - SELECT - a.video_id AS videoId, - a.anilist_id AS anilistId, - a.cover_url AS coverUrl, - ${resolvedCoverBlob} AS coverBlob, - a.title_romaji AS titleRomaji, - a.title_english AS titleEnglish, - a.episodes_total AS episodesTotal, - a.fetched_at_ms AS fetchedAtMs - FROM imm_media_art a - LEFT JOIN imm_cover_art_blobs cab ON cab.blob_hash = a.cover_blob_hash - WHERE a.video_id = ? - `, - ) - .get(videoId) as unknown as MediaArtRow | null; -} - -export function getStreakCalendar(db: DatabaseSync, days = 90): StreakCalendarRow[] { - const now = new Date(); - const localMidnight = new Date(now.getFullYear(), now.getMonth(), now.getDate()).getTime(); - const todayLocalDay = Math.floor(localMidnight / 86_400_000); - const cutoffDay = todayLocalDay - days; - return db - .prepare( - ` - SELECT rollup_day AS epochDay, SUM(total_active_min) AS totalActiveMin - FROM imm_daily_rollups - WHERE rollup_day >= ? - GROUP BY rollup_day - ORDER BY rollup_day ASC - `, - ) - .all(cutoffDay) as StreakCalendarRow[]; -} - -export function getAnimeWords(db: DatabaseSync, animeId: number, limit = 50): AnimeWordRow[] { - return db - .prepare( - ` - SELECT w.id AS wordId, w.headword, w.word, w.reading, w.part_of_speech AS partOfSpeech, - SUM(o.occurrence_count) AS frequency - FROM imm_word_line_occurrences o - JOIN imm_subtitle_lines sl ON sl.line_id = o.line_id - JOIN imm_words w ON w.id = o.word_id - WHERE sl.anime_id = ? - GROUP BY w.id - ORDER BY frequency DESC - LIMIT ? 
- `, - ) - .all(animeId, limit) as unknown as AnimeWordRow[]; -} - -export function getEpisodesPerDay(db: DatabaseSync, limit = 90): EpisodesPerDayRow[] { - return db - .prepare( - ` - SELECT CAST(julianday(s.started_at_ms / 1000, 'unixepoch', 'localtime') - 2440587.5 AS INTEGER) AS epochDay, - COUNT(DISTINCT s.video_id) AS episodeCount - FROM imm_sessions s - GROUP BY epochDay - ORDER BY epochDay DESC - LIMIT ? - `, - ) - .all(limit) as EpisodesPerDayRow[]; -} - -export function getNewAnimePerDay(db: DatabaseSync, limit = 90): NewAnimePerDayRow[] { - return db - .prepare( - ` - SELECT first_day AS epochDay, COUNT(*) AS newAnimeCount - FROM ( - SELECT CAST(julianday(MIN(s.started_at_ms) / 1000, 'unixepoch', 'localtime') - 2440587.5 AS INTEGER) AS first_day - FROM imm_sessions s - JOIN imm_videos v ON v.video_id = s.video_id - WHERE v.anime_id IS NOT NULL - GROUP BY v.anime_id - ) - GROUP BY first_day - ORDER BY first_day DESC - LIMIT ? - `, - ) - .all(limit) as NewAnimePerDayRow[]; -} - -export function getWatchTimePerAnime(db: DatabaseSync, limit = 90): WatchTimePerAnimeRow[] { - const nowD = new Date(); - const cutoffDay = - Math.floor( - new Date(nowD.getFullYear(), nowD.getMonth(), nowD.getDate()).getTime() / 86_400_000, - ) - limit; - return db - .prepare( - ` - SELECT r.rollup_day AS epochDay, a.anime_id AS animeId, - a.canonical_title AS animeTitle, - SUM(r.total_active_min) AS totalActiveMin - FROM imm_daily_rollups r - JOIN imm_videos v ON v.video_id = r.video_id - JOIN imm_anime a ON a.anime_id = v.anime_id - WHERE r.rollup_day >= ? - GROUP BY r.rollup_day, a.anime_id - ORDER BY r.rollup_day ASC - `, - ) - .all(cutoffDay) as WatchTimePerAnimeRow[]; -} - -export function getWordDetail(db: DatabaseSync, wordId: number): WordDetailRow | null { - return db - .prepare( - ` - SELECT id AS wordId, headword, word, reading, - part_of_speech AS partOfSpeech, pos1, pos2, pos3, - frequency, first_seen AS firstSeen, last_seen AS lastSeen - FROM imm_words WHERE id = ? 
- `, - ) - .get(wordId) as WordDetailRow | null; -} - -export function getWordAnimeAppearances( - db: DatabaseSync, - wordId: number, -): WordAnimeAppearanceRow[] { - return db - .prepare( - ` - SELECT a.anime_id AS animeId, a.canonical_title AS animeTitle, - SUM(o.occurrence_count) AS occurrenceCount - FROM imm_word_line_occurrences o - JOIN imm_subtitle_lines sl ON sl.line_id = o.line_id - JOIN imm_anime a ON a.anime_id = sl.anime_id - WHERE o.word_id = ? AND sl.anime_id IS NOT NULL - GROUP BY a.anime_id - ORDER BY occurrenceCount DESC - `, - ) - .all(wordId) as WordAnimeAppearanceRow[]; -} - -export function getSimilarWords(db: DatabaseSync, wordId: number, limit = 10): SimilarWordRow[] { - const word = db.prepare('SELECT headword, reading FROM imm_words WHERE id = ?').get(wordId) as { - headword: string; - reading: string; - } | null; - if (!word) return []; - return db - .prepare( - ` - SELECT id AS wordId, headword, word, reading, frequency - FROM imm_words - WHERE id != ? - AND (reading = ? OR headword LIKE ? OR headword LIKE ?) - ORDER BY frequency DESC - LIMIT ? - `, - ) - .all( - wordId, - word.reading, - `%${word.headword.charAt(0)}%`, - `%${word.headword.charAt(word.headword.length - 1)}%`, - limit, - ) as SimilarWordRow[]; -} - -export function getKanjiDetail(db: DatabaseSync, kanjiId: number): KanjiDetailRow | null { - return db - .prepare( - ` - SELECT id AS kanjiId, kanji, frequency, first_seen AS firstSeen, last_seen AS lastSeen - FROM imm_kanji WHERE id = ? - `, - ) - .get(kanjiId) as KanjiDetailRow | null; -} - -export function getKanjiAnimeAppearances( - db: DatabaseSync, - kanjiId: number, -): KanjiAnimeAppearanceRow[] { - return db - .prepare( - ` - SELECT a.anime_id AS animeId, a.canonical_title AS animeTitle, - SUM(o.occurrence_count) AS occurrenceCount - FROM imm_kanji_line_occurrences o - JOIN imm_subtitle_lines sl ON sl.line_id = o.line_id - JOIN imm_anime a ON a.anime_id = sl.anime_id - WHERE o.kanji_id = ? 
AND sl.anime_id IS NOT NULL - GROUP BY a.anime_id - ORDER BY occurrenceCount DESC - `, - ) - .all(kanjiId) as KanjiAnimeAppearanceRow[]; -} - -export function getKanjiWords(db: DatabaseSync, kanjiId: number, limit = 20): KanjiWordRow[] { - const kanjiRow = db.prepare('SELECT kanji FROM imm_kanji WHERE id = ?').get(kanjiId) as { - kanji: string; - } | null; - if (!kanjiRow) return []; - return db - .prepare( - ` - SELECT id AS wordId, headword, word, reading, frequency - FROM imm_words - WHERE headword LIKE ? - ORDER BY frequency DESC - LIMIT ? - `, - ) - .all(`%${kanjiRow.kanji}%`, limit) as KanjiWordRow[]; -} - -export function getEpisodeWords(db: DatabaseSync, videoId: number, limit = 50): AnimeWordRow[] { - return db - .prepare( - ` - SELECT w.id AS wordId, w.headword, w.word, w.reading, w.part_of_speech AS partOfSpeech, - SUM(o.occurrence_count) AS frequency - FROM imm_word_line_occurrences o - JOIN imm_subtitle_lines sl ON sl.line_id = o.line_id - JOIN imm_words w ON w.id = o.word_id - WHERE sl.video_id = ? - GROUP BY w.id - ORDER BY frequency DESC - LIMIT ? 
- `, - ) - .all(videoId, limit) as unknown as AnimeWordRow[]; -} - -export function getEpisodeSessions(db: DatabaseSync, videoId: number): SessionSummaryQueryRow[] { - return db - .prepare( - ` - ${ACTIVE_SESSION_METRICS_CTE} - SELECT - s.session_id AS sessionId, s.video_id AS videoId, - v.canonical_title AS canonicalTitle, - s.started_at_ms AS startedAtMs, s.ended_at_ms AS endedAtMs, - COALESCE(asm.totalWatchedMs, s.total_watched_ms, 0) AS totalWatchedMs, - COALESCE(asm.activeWatchedMs, s.active_watched_ms, 0) AS activeWatchedMs, - COALESCE(asm.linesSeen, s.lines_seen, 0) AS linesSeen, - COALESCE(asm.tokensSeen, s.tokens_seen, 0) AS tokensSeen, - COALESCE(asm.cardsMined, s.cards_mined, 0) AS cardsMined, - COALESCE(asm.lookupCount, s.lookup_count, 0) AS lookupCount, - COALESCE(asm.lookupHits, s.lookup_hits, 0) AS lookupHits, - COALESCE(asm.yomitanLookupCount, s.yomitan_lookup_count, 0) AS yomitanLookupCount - FROM imm_sessions s - JOIN imm_videos v ON v.video_id = s.video_id - LEFT JOIN active_session_metrics asm ON asm.sessionId = s.session_id - WHERE s.video_id = ? - ORDER BY s.started_at_ms DESC - `, - ) - .all(videoId) as SessionSummaryQueryRow[]; -} - -export function getEpisodeCardEvents(db: DatabaseSync, videoId: number): EpisodeCardEventRow[] { - const rows = db - .prepare( - ` - SELECT e.event_id AS eventId, e.session_id AS sessionId, - e.ts_ms AS tsMs, e.cards_delta AS cardsDelta, - e.payload_json AS payloadJson - FROM imm_session_events e - JOIN imm_sessions s ON s.session_id = e.session_id - WHERE s.video_id = ? 
AND e.event_type = 4 - ORDER BY e.ts_ms DESC - `, - ) - .all(videoId) as Array<{ - eventId: number; - sessionId: number; - tsMs: number; - cardsDelta: number; - payloadJson: string | null; - }>; - - return rows.map((row) => { - let noteIds: number[] = []; - if (row.payloadJson) { - try { - const parsed = JSON.parse(row.payloadJson); - if (Array.isArray(parsed.noteIds)) noteIds = parsed.noteIds; - } catch {} - } - return { - eventId: row.eventId, - sessionId: row.sessionId, - tsMs: row.tsMs, - cardsDelta: row.cardsDelta, - noteIds, - }; - }); -} - -export function upsertCoverArt( - db: DatabaseSync, - videoId: number, - art: { - anilistId: number | null; - coverUrl: string | null; - coverBlob: ArrayBuffer | Uint8Array | Buffer | null; - titleRomaji: string | null; - titleEnglish: string | null; - episodesTotal: number | null; - }, -): void { - const existing = db - .prepare( - ` - SELECT cover_blob_hash AS coverBlobHash - FROM imm_media_art - WHERE video_id = ? - `, - ) - .get(videoId) as { coverBlobHash: string | null } | undefined; - const sharedCoverBlobHash = findSharedCoverBlobHash(db, videoId, art.anilistId, art.coverUrl); - const nowMs = Date.now(); - const coverBlob = normalizeCoverBlobBytes(art.coverBlob); - let coverBlobHash = sharedCoverBlobHash ?? null; - if (!coverBlobHash && coverBlob && coverBlob.length > 0) { - coverBlobHash = createHash('sha256').update(coverBlob).digest('hex'); - } - if (!coverBlobHash && (!coverBlob || coverBlob.length === 0)) { - coverBlobHash = existing?.coverBlobHash ?? null; - } - - if (coverBlobHash && coverBlob && coverBlob.length > 0 && !sharedCoverBlobHash) { - db.prepare( - ` - INSERT INTO imm_cover_art_blobs (blob_hash, cover_blob, CREATED_DATE, LAST_UPDATE_DATE) - VALUES (?, ?, ?, ?) 
- ON CONFLICT(blob_hash) DO UPDATE SET - LAST_UPDATE_DATE = excluded.LAST_UPDATE_DATE - `, - ).run(coverBlobHash, coverBlob, nowMs, nowMs); - } - - db.prepare( - ` - INSERT INTO imm_media_art ( - video_id, anilist_id, cover_url, cover_blob, cover_blob_hash, - title_romaji, title_english, episodes_total, - fetched_at_ms, CREATED_DATE, LAST_UPDATE_DATE - ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) - ON CONFLICT(video_id) DO UPDATE SET - anilist_id = excluded.anilist_id, - cover_url = excluded.cover_url, - cover_blob = excluded.cover_blob, - cover_blob_hash = excluded.cover_blob_hash, - title_romaji = excluded.title_romaji, - title_english = excluded.title_english, - episodes_total = excluded.episodes_total, - fetched_at_ms = excluded.fetched_at_ms, - LAST_UPDATE_DATE = excluded.LAST_UPDATE_DATE - `, - ).run( - videoId, - art.anilistId, - art.coverUrl, - coverBlobHash ? buildCoverBlobReference(coverBlobHash) : coverBlob, - coverBlobHash, - art.titleRomaji, - art.titleEnglish, - art.episodesTotal, - nowMs, - nowMs, - nowMs, - ); - - if (existing?.coverBlobHash !== coverBlobHash) { - cleanupUnusedCoverArtBlobHash(db, existing?.coverBlobHash ?? null); - } -} - -export function updateAnimeAnilistInfo( - db: DatabaseSync, - videoId: number, - info: { - anilistId: number; - titleRomaji: string | null; - titleEnglish: string | null; - titleNative: string | null; - episodesTotal: number | null; - }, -): void { - const row = db.prepare('SELECT anime_id FROM imm_videos WHERE video_id = ?').get(videoId) as { - anime_id: number | null; - } | null; - if (!row?.anime_id) return; - - db.prepare( - ` - UPDATE imm_anime - SET - anilist_id = COALESCE(?, anilist_id), - title_romaji = COALESCE(?, title_romaji), - title_english = COALESCE(?, title_english), - title_native = COALESCE(?, title_native), - episodes_total = COALESCE(?, episodes_total), - LAST_UPDATE_DATE = ? - WHERE anime_id = ? 
- `, - ).run( - info.anilistId, - info.titleRomaji, - info.titleEnglish, - info.titleNative, - info.episodesTotal, - Date.now(), - row.anime_id, - ); -} - -export function markVideoWatched(db: DatabaseSync, videoId: number, watched: boolean): void { - db.prepare('UPDATE imm_videos SET watched = ?, LAST_UPDATE_DATE = ? WHERE video_id = ?').run( - watched ? 1 : 0, - Date.now(), - videoId, - ); -} - -export function getVideoDurationMs(db: DatabaseSync, videoId: number): number { - const row = db.prepare('SELECT duration_ms FROM imm_videos WHERE video_id = ?').get(videoId) as { - duration_ms: number; - } | null; - return row?.duration_ms ?? 0; -} - -export function isVideoWatched(db: DatabaseSync, videoId: number): boolean { - const row = db.prepare('SELECT watched FROM imm_videos WHERE video_id = ?').get(videoId) as { - watched: number; - } | null; - return row?.watched === 1; -} - -export function deleteSession(db: DatabaseSync, sessionId: number): void { - const sessionIds = [sessionId]; - const affectedWordIds = getAffectedWordIdsForSessions(db, sessionIds); - const affectedKanjiIds = getAffectedKanjiIdsForSessions(db, sessionIds); - - db.exec('BEGIN IMMEDIATE'); - try { - deleteSessionsByIds(db, sessionIds); - refreshLexicalAggregates(db, affectedWordIds, affectedKanjiIds); - rebuildLifetimeSummariesInTransaction(db); - rebuildRollupsInTransaction(db); - db.exec('COMMIT'); - } catch (error) { - db.exec('ROLLBACK'); - throw error; - } -} - -export function deleteSessions(db: DatabaseSync, sessionIds: number[]): void { - if (sessionIds.length === 0) return; - const affectedWordIds = getAffectedWordIdsForSessions(db, sessionIds); - const affectedKanjiIds = getAffectedKanjiIdsForSessions(db, sessionIds); - - db.exec('BEGIN IMMEDIATE'); - try { - deleteSessionsByIds(db, sessionIds); - refreshLexicalAggregates(db, affectedWordIds, affectedKanjiIds); - rebuildLifetimeSummariesInTransaction(db); - rebuildRollupsInTransaction(db); - db.exec('COMMIT'); - } catch (error) { - 
db.exec('ROLLBACK'); - throw error; - } -} - -export function deleteVideo(db: DatabaseSync, videoId: number): void { - const artRow = db - .prepare( - ` - SELECT cover_blob_hash AS coverBlobHash - FROM imm_media_art - WHERE video_id = ? - `, - ) - .get(videoId) as { coverBlobHash: string | null } | undefined; - const affectedWordIds = getAffectedWordIdsForVideo(db, videoId); - const affectedKanjiIds = getAffectedKanjiIdsForVideo(db, videoId); - const sessions = db - .prepare('SELECT session_id FROM imm_sessions WHERE video_id = ?') - .all(videoId) as Array<{ session_id: number }>; - - db.exec('BEGIN IMMEDIATE'); - try { - deleteSessionsByIds( - db, - sessions.map((session) => session.session_id), - ); - db.prepare('DELETE FROM imm_subtitle_lines WHERE video_id = ?').run(videoId); - db.prepare('DELETE FROM imm_daily_rollups WHERE video_id = ?').run(videoId); - db.prepare('DELETE FROM imm_monthly_rollups WHERE video_id = ?').run(videoId); - db.prepare('DELETE FROM imm_media_art WHERE video_id = ?').run(videoId); - cleanupUnusedCoverArtBlobHash(db, artRow?.coverBlobHash ?? 
null); - db.prepare('DELETE FROM imm_videos WHERE video_id = ?').run(videoId); - refreshLexicalAggregates(db, affectedWordIds, affectedKanjiIds); - rebuildLifetimeSummariesInTransaction(db); - rebuildRollupsInTransaction(db); - db.exec('COMMIT'); - } catch (error) { - db.exec('ROLLBACK'); - throw error; - } -} +export * from './query-sessions'; +export * from './query-trends'; +export * from './query-lexical'; +export * from './query-library'; +export * from './query-maintenance'; diff --git a/src/core/services/immersion-tracker/session.ts b/src/core/services/immersion-tracker/session.ts index 7f5cb94..b0484f6 100644 --- a/src/core/services/immersion-tracker/session.ts +++ b/src/core/services/immersion-tracker/session.ts @@ -1,16 +1,18 @@ import crypto from 'node:crypto'; import type { DatabaseSync } from './sqlite'; import { createInitialSessionState } from './reducer'; +import { nowMs } from './time'; import { SESSION_STATUS_ACTIVE, SESSION_STATUS_ENDED } from './types'; import type { SessionState } from './types'; +import { toDbMs } from './query-shared'; export function startSessionRecord( db: DatabaseSync, videoId: number, - startedAtMs = Date.now(), + startedAtMs = nowMs(), ): { sessionId: number; state: SessionState } { const sessionUuid = crypto.randomUUID(); - const nowMs = Date.now(); + const createdAtMs = nowMs(); const result = db .prepare( ` @@ -20,7 +22,14 @@ export function startSessionRecord( ) VALUES (?, ?, ?, ?, ?, ?) 
`, ) - .run(sessionUuid, videoId, startedAtMs, SESSION_STATUS_ACTIVE, startedAtMs, nowMs); + .run( + sessionUuid, + videoId, + toDbMs(startedAtMs), + SESSION_STATUS_ACTIVE, + toDbMs(startedAtMs), + toDbMs(createdAtMs), + ); const sessionId = Number(result.lastInsertRowid); return { sessionId, @@ -31,7 +40,7 @@ export function startSessionRecord( export function finalizeSessionRecord( db: DatabaseSync, sessionState: SessionState, - endedAtMs = Date.now(), + endedAtMs = nowMs(), ): void { db.prepare( ` @@ -57,9 +66,9 @@ export function finalizeSessionRecord( WHERE session_id = ? `, ).run( - endedAtMs, + toDbMs(endedAtMs), SESSION_STATUS_ENDED, - sessionState.lastMediaMs, + sessionState.lastMediaMs === null ? null : toDbMs(sessionState.lastMediaMs), sessionState.totalWatchedMs, sessionState.activeWatchedMs, sessionState.linesSeen, @@ -73,7 +82,7 @@ export function finalizeSessionRecord( sessionState.seekForwardCount, sessionState.seekBackwardCount, sessionState.mediaBufferEvents, - Date.now(), + toDbMs(nowMs()), sessionState.sessionId, ); } diff --git a/src/core/services/immersion-tracker/storage-session.test.ts b/src/core/services/immersion-tracker/storage-session.test.ts index 21b404b..d00e09b 100644 --- a/src/core/services/immersion-tracker/storage-session.test.ts +++ b/src/core/services/immersion-tracker/storage-session.test.ts @@ -219,7 +219,9 @@ test('ensureSchema adds youtube metadata table to existing schema version 15 dat const tables = new Set( ( - db.prepare(`SELECT name FROM sqlite_master WHERE type = 'table' AND name LIKE 'imm_%'`).all() as Array<{ + db + .prepare(`SELECT name FROM sqlite_master WHERE type = 'table' AND name LIKE 'imm_%'`) + .all() as Array<{ name: string; }> ).map((row) => row.name), @@ -899,7 +901,8 @@ test('youtube videos can be regrouped under a shared channel anime identity', () channelId: 'UC123', channelName: 'Channel Name', channelUrl: 'https://www.youtube.com/channel/UC123', - channelThumbnailUrl: 
'https://yt3.googleusercontent.com/channel-123=s176-c-k-c0x00ffffff-no-rj', + channelThumbnailUrl: + 'https://yt3.googleusercontent.com/channel-123=s176-c-k-c0x00ffffff-no-rj', uploaderId: '@channelname', uploaderUrl: 'https://www.youtube.com/@channelname', description: null, @@ -913,7 +916,8 @@ test('youtube videos can be regrouped under a shared channel anime identity', () channelId: 'UC123', channelName: 'Channel Name', channelUrl: 'https://www.youtube.com/channel/UC123', - channelThumbnailUrl: 'https://yt3.googleusercontent.com/channel-123=s176-c-k-c0x00ffffff-no-rj', + channelThumbnailUrl: + 'https://yt3.googleusercontent.com/channel-123=s176-c-k-c0x00ffffff-no-rj', uploaderId: '@channelname', uploaderUrl: 'https://www.youtube.com/@channelname', description: null, @@ -1074,6 +1078,56 @@ test('executeQueuedWrite inserts event and telemetry rows', () => { } }); +test('executeQueuedWrite rejects partial telemetry writes instead of zero-filling', () => { + const dbPath = makeDbPath(); + const db = new Database(dbPath); + + try { + ensureSchema(db); + const stmts = createTrackerPreparedStatements(db); + const videoId = getOrCreateVideoRecord(db, 'local:/tmp/partial-telemetry.mkv', { + canonicalTitle: 'Partial Telemetry', + sourcePath: '/tmp/partial-telemetry.mkv', + sourceUrl: null, + sourceType: SOURCE_TYPE_LOCAL, + }); + const { sessionId } = startSessionRecord(db, videoId, 5_000); + + assert.throws( + () => + executeQueuedWrite( + { + kind: 'telemetry', + sessionId, + sampleMs: 6_000, + totalWatchedMs: 1_000, + activeWatchedMs: 900, + linesSeen: 3, + cardsMined: 1, + lookupCount: 2, + lookupHits: 1, + yomitanLookupCount: 0, + pauseCount: 1, + pauseMs: 50, + seekForwardCount: 0, + seekBackwardCount: 0, + mediaBufferEvents: 0, + }, + stmts, + ), + /Incomplete telemetry write/, + ); + + const telemetryCount = db + .prepare('SELECT COUNT(*) AS total FROM imm_session_telemetry WHERE session_id = ?') + .get(sessionId) as { total: number }; + 
assert.equal(telemetryCount.total, 0); + } finally { + db.close(); + cleanupDbPath(dbPath); + } +}); + test('executeQueuedWrite inserts and upserts word and kanji rows', () => { const dbPath = makeDbPath(); const db = new Database(dbPath); diff --git a/src/core/services/immersion-tracker/storage.ts b/src/core/services/immersion-tracker/storage.ts index 5e4b85e..ce8833c 100644 --- a/src/core/services/immersion-tracker/storage.ts +++ b/src/core/services/immersion-tracker/storage.ts @@ -1,8 +1,10 @@ import { createHash } from 'node:crypto'; import { parseMediaInfo } from '../../../jimaku/utils'; import type { DatabaseSync } from './sqlite'; +import { nowMs } from './time'; import { SCHEMA_VERSION } from './types'; import type { QueuedWrite, VideoMetadata, YoutubeVideoMetadata } from './types'; +import { toDbMs } from './query-shared'; export interface TrackerPreparedStatements { telemetryInsertStmt: ReturnType; @@ -128,7 +130,7 @@ function deduplicateExistingCoverArtRows(db: DatabaseSync): void { return; } - const nowMs = Date.now(); + const nowMsValue = toDbMs(nowMs()); const upsertBlobStmt = db.prepare(` INSERT INTO imm_cover_art_blobs (blob_hash, cover_blob, CREATED_DATE, LAST_UPDATE_DATE) VALUES (?, ?, ?, ?) 
@@ -150,14 +152,14 @@ function deduplicateExistingCoverArtRows(db: DatabaseSync): void { const refHash = parseCoverBlobReference(coverBlob); if (refHash) { if (row.cover_blob_hash !== refHash) { - updateMediaStmt.run(coverBlob, refHash, nowMs, row.video_id); + updateMediaStmt.run(coverBlob, refHash, nowMsValue, row.video_id); } continue; } const hash = createHash('sha256').update(coverBlob).digest('hex'); - upsertBlobStmt.run(hash, coverBlob, nowMs, nowMs); - updateMediaStmt.run(buildCoverBlobReference(hash), hash, nowMs, row.video_id); + upsertBlobStmt.run(hash, coverBlob, nowMsValue, nowMsValue); + updateMediaStmt.run(buildCoverBlobReference(hash), hash, nowMsValue, row.video_id); } } @@ -273,7 +275,7 @@ function parseLegacyAnimeBackfillCandidate( } function ensureLifetimeSummaryTables(db: DatabaseSync): void { - const nowMs = Date.now(); + const nowMsValue = toDbMs(nowMs()); db.exec(` CREATE TABLE IF NOT EXISTS imm_lifetime_global( @@ -315,8 +317,8 @@ function ensureLifetimeSummaryTables(db: DatabaseSync): void { 0, 0, NULL, - ${nowMs}, - ${nowMs} + ${nowMsValue}, + ${nowMsValue} WHERE NOT EXISTS (SELECT 1 FROM imm_lifetime_global LIMIT 1) `); @@ -403,13 +405,13 @@ export function getOrCreateAnimeRecord(db: DatabaseSync, input: AnimeRecordInput input.titleEnglish, input.titleNative, input.metadataJson, - Date.now(), + toDbMs(nowMs()), existing.anime_id, ); return existing.anime_id; } - const nowMs = Date.now(); + const nowMsValue = toDbMs(nowMs()); const result = db .prepare( ` @@ -434,8 +436,8 @@ export function getOrCreateAnimeRecord(db: DatabaseSync, input: AnimeRecordInput input.titleEnglish, input.titleNative, input.metadataJson, - nowMs, - nowMs, + nowMsValue, + nowMsValue, ); return Number(result.lastInsertRowid); } @@ -469,7 +471,7 @@ export function linkVideoToAnimeRecord( input.parserSource, input.parserConfidence, input.parseMetadataJson, - Date.now(), + toDbMs(nowMs()), videoId, ); } @@ -854,7 +856,7 @@ export function ensureSchema(db: DatabaseSync): 
void { addColumnIfMissing(db, 'imm_monthly_rollups', 'CREATED_DATE'); addColumnIfMissing(db, 'imm_monthly_rollups', 'LAST_UPDATE_DATE'); - const nowMs = Date.now(); + const migratedAtMs = toDbMs(nowMs()); db.prepare( ` UPDATE imm_videos @@ -894,7 +896,7 @@ export function ensureSchema(db: DatabaseSync): void { CREATED_DATE = COALESCE(CREATED_DATE, ?), LAST_UPDATE_DATE = COALESCE(LAST_UPDATE_DATE, ?) `, - ).run(nowMs, nowMs); + ).run(migratedAtMs, migratedAtMs); db.prepare( ` UPDATE imm_monthly_rollups @@ -902,7 +904,7 @@ export function ensureSchema(db: DatabaseSync): void { CREATED_DATE = COALESCE(CREATED_DATE, ?), LAST_UPDATE_DATE = COALESCE(LAST_UPDATE_DATE, ?) `, - ).run(nowMs, nowMs); + ).run(migratedAtMs, migratedAtMs); } if (currentVersion?.schema_version === 1 || currentVersion?.schema_version === 2) { @@ -1241,7 +1243,7 @@ export function ensureSchema(db: DatabaseSync): void { db.exec(` INSERT INTO imm_schema_version(schema_version, applied_at_ms) - VALUES (${SCHEMA_VERSION}, ${Date.now()}) + VALUES (${SCHEMA_VERSION}, ${toDbMs(nowMs())}) ON CONFLICT DO NOTHING `); } @@ -1399,28 +1401,48 @@ function incrementKanjiAggregate( } export function executeQueuedWrite(write: QueuedWrite, stmts: TrackerPreparedStatements): void { + const currentMs = toDbMs(nowMs()); if (write.kind === 'telemetry') { - const nowMs = Date.now(); + if ( + write.totalWatchedMs === undefined || + write.activeWatchedMs === undefined || + write.linesSeen === undefined || + write.tokensSeen === undefined || + write.cardsMined === undefined || + write.lookupCount === undefined || + write.lookupHits === undefined || + write.yomitanLookupCount === undefined || + write.pauseCount === undefined || + write.pauseMs === undefined || + write.seekForwardCount === undefined || + write.seekBackwardCount === undefined || + write.mediaBufferEvents === undefined + ) { + throw new Error('Incomplete telemetry write'); + } + const telemetrySampleMs = toDbMs(write.sampleMs ?? 
Number(currentMs)); stmts.telemetryInsertStmt.run( write.sessionId, - write.sampleMs!, - write.totalWatchedMs!, - write.activeWatchedMs!, - write.linesSeen!, - write.tokensSeen!, - write.cardsMined!, - write.lookupCount!, - write.lookupHits!, - write.yomitanLookupCount ?? 0, - write.pauseCount!, - write.pauseMs!, - write.seekForwardCount!, - write.seekBackwardCount!, - write.mediaBufferEvents!, - nowMs, - nowMs, + telemetrySampleMs, + write.totalWatchedMs, + write.activeWatchedMs, + write.linesSeen, + write.tokensSeen, + write.cardsMined, + write.lookupCount, + write.lookupHits, + write.yomitanLookupCount, + write.pauseCount, + write.pauseMs, + write.seekForwardCount, + write.seekBackwardCount, + write.mediaBufferEvents, + currentMs, + currentMs, ); - stmts.sessionCheckpointStmt.run(write.lastMediaMs ?? null, nowMs, write.sessionId); + if (write.lastMediaMs !== undefined) { + stmts.sessionCheckpointStmt.run(write.lastMediaMs ?? null, currentMs, write.sessionId); + } return; } if (write.kind === 'word') { @@ -1456,8 +1478,8 @@ export function executeQueuedWrite(write: QueuedWrite, stmts: TrackerPreparedSta write.segmentEndMs ?? null, write.text, write.secondaryText ?? null, - Date.now(), - Date.now(), + currentMs, + currentMs, ); const lineId = Number(lineResult.lastInsertRowid); for (const occurrence of write.wordOccurrences) { @@ -1473,16 +1495,16 @@ export function executeQueuedWrite(write: QueuedWrite, stmts: TrackerPreparedSta stmts.eventInsertStmt.run( write.sessionId, - write.sampleMs!, - write.eventType!, + toDbMs(write.sampleMs ?? Number(currentMs)), + write.eventType ?? 0, write.lineIndex ?? null, write.segmentStartMs ?? null, write.segmentEndMs ?? null, write.tokensDelta ?? 0, write.cardsDelta ?? 0, write.payloadJson ?? null, - Date.now(), - Date.now(), + currentMs, + currentMs, ); } @@ -1508,11 +1530,11 @@ export function getOrCreateVideoRecord( LAST_UPDATE_DATE = ? WHERE video_id = ? 
`, - ).run(details.canonicalTitle || 'unknown', Date.now(), existing.video_id); + ).run(details.canonicalTitle || 'unknown', toDbMs(nowMs()), existing.video_id); return existing.video_id; } - const nowMs = Date.now(); + const currentMs = toDbMs(nowMs()); const insert = db.prepare(` INSERT INTO imm_videos ( video_key, canonical_title, source_type, source_path, source_url, @@ -1539,8 +1561,8 @@ export function getOrCreateVideoRecord( null, null, null, - nowMs, - nowMs, + currentMs, + currentMs, ); return Number(result.lastInsertRowid); } @@ -1582,7 +1604,7 @@ export function updateVideoMetadataRecord( metadata.hashSha256, metadata.screenshotPath, metadata.metadataJson, - Date.now(), + toDbMs(nowMs()), videoId, ); } @@ -1600,7 +1622,7 @@ export function updateVideoTitleRecord( LAST_UPDATE_DATE = ? WHERE video_id = ? `, - ).run(canonicalTitle, Date.now(), videoId); + ).run(canonicalTitle, toDbMs(nowMs()), videoId); } export function upsertYoutubeVideoMetadata( @@ -1608,7 +1630,7 @@ export function upsertYoutubeVideoMetadata( videoId: number, metadata: YoutubeVideoMetadata, ): void { - const nowMs = Date.now(); + const currentMs = toDbMs(nowMs()); db.prepare( ` INSERT INTO imm_youtube_videos ( @@ -1659,8 +1681,8 @@ export function upsertYoutubeVideoMetadata( metadata.uploaderUrl ?? null, metadata.description ?? null, metadata.metadataJson ?? 
null, - nowMs, - nowMs, - nowMs, + currentMs, + currentMs, + currentMs, ); } diff --git a/src/core/services/immersion-tracker/time.test.ts b/src/core/services/immersion-tracker/time.test.ts new file mode 100644 index 0000000..08c5f54 --- /dev/null +++ b/src/core/services/immersion-tracker/time.test.ts @@ -0,0 +1,7 @@ +import assert from 'node:assert/strict'; +import test from 'node:test'; +import { nowMs } from './time.js'; + +test('nowMs returns wall-clock epoch milliseconds', () => { + assert.ok(nowMs() > 1_600_000_000_000); +}); diff --git a/src/core/services/immersion-tracker/time.ts b/src/core/services/immersion-tracker/time.ts new file mode 100644 index 0000000..8ea2081 --- /dev/null +++ b/src/core/services/immersion-tracker/time.ts @@ -0,0 +1,8 @@ +export function nowMs(): number { + const perf = globalThis.performance; + if (perf && Number.isFinite(perf.timeOrigin)) { + return Math.floor(perf.timeOrigin + perf.now()); + } + + return Date.now(); +} diff --git a/src/core/services/index.ts b/src/core/services/index.ts index b63e7a7..3d0e3f7 100644 --- a/src/core/services/index.ts +++ b/src/core/services/index.ts @@ -79,10 +79,7 @@ export { handleOverlayWindowBeforeInputEvent, isTabInputForMpvForwarding, } from './overlay-window-input'; -export { - initializeOverlayAnkiIntegration, - initializeOverlayRuntime, -} from './overlay-runtime-init'; +export { initializeOverlayAnkiIntegration, initializeOverlayRuntime } from './overlay-runtime-init'; export { setVisibleOverlayVisible, updateVisibleOverlayVisibility } from './overlay-visibility'; export { MPV_REQUEST_ID_SECONDARY_SUB_VISIBILITY, diff --git a/src/core/services/ipc.test.ts b/src/core/services/ipc.test.ts index 5cb4a7c..e5dae34 100644 --- a/src/core/services/ipc.test.ts +++ b/src/core/services/ipc.test.ts @@ -70,7 +70,11 @@ function createControllerConfigFixture() { nextAudio: { kind: 'button' as const, buttonIndex: 5 }, playCurrentAudio: { kind: 'button' as const, buttonIndex: 7 }, toggleMpvPause: { kind: 
'button' as const, buttonIndex: 6 }, - leftStickHorizontal: { kind: 'axis' as const, axisIndex: 0, dpadFallback: 'horizontal' as const }, + leftStickHorizontal: { + kind: 'axis' as const, + axisIndex: 0, + dpadFallback: 'horizontal' as const, + }, leftStickVertical: { kind: 'axis' as const, axisIndex: 1, dpadFallback: 'vertical' as const }, rightStickHorizontal: { kind: 'axis' as const, axisIndex: 3, dpadFallback: 'none' as const }, rightStickVertical: { kind: 'axis' as const, axisIndex: 4, dpadFallback: 'none' as const }, diff --git a/src/core/services/ipc.ts b/src/core/services/ipc.ts index 331451a..a20374f 100644 --- a/src/core/services/ipc.ts +++ b/src/core/services/ipc.ts @@ -64,7 +64,9 @@ export interface IpcServiceDeps { getCurrentSecondarySub: () => string; focusMainWindow: () => void; runSubsyncManual: (request: SubsyncManualRunRequest) => Promise; - onYoutubePickerResolve: (request: YoutubePickerResolveRequest) => Promise; + onYoutubePickerResolve: ( + request: YoutubePickerResolveRequest, + ) => Promise; getAnkiConnectStatus: () => boolean; getRuntimeOptions: () => unknown; setRuntimeOption: (id: RuntimeOptionId, value: RuntimeOptionValue) => unknown; @@ -167,7 +169,9 @@ export interface IpcDepsRuntimeOptions { getMpvClient: () => MpvClientLike | null; focusMainWindow: () => void; runSubsyncManual: (request: SubsyncManualRunRequest) => Promise; - onYoutubePickerResolve: (request: YoutubePickerResolveRequest) => Promise; + onYoutubePickerResolve: ( + request: YoutubePickerResolveRequest, + ) => Promise; getAnkiConnectStatus: () => boolean; getRuntimeOptions: () => unknown; setRuntimeOption: (id: RuntimeOptionId, value: RuntimeOptionValue) => unknown; @@ -291,13 +295,16 @@ export function registerIpcHandlers(deps: IpcServiceDeps, ipc: IpcMainRegistrar deps.onOverlayModalOpened(parsedModal); }); - ipc.handle(IPC_CHANNELS.request.youtubePickerResolve, async (_event: unknown, request: unknown) => { - const parsedRequest = 
parseYoutubePickerResolveRequest(request); - if (!parsedRequest) { - return { ok: false, message: 'Invalid YouTube picker resolve payload' }; - } - return await deps.onYoutubePickerResolve(parsedRequest); - }); + ipc.handle( + IPC_CHANNELS.request.youtubePickerResolve, + async (_event: unknown, request: unknown) => { + const parsedRequest = parseYoutubePickerResolveRequest(request); + if (!parsedRequest) { + return { ok: false, message: 'Invalid YouTube picker resolve payload' }; + } + return await deps.onYoutubePickerResolve(parsedRequest); + }, + ); ipc.on(IPC_CHANNELS.command.openYomitanSettings, () => { deps.openYomitanSettings(); @@ -375,13 +382,16 @@ export function registerIpcHandlers(deps: IpcServiceDeps, ipc: IpcMainRegistrar }, ); - ipc.handle(IPC_CHANNELS.command.saveControllerConfig, async (_event: unknown, update: unknown) => { - const parsedUpdate = parseControllerConfigUpdate(update); - if (!parsedUpdate) { - throw new Error('Invalid controller config payload'); - } - await deps.saveControllerConfig(parsedUpdate); - }); + ipc.handle( + IPC_CHANNELS.command.saveControllerConfig, + async (_event: unknown, update: unknown) => { + const parsedUpdate = parseControllerConfigUpdate(update); + if (!parsedUpdate) { + throw new Error('Invalid controller config payload'); + } + await deps.saveControllerConfig(parsedUpdate); + }, + ); ipc.handle(IPC_CHANNELS.request.getMecabStatus, () => { return deps.getMecabStatus(); diff --git a/src/core/services/jellyfin-remote.test.ts b/src/core/services/jellyfin-remote.test.ts index 071bada..a889993 100644 --- a/src/core/services/jellyfin-remote.test.ts +++ b/src/core/services/jellyfin-remote.test.ts @@ -263,7 +263,9 @@ test('reportProgress posts timeline payload and treats failure as non-fatal', as audioStreamIndex: 1, subtitleStreamIndex: 2, }); - const expectedPostedPayload = JSON.parse(JSON.stringify(expectedPayload)); + const expectedPostedPayload = Object.fromEntries( + 
Object.entries(structuredClone(expectedPayload)).filter(([, value]) => value !== undefined), + ); const ok = await service.reportProgress({ itemId: 'movie-2', diff --git a/src/core/services/jellyfin-token-store.ts b/src/core/services/jellyfin-token-store.ts index 179abf3..e125c23 100644 --- a/src/core/services/jellyfin-token-store.ts +++ b/src/core/services/jellyfin-token-store.ts @@ -1,6 +1,6 @@ import * as fs from 'fs'; -import * as path from 'path'; import electron from 'electron'; +import { ensureDirForFile } from '../../shared/fs-utils'; const { safeStorage } = electron; @@ -27,15 +27,8 @@ export interface JellyfinTokenStore { clearSession: () => void; } -function ensureDirectory(filePath: string): void { - const dir = path.dirname(filePath); - if (!fs.existsSync(dir)) { - fs.mkdirSync(dir, { recursive: true }); - } -} - function writePayload(filePath: string, payload: PersistedSessionPayload): void { - ensureDirectory(filePath); + ensureDirForFile(filePath); fs.writeFileSync(filePath, JSON.stringify(payload, null, 2), 'utf-8'); } diff --git a/src/core/services/jlpt-token-filter.test.ts b/src/core/services/jlpt-token-filter.test.ts new file mode 100644 index 0000000..1dca794 --- /dev/null +++ b/src/core/services/jlpt-token-filter.test.ts @@ -0,0 +1,51 @@ +import assert from 'node:assert/strict'; +import test from 'node:test'; +import { + getIgnoredPos1Entries, + JLPT_EXCLUDED_TERMS, + JLPT_IGNORED_MECAB_POS1, + JLPT_IGNORED_MECAB_POS1_ENTRIES, + JLPT_IGNORED_MECAB_POS1_LIST, + shouldIgnoreJlptByTerm, + shouldIgnoreJlptForMecabPos1, +} from './jlpt-token-filter'; + +test('shouldIgnoreJlptByTerm matches the excluded JLPT lexical terms', () => { + assert.equal(shouldIgnoreJlptByTerm('この'), true); + assert.equal(shouldIgnoreJlptByTerm('そこ'), true); + assert.equal(shouldIgnoreJlptByTerm('猫'), false); + assert.deepEqual(Array.from(JLPT_EXCLUDED_TERMS), [ + 'この', + 'その', + 'あの', + 'どの', + 'これ', + 'それ', + 'あれ', + 'どれ', + 'ここ', + 'そこ', + 'あそこ', + 'どこ', + 'こと', + 'ああ', 
+ 'ええ', + 'うう', + 'おお', + 'はは', + 'へえ', + 'ふう', + 'ほう', + ]); +}); + +test('shouldIgnoreJlptForMecabPos1 matches the exported ignored POS1 list', () => { + assert.equal(shouldIgnoreJlptForMecabPos1('助詞'), true); + assert.equal(shouldIgnoreJlptForMecabPos1('名詞'), false); + assert.deepEqual(JLPT_IGNORED_MECAB_POS1, JLPT_IGNORED_MECAB_POS1_LIST); + assert.deepEqual( + JLPT_IGNORED_MECAB_POS1_ENTRIES.map((entry) => entry.pos1), + JLPT_IGNORED_MECAB_POS1_LIST, + ); + assert.deepEqual(getIgnoredPos1Entries(), JLPT_IGNORED_MECAB_POS1_ENTRIES); +}); diff --git a/src/core/services/overlay-window-input.ts b/src/core/services/overlay-window-input.ts index 33f31bb..0ad8be5 100644 --- a/src/core/services/overlay-window-input.ts +++ b/src/core/services/overlay-window-input.ts @@ -59,3 +59,21 @@ export function handleOverlayWindowBeforeInputEvent(options: { options.preventDefault(); return true; } + +export function handleOverlayWindowBlurred(options: { + kind: OverlayWindowKind; + windowVisible: boolean; + isOverlayVisible: (kind: OverlayWindowKind) => boolean; + ensureOverlayWindowLevel: () => void; + moveWindowTop: () => void; +}): boolean { + if (options.kind === 'visible' && !options.isOverlayVisible(options.kind)) { + return false; + } + + options.ensureOverlayWindowLevel(); + if (options.kind === 'visible' && options.windowVisible) { + options.moveWindowTop(); + } + return true; +} diff --git a/src/core/services/overlay-window.test.ts b/src/core/services/overlay-window.test.ts index 1fa1cfa..42e8a77 100644 --- a/src/core/services/overlay-window.test.ts +++ b/src/core/services/overlay-window.test.ts @@ -2,6 +2,7 @@ import assert from 'node:assert/strict'; import test from 'node:test'; import { handleOverlayWindowBeforeInputEvent, + handleOverlayWindowBlurred, isTabInputForMpvForwarding, } from './overlay-window-input'; @@ -82,3 +83,58 @@ test('handleOverlayWindowBeforeInputEvent leaves modal Tab handling alone', () = assert.equal(handled, false); assert.deepEqual(calls, 
[]); }); + +test('handleOverlayWindowBlurred skips visible overlay restacking after manual hide', () => { + const calls: string[] = []; + + const handled = handleOverlayWindowBlurred({ + kind: 'visible', + windowVisible: true, + isOverlayVisible: () => false, + ensureOverlayWindowLevel: () => { + calls.push('ensure-level'); + }, + moveWindowTop: () => { + calls.push('move-top'); + }, + }); + + assert.equal(handled, false); + assert.deepEqual(calls, []); +}); + +test('handleOverlayWindowBlurred preserves active visible/modal window stacking', () => { + const calls: string[] = []; + + assert.equal( + handleOverlayWindowBlurred({ + kind: 'visible', + windowVisible: true, + isOverlayVisible: () => true, + ensureOverlayWindowLevel: () => { + calls.push('ensure-visible'); + }, + moveWindowTop: () => { + calls.push('move-visible'); + }, + }), + true, + ); + + assert.equal( + handleOverlayWindowBlurred({ + kind: 'modal', + windowVisible: true, + isOverlayVisible: () => false, + ensureOverlayWindowLevel: () => { + calls.push('ensure-modal'); + }, + moveWindowTop: () => { + calls.push('move-modal'); + }, + }), + true, + ); + + assert.deepEqual(calls, ['ensure-visible', 'move-visible', 'ensure-modal']); +}); diff --git a/src/core/services/overlay-window.ts b/src/core/services/overlay-window.ts index 96393ef..6b7b4c6 100644 --- a/src/core/services/overlay-window.ts +++ b/src/core/services/overlay-window.ts @@ -5,6 +5,7 @@ import { createLogger } from '../../logger'; import { IPC_CHANNELS } from '../../shared/ipc/contracts'; import { handleOverlayWindowBeforeInputEvent, + handleOverlayWindowBlurred, type OverlayWindowKind, } from './overlay-window-input'; import { buildOverlayWindowOptions } from './overlay-window-options'; @@ -124,12 +125,18 @@ export function createOverlayWindow( }); window.on('blur', () => { - if (!window.isDestroyed()) { - options.ensureOverlayWindowLevel(window); - if (kind === 'visible' && window.isVisible()) { + if (window.isDestroyed()) return; + 
handleOverlayWindowBlurred({ + kind, + windowVisible: window.isVisible(), + isOverlayVisible: options.isOverlayVisible, + ensureOverlayWindowLevel: () => { + options.ensureOverlayWindowLevel(window); + }, + moveWindowTop: () => { window.moveTop(); - } - } + }, + }); }); if (options.isDev && kind === 'visible') { diff --git a/src/core/services/runtime-options-ipc.ts b/src/core/services/runtime-options-ipc.ts index e87c89f..71d32ef 100644 --- a/src/core/services/runtime-options-ipc.ts +++ b/src/core/services/runtime-options-ipc.ts @@ -1,4 +1,8 @@ -import { RuntimeOptionApplyResult, RuntimeOptionId, RuntimeOptionValue } from '../../types'; +import { + RuntimeOptionApplyResult, + RuntimeOptionId, + RuntimeOptionValue, +} from '../../types/runtime-options'; export interface RuntimeOptionsManagerLike { setOptionValue: (id: RuntimeOptionId, value: RuntimeOptionValue) => RuntimeOptionApplyResult; diff --git a/src/core/services/stats-server.ts b/src/core/services/stats-server.ts index e2cbb46..986a951 100644 --- a/src/core/services/stats-server.ts +++ b/src/core/services/stats-server.ts @@ -1,8 +1,9 @@ import { Hono } from 'hono'; -import { serve } from '@hono/node-server'; import type { ImmersionTrackerService } from './immersion-tracker-service.js'; +import http, { type IncomingMessage, type ServerResponse } from 'node:http'; import { basename, extname, resolve, sep } from 'node:path'; import { readFileSync, existsSync, statSync } from 'node:fs'; +import { Readable } from 'node:stream'; import { MediaGenerator } from '../../media-generator.js'; import { AnkiConnectClient } from '../../anki-connect.js'; import type { AnkiConnectConfig } from '../../types.js'; @@ -60,6 +61,71 @@ function resolveStatsNoteFieldName( return null; } +function toFetchHeaders(headers: IncomingMessage['headers']): Headers { + const fetchHeaders = new Headers(); + for (const [name, value] of Object.entries(headers)) { + if (value === undefined) continue; + if (Array.isArray(value)) { + for (const 
entry of value) { + fetchHeaders.append(name, entry); + } + continue; + } + fetchHeaders.set(name, value); + } + return fetchHeaders; +} + +function toFetchRequest(req: IncomingMessage): Request { + const method = req.method ?? 'GET'; + const url = new URL(req.url ?? '/', `http://${req.headers.host ?? '127.0.0.1'}`); + const init: RequestInit & { duplex?: 'half' } = { + method, + headers: toFetchHeaders(req.headers), + }; + + if (method !== 'GET' && method !== 'HEAD') { + init.body = Readable.toWeb(req) as BodyInit; + init.duplex = 'half'; + } + + return new Request(url, init); +} + +async function writeFetchResponse(res: ServerResponse, response: Response): Promise { + res.statusCode = response.status; + response.headers.forEach((value, key) => { + res.setHeader(key, value); + }); + + const body = await response.arrayBuffer(); + res.end(Buffer.from(body)); +} + +function startNodeHttpServer( + app: Hono, + config: StatsServerConfig, +): { close: () => void } { + const server = http.createServer((req, res) => { + void (async () => { + try { + await writeFetchResponse(res, await app.fetch(toFetchRequest(req))); + } catch { + res.statusCode = 500; + res.end('Internal Server Error'); + } + })(); + }); + + server.listen(config.port, '127.0.0.1'); + + return { + close: () => { + server.close(); + }, + }; +} + /** Load known words cache from disk into a Set. Returns null if unavailable. 
*/ function loadKnownWordsSet(cachePath: string | undefined): Set | null { if (!cachePath || !existsSync(cachePath)) return null; @@ -182,7 +248,9 @@ function buildAnkiNotePreview( return { word: getPreferredNoteFieldValue(fields, [getConfiguredWordFieldName(ankiConfig)]), sentence: getPreferredNoteFieldValue(fields, [getConfiguredSentenceFieldName(ankiConfig)]), - translation: getPreferredNoteFieldValue(fields, [getConfiguredTranslationFieldName(ankiConfig)]), + translation: getPreferredNoteFieldValue(fields, [ + getConfiguredTranslationFieldName(ankiConfig), + ]), }; } @@ -657,7 +725,11 @@ export function createStatsApp( method: 'POST', headers: { 'Content-Type': 'application/json' }, signal: AbortSignal.timeout(ANKI_CONNECT_FETCH_TIMEOUT_MS), - body: JSON.stringify({ action: 'notesInfo', version: 6, params: { notes: resolvedNoteIds } }), + body: JSON.stringify({ + action: 'notesInfo', + version: 6, + params: { notes: resolvedNoteIds }, + }), }); const result = (await response.json()) as { result?: Array<{ noteId: number; fields: Record }>; @@ -1001,15 +1073,29 @@ export function startStatsServer(config: StatsServerConfig): { close: () => void resolveAnkiNoteId: config.resolveAnkiNoteId, }); - const server = serve({ - fetch: app.fetch, - port: config.port, - hostname: '127.0.0.1', - }); - - return { - close: () => { - server.close(); - }, + const bunRuntime = globalThis as typeof globalThis & { + Bun?: { + serve?: (options: { + fetch: (typeof app)['fetch']; + port: number; + hostname: string; + }) => { stop: () => void }; + }; }; + + if (bunRuntime.Bun?.serve) { + const server = bunRuntime.Bun.serve({ + fetch: app.fetch, + port: config.port, + hostname: '127.0.0.1', + }); + + return { + close: () => { + server.stop(); + }, + }; + } + + return startNodeHttpServer(app, config); } diff --git a/src/core/services/subtitle-cue-parser.test.ts b/src/core/services/subtitle-cue-parser.test.ts index 5c59e62..d908f14 100644 --- a/src/core/services/subtitle-cue-parser.test.ts 
+++ b/src/core/services/subtitle-cue-parser.test.ts @@ -35,6 +35,21 @@ test('parseSrtCues handles multi-line subtitle text', () => { assert.equal(cues[0]!.text, 'これは\nテストです'); }); +test('parseSrtCues strips HTML-like markup while preserving line breaks', () => { + const content = [ + '1', + '00:01:00,000 --> 00:01:05,000', + 'これは', + 'テストです', + '', + ].join('\n'); + + const cues = parseSrtCues(content); + + assert.equal(cues.length, 1); + assert.equal(cues[0]!.text, 'これは\nテストです'); +}); + test('parseSrtCues handles hours in timestamps', () => { const content = ['1', '01:30:00,000 --> 01:30:05,000', 'テスト', ''].join('\n'); @@ -134,6 +149,18 @@ test('parseAssCues handles \\N line breaks', () => { assert.equal(cues[0]!.text, '一行目\\N二行目'); }); +test('parseAssCues strips HTML-like markup while preserving ASS line breaks', () => { + const content = [ + '[Events]', + 'Format: Layer, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text', + 'Dialogue: 0,0:00:01.00,0:00:04.00,Default,,0,0,0,,一行目\\N二行目', + ].join('\n'); + + const cues = parseAssCues(content); + + assert.equal(cues[0]!.text, '一行目\\N二行目'); +}); + test('parseAssCues returns empty for content without Events section', () => { const content = ['[Script Info]', 'Title: Test'].join('\n'); diff --git a/src/core/services/subtitle-cue-parser.ts b/src/core/services/subtitle-cue-parser.ts index 760bba3..e1f0434 100644 --- a/src/core/services/subtitle-cue-parser.ts +++ b/src/core/services/subtitle-cue-parser.ts @@ -4,6 +4,8 @@ export interface SubtitleCue { text: string; } +const HTML_SUBTITLE_TAG_PATTERN = /<\/?[A-Za-z][^>\n]*>/g; + const SRT_TIMING_PATTERN = /^\s*(?:(\d{1,2}):)?(\d{2}):(\d{2})[,.](\d{1,3})\s*-->\s*(?:(\d{1,2}):)?(\d{2}):(\d{2})[,.](\d{1,3})/; @@ -21,6 +23,10 @@ function parseTimestamp( ); } +function sanitizeSubtitleCueText(text: string): string { + return text.replace(ASS_OVERRIDE_TAG_PATTERN, '').replace(HTML_SUBTITLE_TAG_PATTERN, '').trim(); +} + export function parseSrtCues(content: 
string): SubtitleCue[] { const cues: SubtitleCue[] = []; const lines = content.split(/\r?\n/); @@ -54,7 +60,7 @@ export function parseSrtCues(content: string): SubtitleCue[] { i += 1; } - const text = textLines.join('\n').trim(); + const text = sanitizeSubtitleCueText(textLines.join('\n')); if (text) { cues.push({ startTime, endTime, text }); } @@ -140,13 +146,9 @@ export function parseAssCues(content: string): SubtitleCue[] { continue; } - const rawText = fields - .slice(textFieldIndex) - .join(',') - .replace(ASS_OVERRIDE_TAG_PATTERN, '') - .trim(); - if (rawText) { - cues.push({ startTime, endTime, text: rawText }); + const text = sanitizeSubtitleCueText(fields.slice(textFieldIndex).join(',')); + if (text) { + cues.push({ startTime, endTime, text }); } } diff --git a/src/core/services/subtitle-position.test.ts b/src/core/services/subtitle-position.test.ts new file mode 100644 index 0000000..c70aac5 --- /dev/null +++ b/src/core/services/subtitle-position.test.ts @@ -0,0 +1,113 @@ +import assert from 'node:assert/strict'; +import fs from 'node:fs'; +import os from 'node:os'; +import path from 'node:path'; +import test from 'node:test'; +import { + loadSubtitlePosition, + saveSubtitlePosition, + updateCurrentMediaPath, +} from './subtitle-position'; + +function makeTempDir(): string { + return fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-subtitle-position-test-')); +} + +test('saveSubtitlePosition queues pending position when media path is unavailable', () => { + const queued: Array<{ yPercent: number }> = []; + let persisted = false; + + saveSubtitlePosition({ + position: { yPercent: 21 }, + currentMediaPath: null, + subtitlePositionsDir: makeTempDir(), + onQueuePending: (position) => { + queued.push(position); + }, + onPersisted: () => { + persisted = true; + }, + }); + + assert.deepEqual(queued, [{ yPercent: 21 }]); + assert.equal(persisted, false); +}); + +test('saveSubtitlePosition persists and loadSubtitlePosition restores the stored position', () => { + 
const dir = makeTempDir(); + const mediaPath = path.join(dir, 'episode.mkv'); + const position = { yPercent: 37 }; + let persisted = false; + + saveSubtitlePosition({ + position, + currentMediaPath: mediaPath, + subtitlePositionsDir: dir, + onQueuePending: () => { + throw new Error('unexpected queue'); + }, + onPersisted: () => { + persisted = true; + }, + }); + + const loaded = loadSubtitlePosition({ + currentMediaPath: mediaPath, + fallbackPosition: { yPercent: 0 }, + subtitlePositionsDir: dir, + }); + + assert.equal(persisted, true); + assert.deepEqual(loaded, position); + assert.equal( + fs.readdirSync(dir).some((entry) => entry.endsWith('.json')), + true, + ); +}); + +test('updateCurrentMediaPath persists a queued subtitle position before broadcasting', () => { + const dir = makeTempDir(); + let currentMediaPath: string | null = null; + let cleared = false; + const setPositions: Array<{ yPercent: number } | null> = []; + const broadcasts: Array<{ yPercent: number } | null> = []; + const pending = { yPercent: 64 }; + + updateCurrentMediaPath({ + mediaPath: path.join(dir, 'video.mkv'), + currentMediaPath, + pendingSubtitlePosition: pending, + subtitlePositionsDir: dir, + loadSubtitlePosition: () => + loadSubtitlePosition({ + currentMediaPath, + fallbackPosition: { yPercent: 0 }, + subtitlePositionsDir: dir, + }), + setCurrentMediaPath: (next) => { + currentMediaPath = next; + }, + clearPendingSubtitlePosition: () => { + cleared = true; + }, + setSubtitlePosition: (position) => { + setPositions.push(position); + }, + broadcastSubtitlePosition: (position) => { + broadcasts.push(position); + }, + }); + + assert.equal(currentMediaPath, path.join(dir, 'video.mkv')); + assert.equal(cleared, true); + assert.deepEqual(setPositions, [pending]); + assert.deepEqual(broadcasts, [pending]); + assert.deepEqual( + loadSubtitlePosition({ + currentMediaPath, + fallbackPosition: { yPercent: 0 }, + subtitlePositionsDir: dir, + }), + pending, + ); +}); diff --git 
a/src/core/services/subtitle-processing-controller.test.ts b/src/core/services/subtitle-processing-controller.test.ts index 7a32549..ef1e165 100644 --- a/src/core/services/subtitle-processing-controller.test.ts +++ b/src/core/services/subtitle-processing-controller.test.ts @@ -228,7 +228,11 @@ test('consumeCachedSubtitle returns prefetched payload and prevents reprocessing controller.onSubtitleChange('猫\nです'); await flushMicrotasks(); - assert.equal(tokenizeCalls, 0, 'same cached subtitle should not reprocess after immediate consume'); + assert.equal( + tokenizeCalls, + 0, + 'same cached subtitle should not reprocess after immediate consume', + ); assert.deepEqual(emitted, []); }); diff --git a/src/core/services/tokenizer.test.ts b/src/core/services/tokenizer.test.ts index 2fd57c0..670f005 100644 --- a/src/core/services/tokenizer.test.ts +++ b/src/core/services/tokenizer.test.ts @@ -3428,40 +3428,43 @@ test('tokenizeSubtitle keeps standalone grammar-only tokens hoverable while clea test('tokenizeSubtitle keeps trailing quote-particle merged tokens hoverable while clearing only their annotation metadata', async () => { const result = await tokenizeSubtitle( 'どうしてもって', - makeDepsFromYomitanTokens([{ surface: 'どうしてもって', reading: 'どうしてもって', headword: 'どうしても' }], { - getFrequencyDictionaryEnabled: () => true, - getFrequencyRank: (text) => (text === 'どうしても' ? 123 : null), - getJlptLevel: (text) => (text === 'どうしても' ? 
'N3' : null), - tokenizeWithMecab: async () => [ - { - headword: 'どうしても', - surface: 'どうしても', - reading: 'ドウシテモ', - startPos: 0, - endPos: 5, - partOfSpeech: PartOfSpeech.other, - pos1: '副詞', - pos2: '一般', - isMerged: false, - isKnown: false, - isNPlusOneTarget: false, - }, - { - headword: 'って', - surface: 'って', - reading: 'ッテ', - startPos: 5, - endPos: 7, - partOfSpeech: PartOfSpeech.particle, - pos1: '助詞', - pos2: '格助詞', - isMerged: false, - isKnown: false, - isNPlusOneTarget: false, - }, - ], - getMinSentenceWordsForNPlusOne: () => 1, - }), + makeDepsFromYomitanTokens( + [{ surface: 'どうしてもって', reading: 'どうしてもって', headword: 'どうしても' }], + { + getFrequencyDictionaryEnabled: () => true, + getFrequencyRank: (text) => (text === 'どうしても' ? 123 : null), + getJlptLevel: (text) => (text === 'どうしても' ? 'N3' : null), + tokenizeWithMecab: async () => [ + { + headword: 'どうしても', + surface: 'どうしても', + reading: 'ドウシテモ', + startPos: 0, + endPos: 5, + partOfSpeech: PartOfSpeech.other, + pos1: '副詞', + pos2: '一般', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + { + headword: 'って', + surface: 'って', + reading: 'ッテ', + startPos: 5, + endPos: 7, + partOfSpeech: PartOfSpeech.particle, + pos1: '助詞', + pos2: '格助詞', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + ], + getMinSentenceWordsForNPlusOne: () => 1, + }, + ), ); assert.equal(result.text, 'どうしてもって'); @@ -3812,7 +3815,14 @@ test('tokenizeSubtitle clears all annotations for explanatory pondering endings' jlptLevel: token.jlptLevel, })), [ - { surface: '俺', headword: '俺', isKnown: true, isNPlusOneTarget: false, frequencyRank: 19, jlptLevel: 'N5' }, + { + surface: '俺', + headword: '俺', + isKnown: true, + isNPlusOneTarget: false, + frequencyRank: 19, + jlptLevel: 'N5', + }, { surface: 'どうかしちゃった', headword: 'どうかしちゃう', diff --git a/src/core/services/tokenizer/annotation-stage.ts b/src/core/services/tokenizer/annotation-stage.ts index 99fc5c1..c57d935 100644 --- 
a/src/core/services/tokenizer/annotation-stage.ts +++ b/src/core/services/tokenizer/annotation-stage.ts @@ -140,7 +140,11 @@ function isExcludedFromSubtitleAnnotationsByPos1(normalizedPos1: string): boolea function isExcludedTrailingParticleMergedToken(token: MergedToken): boolean { const normalizedSurface = normalizeJlptTextForExclusion(token.surface); const normalizedHeadword = normalizeJlptTextForExclusion(token.headword); - if (!normalizedSurface || !normalizedHeadword || !normalizedSurface.startsWith(normalizedHeadword)) { + if ( + !normalizedSurface || + !normalizedHeadword || + !normalizedSurface.startsWith(normalizedHeadword) + ) { return false; } @@ -164,7 +168,10 @@ function isExcludedTrailingParticleMergedToken(token: MergedToken): boolean { function isAuxiliaryStemGrammarTailToken(token: MergedToken): boolean { const pos1Parts = splitNormalizedTagParts(normalizePos1Tag(token.pos1)); - if (pos1Parts.length === 0 || !pos1Parts.every((part) => AUXILIARY_STEM_GRAMMAR_TAIL_POS1.has(part))) { + if ( + pos1Parts.length === 0 || + !pos1Parts.every((part) => AUXILIARY_STEM_GRAMMAR_TAIL_POS1.has(part)) + ) { return false; } diff --git a/src/core/services/tokenizer/subtitle-annotation-filter.ts b/src/core/services/tokenizer/subtitle-annotation-filter.ts index f64edde..8b2a3d4 100644 --- a/src/core/services/tokenizer/subtitle-annotation-filter.ts +++ b/src/core/services/tokenizer/subtitle-annotation-filter.ts @@ -46,7 +46,11 @@ const SUBTITLE_ANNOTATION_EXCLUDED_EXPLANATORY_ENDING_TRAILING_PARTICLES = [ 'かな', 'かね', ] as const; -const SUBTITLE_ANNOTATION_EXCLUDED_EXPLANATORY_ENDING_THOUGHT_SUFFIXES = ['か', 'かな', 'かね'] as const; +const SUBTITLE_ANNOTATION_EXCLUDED_EXPLANATORY_ENDING_THOUGHT_SUFFIXES = [ + 'か', + 'かな', + 'かね', +] as const; const SUBTITLE_ANNOTATION_EXCLUDED_EXPLANATORY_ENDINGS = new Set( SUBTITLE_ANNOTATION_EXCLUDED_EXPLANATORY_ENDING_PREFIXES.flatMap((prefix) => SUBTITLE_ANNOTATION_EXCLUDED_EXPLANATORY_ENDING_CORES.flatMap((core) => @@ -96,9 +100,7 
@@ function isExcludedByTagSet(normalizedTag: string, exclusions: ReadonlySet exclusions.has(part)); } -function resolvePos1Exclusions( - options: SubtitleAnnotationFilterOptions = {}, -): ReadonlySet { +function resolvePos1Exclusions(options: SubtitleAnnotationFilterOptions = {}): ReadonlySet { if (options.pos1Exclusions) { return options.pos1Exclusions; } @@ -106,9 +108,7 @@ function resolvePos1Exclusions( return resolveAnnotationPos1ExclusionSet(DEFAULT_ANNOTATION_POS1_EXCLUSION_CONFIG); } -function resolvePos2Exclusions( - options: SubtitleAnnotationFilterOptions = {}, -): ReadonlySet { +function resolvePos2Exclusions(options: SubtitleAnnotationFilterOptions = {}): ReadonlySet { if (options.pos2Exclusions) { return options.pos2Exclusions; } @@ -212,7 +212,11 @@ function isReduplicatedKanaSfxWithOptionalTrailingTo(text: string): boolean { function isExcludedTrailingParticleMergedToken(token: MergedToken): boolean { const normalizedSurface = normalizeKana(token.surface); const normalizedHeadword = normalizeKana(token.headword); - if (!normalizedSurface || !normalizedHeadword || !normalizedSurface.startsWith(normalizedHeadword)) { + if ( + !normalizedSurface || + !normalizedHeadword || + !normalizedSurface.startsWith(normalizedHeadword) + ) { return false; } @@ -236,7 +240,10 @@ function isExcludedTrailingParticleMergedToken(token: MergedToken): boolean { function isAuxiliaryStemGrammarTailToken(token: MergedToken): boolean { const pos1Parts = splitNormalizedTagParts(normalizePosTag(token.pos1)); - if (pos1Parts.length === 0 || !pos1Parts.every((part) => AUXILIARY_STEM_GRAMMAR_TAIL_POS1.has(part))) { + if ( + pos1Parts.length === 0 || + !pos1Parts.every((part) => AUXILIARY_STEM_GRAMMAR_TAIL_POS1.has(part)) + ) { return false; } diff --git a/src/core/services/tokenizer/yomitan-parser-runtime.test.ts b/src/core/services/tokenizer/yomitan-parser-runtime.test.ts index 5ccb443..354db96 100644 --- a/src/core/services/tokenizer/yomitan-parser-runtime.test.ts +++ 
b/src/core/services/tokenizer/yomitan-parser-runtime.test.ts @@ -1255,7 +1255,7 @@ test('dictionary settings helpers upsert and remove dictionary entries without r const deps = createDeps(async (script) => { scripts.push(script); if (script.includes('optionsGetFull')) { - return JSON.parse(JSON.stringify(optionsFull)); + return structuredClone(optionsFull); } if (script.includes('setAllSettings')) { return true; diff --git a/src/core/services/youtube/labels.ts b/src/core/services/youtube/labels.ts index 9e623f0..7a40027 100644 --- a/src/core/services/youtube/labels.ts +++ b/src/core/services/youtube/labels.ts @@ -3,7 +3,11 @@ import type { YoutubeTrackKind } from './kinds'; export type { YoutubeTrackKind }; export function normalizeYoutubeLangCode(value: string): string { - return value.trim().toLowerCase().replace(/_/g, '-').replace(/[^a-z0-9-]+/g, ''); + return value + .trim() + .toLowerCase() + .replace(/_/g, '-') + .replace(/[^a-z0-9-]+/g, ''); } export function isJapaneseYoutubeLang(value: string): boolean { diff --git a/src/core/services/youtube/metadata-probe.test.ts b/src/core/services/youtube/metadata-probe.test.ts index 5db5743..05e64c1 100644 --- a/src/core/services/youtube/metadata-probe.test.ts +++ b/src/core/services/youtube/metadata-probe.test.ts @@ -75,15 +75,11 @@ test('probeYoutubeVideoMetadata returns null on malformed yt-dlp JSON', async () }); }); -test( - 'probeYoutubeVideoMetadata times out when yt-dlp hangs', - { timeout: 20_000 }, - async () => { - await withHangingFakeYtDlp(async () => { - await assert.rejects( - probeYoutubeVideoMetadata('https://www.youtube.com/watch?v=abc123'), - /timed out after 15000ms/, - ); - }); - }, -); +test('probeYoutubeVideoMetadata times out when yt-dlp hangs', { timeout: 20_000 }, async () => { + await withHangingFakeYtDlp(async () => { + await assert.rejects( + probeYoutubeVideoMetadata('https://www.youtube.com/watch?v=abc123'), + /timed out after 15000ms/, + ); + }); +}); diff --git 
a/src/core/services/youtube/timedtext.ts b/src/core/services/youtube/timedtext.ts index e7d2231..fe3aaf4 100644 --- a/src/core/services/youtube/timedtext.ts +++ b/src/core/services/youtube/timedtext.ts @@ -25,9 +25,7 @@ function decodeHtmlEntities(value: string): string { .replace(/>/g, '>') .replace(/"/g, '"') .replace(/'/g, "'") - .replace(/&#(\d+);/g, (match, codePoint) => - decodeNumericEntity(match, Number(codePoint)), - ) + .replace(/&#(\d+);/g, (match, codePoint) => decodeNumericEntity(match, Number(codePoint))) .replace(/&#x([0-9a-f]+);/gi, (match, codePoint) => decodeNumericEntity(match, Number.parseInt(codePoint, 16)), ); @@ -52,9 +50,7 @@ function extractYoutubeTimedTextRows(xml: string): YoutubeTimedTextRow[] { continue; } - const inner = (match[2] ?? '') - .replace(//gi, '\n') - .replace(/<[^>]+>/g, ''); + const inner = (match[2] ?? '').replace(//gi, '\n').replace(/<[^>]+>/g, ''); const text = decodeHtmlEntities(inner).trim(); if (!text) { continue; @@ -110,7 +106,9 @@ export function convertYoutubeTimedTextToVtt(xml: string): string { if (!text) { continue; } - blocks.push(`${formatVttTimestamp(row.startMs)} --> ${formatVttTimestamp(clampedEnd)}\n${text}`); + blocks.push( + `${formatVttTimestamp(row.startMs)} --> ${formatVttTimestamp(clampedEnd)}\n${text}`, + ); } return `WEBVTT\n\n${blocks.join('\n\n')}\n`; diff --git a/src/core/services/youtube/track-download.test.ts b/src/core/services/youtube/track-download.test.ts index 7832e2e..4f7ba11 100644 --- a/src/core/services/youtube/track-download.test.ts +++ b/src/core/services/youtube/track-download.test.ts @@ -16,7 +16,7 @@ async function withTempDir(fn: (dir: string) => Promise): Promise { function makeFakeYtDlpScript(dir: string): string { const scriptPath = path.join(dir, 'yt-dlp'); -const script = `#!/usr/bin/env node + const script = `#!/usr/bin/env node const fs = require('node:fs'); const path = require('node:path'); @@ -115,7 +115,9 @@ async function withFakeYtDlp( } async function 
withFakeYtDlpExpectations( - expectations: Partial>, + expectations: Partial< + Record<'YTDLP_EXPECT_AUTO_SUBS' | 'YTDLP_EXPECT_MANUAL_SUBS' | 'YTDLP_EXPECT_SUB_LANG', string> + >, fn: () => Promise, ): Promise { const previous = { @@ -144,11 +146,7 @@ async function withStubFetch( const originalFetch = globalThis.fetch; globalThis.fetch = (async (input: string | URL | Request) => { const url = - typeof input === 'string' - ? input - : input instanceof URL - ? input.toString() - : input.url; + typeof input === 'string' ? input : input instanceof URL ? input.toString() : input.url; return await handler(url); }) as typeof fetch; try { diff --git a/src/core/services/youtube/track-download.ts b/src/core/services/youtube/track-download.ts index 62c4bbe..8dac62e 100644 --- a/src/core/services/youtube/track-download.ts +++ b/src/core/services/youtube/track-download.ts @@ -13,7 +13,10 @@ const YOUTUBE_BATCH_PREFIX = 'youtube-batch'; const YOUTUBE_DOWNLOAD_TIMEOUT_MS = 15_000; function sanitizeFilenameSegment(value: string): string { - const sanitized = value.trim().replace(/[^a-z0-9_-]+/gi, '-').replace(/-+/g, '-'); + const sanitized = value + .trim() + .replace(/[^a-z0-9_-]+/gi, '-') + .replace(/-+/g, '-'); return sanitized.replace(/^-+|-+$/g, '') || 'unknown'; } @@ -163,10 +166,7 @@ async function downloadSubtitleFromUrl(input: { ? 
ext : 'vtt'; const safeSourceLanguage = sanitizeFilenameSegment(input.track.sourceLanguage); - const targetPath = path.join( - input.outputDir, - `${input.prefix}.${safeSourceLanguage}.${safeExt}`, - ); + const targetPath = path.join(input.outputDir, `${input.prefix}.${safeSourceLanguage}.${safeExt}`); const response = await fetch(input.track.downloadUrl, { signal: createFetchTimeoutSignal(YOUTUBE_DOWNLOAD_TIMEOUT_MS), }); diff --git a/src/core/services/youtube/track-probe.ts b/src/core/services/youtube/track-probe.ts index 16d4304..615c5e2 100644 --- a/src/core/services/youtube/track-probe.ts +++ b/src/core/services/youtube/track-probe.ts @@ -127,7 +127,10 @@ export async function probeYoutubeTracks(targetUrl: string): Promise track.kind === 'manual'), @@ -52,7 +53,11 @@ export function normalizeYoutubeTrackSelection(input: { primaryTrackId: string | null; secondaryTrackId: string | null; } { - if (input.primaryTrackId && input.secondaryTrackId && input.primaryTrackId === input.secondaryTrackId) { + if ( + input.primaryTrackId && + input.secondaryTrackId && + input.primaryTrackId === input.secondaryTrackId + ) { return { primaryTrackId: input.primaryTrackId, secondaryTrackId: null, @@ -60,4 +65,3 @@ export function normalizeYoutubeTrackSelection(input: { } return input; } - diff --git a/src/logger.ts b/src/logger.ts index 64e69d3..7c299cb 100644 --- a/src/logger.ts +++ b/src/logger.ts @@ -1,4 +1,7 @@ -import { appendLogLine, resolveDefaultLogFilePath as resolveSharedDefaultLogFilePath } from './shared/log-files'; +import { + appendLogLine, + resolveDefaultLogFilePath as resolveSharedDefaultLogFilePath, +} from './shared/log-files'; export type LogLevel = 'debug' | 'info' | 'warn' | 'error'; export type LogLevelSource = 'cli' | 'config'; diff --git a/src/main-entry-runtime.test.ts b/src/main-entry-runtime.test.ts index dd1f7a2..9a8bd17 100644 --- a/src/main-entry-runtime.test.ts +++ b/src/main-entry-runtime.test.ts @@ -82,10 +82,9 @@ test('stats-daemon entry helper 
detects internal daemon commands', () => { true, ); assert.equal( - shouldHandleStatsDaemonCommandAtEntry( - ['SubMiner.AppImage', '--stats-daemon-start'], - { ELECTRON_RUN_AS_NODE: '1' }, - ), + shouldHandleStatsDaemonCommandAtEntry(['SubMiner.AppImage', '--stats-daemon-start'], { + ELECTRON_RUN_AS_NODE: '1', + }), false, ); assert.equal(shouldHandleStatsDaemonCommandAtEntry(['SubMiner.AppImage', '--start'], {}), false); diff --git a/src/main.ts b/src/main.ts index 0a5bc98..4fc5f03 100644 --- a/src/main.ts +++ b/src/main.ts @@ -31,6 +31,7 @@ import { screen, } from 'electron'; import { applyControllerConfigUpdate } from './main/controller-config-update.js'; +import { createDiscordRpcClient } from './main/runtime/discord-rpc-client.js'; import { mergeAiConfig } from './ai/config'; function getPasswordStoreArg(argv: string[]): string | null { @@ -68,6 +69,26 @@ function getDefaultPasswordStore(): string { return 'gnome-libsecret'; } +function getStartupModeFlags(initialArgs: CliArgs | null | undefined): { + shouldUseMinimalStartup: boolean; + shouldSkipHeavyStartup: boolean; +} { + return { + shouldUseMinimalStartup: Boolean( + initialArgs?.texthooker || + (initialArgs?.stats && + (initialArgs.statsCleanup || initialArgs.statsBackground || initialArgs.statsStop)), + ), + shouldSkipHeavyStartup: Boolean( + initialArgs && + (shouldRunSettingsOnlyStartup(initialArgs) || + initialArgs.stats || + initialArgs.dictionary || + initialArgs.setup), + ), + }; +} + protocol.registerSchemesAsPrivileged([ { scheme: 'chrome-extension', @@ -101,8 +122,7 @@ import { AnkiIntegration } from './anki-integration'; import { SubtitleTimingTracker } from './subtitle-timing-tracker'; import { RuntimeOptionsManager } from './runtime-options'; import { downloadToFile, isRemoteMediaPath, parseMediaInfo } from './jimaku/utils'; -import { createLogger, setLogLevel, type LogLevelSource } from './logger'; -import { resolveDefaultLogFilePath } from './logger'; +import { createLogger, setLogLevel, 
resolveDefaultLogFilePath, type LogLevelSource } from './logger'; import { createWindowTracker as createWindowTrackerCore } from './window-trackers'; import { commandNeedsOverlayStartupPrereqs, @@ -111,10 +131,11 @@ import { parseArgs, shouldRunSettingsOnlyStartup, shouldStartApp, + type CliArgs, + type CliCommandSource, } from './cli/args'; -import type { CliArgs, CliCommandSource } from './cli/args'; import { printHelp } from './cli/help'; -import { IPC_CHANNELS } from './shared/ipc/contracts'; +import { IPC_CHANNELS, type OverlayHostedModal } from './shared/ipc/contracts'; import { buildConfigParseErrorDetails, buildConfigWarningDialogDetails, @@ -142,9 +163,9 @@ import { createGetDefaultSocketPathHandler, buildJellyfinSetupFormHtml, parseJellyfinSetupSubmissionUrl, + getConfiguredJellyfinSession, + type ActiveJellyfinRemotePlaybackState, } from './main/runtime/domains/jellyfin'; -import type { ActiveJellyfinRemotePlaybackState } from './main/runtime/domains/jellyfin'; -import { getConfiguredJellyfinSession } from './main/runtime/domains/jellyfin'; import { createBuildConfigHotReloadMessageMainDepsHandler, createBuildConfigHotReloadAppliedMainDepsHandler, @@ -170,7 +191,6 @@ import { createBuildEnforceOverlayLayerOrderMainDepsHandler, createBuildEnsureOverlayWindowLevelMainDepsHandler, createBuildUpdateVisibleOverlayBoundsMainDepsHandler, - createOverlayWindowRuntimeHandlers, createTrayRuntimeHandlers, createOverlayVisibilityRuntime, createBroadcastRuntimeOptionsChangedHandler, @@ -235,11 +255,6 @@ import { createHandleMineSentenceDigitHandler, createHandleMultiCopyDigitHandler, } from './main/runtime/domains/mining'; -import { - createCliCommandContextFactory, - createInitialArgsRuntimeHandler, - createCliCommandRuntimeHandler, -} from './main/runtime/domains/ipc'; import { enforceUnsupportedWaylandMode, forceX11Backend, @@ -325,11 +340,12 @@ import { shouldAutoOpenFirstRunSetup, } from './main/runtime/first-run-setup-service'; import { createYoutubeFlowRuntime 
} from './main/runtime/youtube-flow'; +import { createYoutubePlaybackRuntime } from './main/runtime/youtube-playback-runtime'; import { clearYoutubePrimarySubtitleNotificationTimer, createYoutubePrimarySubtitleNotificationRuntime, } from './main/runtime/youtube-primary-subtitle-notification'; -import { resolveAutoplayReadyMaxReleaseAttempts } from './main/runtime/startup-autoplay-release-policy'; +import { createAutoplayReadyGate } from './main/runtime/autoplay-ready-gate'; import { buildFirstRunSetupHtml, createMaybeFocusExistingFirstRunSetupWindowHandler, @@ -340,6 +356,7 @@ import { import { detectInstalledFirstRunPlugin, installFirstRunPluginToDefaultLocation, + syncInstalledFirstRunPluginBinaryPath, } from './main/runtime/first-run-setup-plugin'; import { applyWindowsMpvShortcuts, @@ -384,12 +401,16 @@ import { composeAnilistSetupHandlers, composeAnilistTrackingHandlers, composeAppReadyRuntime, + composeCliStartupHandlers, + composeHeadlessStartupHandlers, composeIpcRuntimeHandlers, composeJellyfinRuntimeHandlers, composeMpvRuntimeHandlers, + composeOverlayVisibilityRuntime, composeShortcutRuntimes, composeStartupLifecycleHandlers, } from './main/runtime/composers'; +import { createOverlayWindowRuntimeHandlers } from './main/runtime/overlay-window-runtime-handlers'; import { createStartupBootstrapRuntimeDeps } from './main/startup'; import { createAppLifecycleRuntimeRunner } from './main/startup-lifecycle'; import { @@ -400,10 +421,11 @@ import { import { handleMpvCommandFromIpcRuntime } from './main/ipc-mpv-command'; import { registerIpcRuntimeServices } from './main/ipc-runtime'; import { createAnkiJimakuIpcRuntimeServiceDeps } from './main/dependencies'; +import { createMainBootServices, type MainBootServicesResult } from './main/boot/services'; import { handleCliCommandRuntimeServiceWithContext } from './main/cli-runtime'; import { createOverlayModalRuntimeService } from './main/overlay-runtime'; +import { createOverlayModalInputState } from 
'./main/runtime/overlay-modal-input-state'; import { openYoutubeTrackPicker } from './main/runtime/youtube-picker-open'; -import type { OverlayHostedModal } from './shared/ipc/contracts'; import { createOverlayShortcutsRuntimeService } from './main/overlay-shortcuts-runtime'; import { createFrequencyDictionaryRuntimeService, @@ -415,12 +437,22 @@ import { } from './main/jlpt-runtime'; import { createMediaRuntimeService } from './main/media-runtime'; import { createOverlayVisibilityRuntimeService } from './main/overlay-visibility-runtime'; +import { createDiscordPresenceRuntime } from './main/runtime/discord-presence-runtime'; import { createCharacterDictionaryRuntimeService } from './main/character-dictionary-runtime'; import { createCharacterDictionaryAutoSyncRuntimeService } from './main/runtime/character-dictionary-auto-sync'; import { handleCharacterDictionaryAutoSyncComplete } from './main/runtime/character-dictionary-auto-sync-completion'; import { notifyCharacterDictionaryAutoSyncStatus } from './main/runtime/character-dictionary-auto-sync-notifications'; import { createCurrentMediaTokenizationGate } from './main/runtime/current-media-tokenization-gate'; import { createStartupOsdSequencer } from './main/runtime/startup-osd-sequencer'; +import { + createRefreshSubtitlePrefetchFromActiveTrackHandler, + createResolveActiveSubtitleSidebarSourceHandler, +} from './main/runtime/subtitle-prefetch-runtime'; +import { + createCreateAnilistSetupWindowHandler, + createCreateFirstRunSetupWindowHandler, + createCreateJellyfinSetupWindowHandler, +} from './main/runtime/setup-window-factory'; import { isYoutubePlaybackActive } from './main/runtime/youtube-playback'; import { createYomitanProfilePolicy } from './main/runtime/yomitan-profile-policy'; import { formatSkippedYomitanWriteAction } from './main/runtime/yomitan-read-only-log'; @@ -455,8 +487,7 @@ import { } from './config'; import { resolveConfigDir } from './config/path-resolution'; import { parseSubtitleCues } 
from './core/services/subtitle-cue-parser'; -import { createSubtitlePrefetchService } from './core/services/subtitle-prefetch'; -import type { SubtitlePrefetchService } from './core/services/subtitle-prefetch'; +import { createSubtitlePrefetchService, type SubtitlePrefetchService } from './core/services/subtitle-prefetch'; import { buildSubtitleSidebarSourceKey, resolveSubtitleSourcePath, @@ -489,9 +520,6 @@ const ANILIST_DEVELOPER_SETTINGS_URL = 'https://anilist.co/settings/developer'; const ANILIST_UPDATE_MIN_WATCH_SECONDS = 10 * 60; const ANILIST_DURATION_RETRY_INTERVAL_MS = 15_000; const ANILIST_MAX_ATTEMPTED_UPDATE_KEYS = 1000; -const ANILIST_TOKEN_STORE_FILE = 'anilist-token-store.json'; -const JELLYFIN_TOKEN_STORE_FILE = 'jellyfin-token-store.json'; -const ANILIST_RETRY_QUEUE_FILE = 'anilist-retry-queue.json'; const TRAY_TOOLTIP = 'SubMiner'; let anilistMediaGuessRuntimeState: AnilistMediaGuessRuntimeState = @@ -500,9 +528,6 @@ let anilistUpdateInFlightState = createInitialAnilistUpdateInFlightState(); const anilistAttemptedUpdateKeys = new Set(); let anilistCachedAccessToken: string | null = null; let jellyfinPlayQuitOnDisconnectArmed = false; -let youtubePlayQuitOnDisconnectArmed = false; -let youtubePlayQuitOnDisconnectArmTimer: ReturnType | null = null; -let youtubePlaybackFlowGeneration = 0; const JELLYFIN_LANG_PREF = 'ja,jp,jpn,japanese,en,eng,english,enUS,en-US'; const JELLYFIN_TICKS_PER_SECOND = 10_000_000; const JELLYFIN_REMOTE_PROGRESS_INTERVAL_MS = 3000; @@ -546,64 +571,12 @@ function applyJellyfinMpvDefaults( applyJellyfinMpvDefaultsHandler(client); } -const CONFIG_DIR = resolveConfigDir({ - platform: process.platform, - appDataDir: process.env.APPDATA, - xdgConfigHome: process.env.XDG_CONFIG_HOME, - homeDir: os.homedir(), - existsSync: fs.existsSync, -}); -const USER_DATA_PATH = CONFIG_DIR; -const DEFAULT_MPV_LOG_PATH = process.env.SUBMINER_MPV_LOG?.trim() || DEFAULT_MPV_LOG_FILE; -const DEFAULT_IMMERSION_DB_PATH = path.join(USER_DATA_PATH, 
'immersion.sqlite'); -const configService = (() => { - try { - return new ConfigService(CONFIG_DIR); - } catch (error) { - if (error instanceof ConfigStartupParseError) { - failStartupFromConfig( - 'SubMiner config parse error', - buildConfigParseErrorDetails(error.path, error.parseError), - { - logError: (details) => console.error(details), - showErrorBox: (title, details) => dialog.showErrorBox(title, details), - quit: () => requestAppQuit(), - }, - ); - } - throw error; - } -})(); -const anilistTokenStore = createAnilistTokenStore( - path.join(USER_DATA_PATH, ANILIST_TOKEN_STORE_FILE), - { - info: (message: string) => console.info(message), - warn: (message: string, details?: unknown) => console.warn(message, details), - error: (message: string, details?: unknown) => console.error(message, details), - warnUser: (message: string) => notifyAnilistTokenStoreWarning(message), - }, -); -const jellyfinTokenStore = createJellyfinTokenStore( - path.join(USER_DATA_PATH, JELLYFIN_TOKEN_STORE_FILE), - { - info: (message: string) => console.info(message), - warn: (message: string, details?: unknown) => console.warn(message, details), - error: (message: string, details?: unknown) => console.error(message, details), - }, -); -const anilistUpdateQueue = createAnilistUpdateQueue( - path.join(USER_DATA_PATH, ANILIST_RETRY_QUEUE_FILE), - { - info: (message: string) => console.info(message), - warn: (message: string, details?: unknown) => console.warn(message, details), - error: (message: string, details?: unknown) => console.error(message, details), - }, -); const isDev = process.argv.includes('--dev') || process.argv.includes('--debug'); const texthookerService = new Texthooker(() => { const config = getResolvedConfig(); const characterDictionaryEnabled = - config.anilist.characterDictionary.enabled && yomitanProfilePolicy.isCharacterDictionaryEnabled(); + config.anilist.characterDictionary.enabled && + yomitanProfilePolicy.isCharacterDictionaryEnabled(); const 
knownAndNPlusOneEnabled = getRuntimeBooleanOption( 'subtitle.annotation.nPlusOne', config.ankiConnect.knownWords.highlightEnabled, @@ -634,9 +607,139 @@ const texthookerService = new Texthooker(() => { }, }; }); -const subtitleWsService = new SubtitleWebSocket(); -const annotationSubtitleWsService = new SubtitleWebSocket(); -const logger = createLogger('main'); +let syncOverlayShortcutsForModal: (isActive: boolean) => void = () => {}; +let syncOverlayVisibilityForModal: () => void = () => {}; +const buildGetDefaultSocketPathMainDepsHandler = createBuildGetDefaultSocketPathMainDepsHandler({ + platform: process.platform, +}); +const getDefaultSocketPathMainDeps = buildGetDefaultSocketPathMainDepsHandler(); +const getDefaultSocketPathHandler = createGetDefaultSocketPathHandler(getDefaultSocketPathMainDeps); + +function getDefaultSocketPath(): string { + return getDefaultSocketPathHandler(); +} + +type BootServices = MainBootServicesResult< + ConfigService, + ReturnType, + ReturnType, + ReturnType, + SubtitleWebSocket, + ReturnType, + ReturnType, + ReturnType, + ReturnType, + ReturnType, + ReturnType, + ReturnType, + { + requestSingleInstanceLock: () => boolean; + quit: () => void; + on: (event: string, listener: (...args: unknown[]) => void) => unknown; + whenReady: () => Promise; + } +>; + +const bootServices = createMainBootServices({ + platform: process.platform, + argv: process.argv, + appDataDir: process.env.APPDATA, + xdgConfigHome: process.env.XDG_CONFIG_HOME, + homeDir: os.homedir(), + defaultMpvLogFile: DEFAULT_MPV_LOG_FILE, + envMpvLog: process.env.SUBMINER_MPV_LOG, + defaultTexthookerPort: DEFAULT_TEXTHOOKER_PORT, + getDefaultSocketPath: () => getDefaultSocketPath(), + resolveConfigDir, + existsSync: fs.existsSync, + mkdirSync: fs.mkdirSync, + joinPath: (...parts) => path.join(...parts), + app, + shouldBypassSingleInstanceLock: () => shouldBypassSingleInstanceLockForArgv(process.argv), + requestSingleInstanceLockEarly: () => 
requestSingleInstanceLockEarly(app), + registerSecondInstanceHandlerEarly: (listener) => { + registerSecondInstanceHandlerEarly(app, listener); + }, + onConfigStartupParseError: (error) => { + failStartupFromConfig( + 'SubMiner config parse error', + buildConfigParseErrorDetails(error.path, error.parseError), + { + logError: (details) => console.error(details), + showErrorBox: (title, details) => dialog.showErrorBox(title, details), + quit: () => requestAppQuit(), + }, + ); + }, + createConfigService: (configDir) => new ConfigService(configDir), + createAnilistTokenStore: (targetPath) => + createAnilistTokenStore(targetPath, { + info: (message: string) => console.info(message), + warn: (message: string, details?: unknown) => console.warn(message, details), + error: (message: string, details?: unknown) => console.error(message, details), + warnUser: (message: string) => notifyAnilistTokenStoreWarning(message), + }), + createJellyfinTokenStore: (targetPath) => + createJellyfinTokenStore(targetPath, { + info: (message: string) => console.info(message), + warn: (message: string, details?: unknown) => console.warn(message, details), + error: (message: string, details?: unknown) => console.error(message, details), + }), + createAnilistUpdateQueue: (targetPath) => + createAnilistUpdateQueue(targetPath, { + info: (message: string) => console.info(message), + warn: (message: string, details?: unknown) => console.warn(message, details), + error: (message: string, details?: unknown) => console.error(message, details), + }), + createSubtitleWebSocket: () => new SubtitleWebSocket(), + createLogger, + createMainRuntimeRegistry, + createOverlayManager, + createOverlayModalInputState, + createOverlayContentMeasurementStore: ({ logger }) => { + const buildHandler = createBuildOverlayContentMeasurementStoreMainDepsHandler({ + now: () => Date.now(), + warn: (message: string) => logger.warn(message), + }); + return createOverlayContentMeasurementStore(buildHandler()); + }, + 
getSyncOverlayShortcutsForModal: () => syncOverlayShortcutsForModal, + getSyncOverlayVisibilityForModal: () => syncOverlayVisibilityForModal, + createOverlayModalRuntime: ({ overlayManager, overlayModalInputState }) => { + const buildHandler = createBuildOverlayModalRuntimeMainDepsHandler({ + getMainWindow: () => overlayManager.getMainWindow(), + getModalWindow: () => overlayManager.getModalWindow(), + createModalWindow: () => createModalWindow(), + getModalGeometry: () => getCurrentOverlayGeometry(), + setModalWindowBounds: (geometry) => overlayManager.setModalWindowBounds(geometry), + }); + return createOverlayModalRuntimeService(buildHandler(), { + onModalStateChange: (isActive: boolean) => + overlayModalInputState.handleModalInputStateChange(isActive), + }); + }, + createAppState, +}) as BootServices; +const { + configDir: CONFIG_DIR, + userDataPath: USER_DATA_PATH, + defaultMpvLogPath: DEFAULT_MPV_LOG_PATH, + defaultImmersionDbPath: DEFAULT_IMMERSION_DB_PATH, + configService, + anilistTokenStore, + jellyfinTokenStore, + anilistUpdateQueue, + subtitleWsService, + annotationSubtitleWsService, + logger, + runtimeRegistry, + overlayManager, + overlayModalInputState, + overlayContentMeasurementStore, + overlayModalRuntime, + appState, + appLifecycleApp, +} = bootServices; notifyAnilistTokenStoreWarning = (message: string) => { logger.warn(`[AniList] ${message}`); try { @@ -671,41 +774,6 @@ const appLogger = { ); }, }; -const runtimeRegistry = createMainRuntimeRegistry(); -const appLifecycleApp = { - requestSingleInstanceLock: () => - shouldBypassSingleInstanceLockForArgv(process.argv) - ? 
true - : requestSingleInstanceLockEarly(app), - quit: () => app.quit(), - on: (event: string, listener: (...args: unknown[]) => void) => { - if (event === 'second-instance') { - registerSecondInstanceHandlerEarly( - app, - listener as (_event: unknown, argv: string[]) => void, - ); - return app; - } - app.on(event as Parameters[0], listener as (...args: any[]) => void); - return app; - }, - whenReady: () => app.whenReady(), -}; - -const buildGetDefaultSocketPathMainDepsHandler = createBuildGetDefaultSocketPathMainDepsHandler({ - platform: process.platform, -}); -const getDefaultSocketPathMainDeps = buildGetDefaultSocketPathMainDepsHandler(); -const getDefaultSocketPathHandler = createGetDefaultSocketPathHandler(getDefaultSocketPathMainDeps); - -function getDefaultSocketPath(): string { - return getDefaultSocketPathHandler(); -} - -if (!fs.existsSync(USER_DATA_PATH)) { - fs.mkdirSync(USER_DATA_PATH, { recursive: true }); -} -app.setPath('userData', USER_DATA_PATH); let forceQuitTimer: ReturnType | null = null; let statsServer: ReturnType | null = null; @@ -767,55 +835,6 @@ process.on('SIGTERM', () => { requestAppQuit(); }); -const overlayManager = createOverlayManager(); -let overlayModalInputExclusive = false; -let syncOverlayShortcutsForModal: (isActive: boolean) => void = () => {}; -let syncOverlayVisibilityForModal: () => void = () => {}; - -const handleModalInputStateChange = (isActive: boolean): void => { - if (overlayModalInputExclusive === isActive) return; - overlayModalInputExclusive = isActive; - if (isActive) { - const modalWindow = overlayManager.getModalWindow(); - if (modalWindow && !modalWindow.isDestroyed()) { - modalWindow.setIgnoreMouseEvents(false); - modalWindow.setAlwaysOnTop(true, 'screen-saver', 1); - modalWindow.focus(); - if (!modalWindow.webContents.isFocused()) { - modalWindow.webContents.focus(); - } - } - } - syncOverlayShortcutsForModal(isActive); - syncOverlayVisibilityForModal(); -}; - -const 
buildOverlayContentMeasurementStoreMainDepsHandler = - createBuildOverlayContentMeasurementStoreMainDepsHandler({ - now: () => Date.now(), - warn: (message: string) => logger.warn(message), - }); -const buildOverlayModalRuntimeMainDepsHandler = createBuildOverlayModalRuntimeMainDepsHandler({ - getMainWindow: () => overlayManager.getMainWindow(), - getModalWindow: () => overlayManager.getModalWindow(), - createModalWindow: () => createModalWindow(), - getModalGeometry: () => getCurrentOverlayGeometry(), - setModalWindowBounds: (geometry) => overlayManager.setModalWindowBounds(geometry), -}); -const overlayContentMeasurementStoreMainDeps = buildOverlayContentMeasurementStoreMainDepsHandler(); -const overlayContentMeasurementStore = createOverlayContentMeasurementStore( - overlayContentMeasurementStoreMainDeps, -); -const overlayModalRuntime = createOverlayModalRuntimeService( - buildOverlayModalRuntimeMainDepsHandler(), - { - onModalStateChange: (isActive: boolean) => handleModalInputStateChange(isActive), - }, -); -const appState = createAppState({ - mpvSocketPath: getDefaultSocketPath(), - texthookerPort: DEFAULT_TEXTHOOKER_PORT, -}); const startBackgroundWarmupsIfAllowed = (): void => { startBackgroundWarmups(); }; @@ -828,7 +847,8 @@ const youtubeFlowRuntime = createYoutubeFlowRuntime({ { sendToActiveOverlayWindow: (channel, nextPayload, runtimeOptions) => overlayModalRuntime.sendToActiveOverlayWindow(channel, nextPayload, runtimeOptions), - waitForModalOpen: (modal, timeoutMs) => overlayModalRuntime.waitForModalOpen(modal, timeoutMs), + waitForModalOpen: (modal, timeoutMs) => + overlayModalRuntime.waitForModalOpen(modal, timeoutMs), logWarn: (message) => logger.warn(message), }, payload, @@ -852,7 +872,7 @@ const youtubeFlowRuntime = createYoutubeFlowRuntime({ subtitleProcessingController.refreshCurrentSubtitle(text); }, refreshSubtitleSidebarSource: async (sourcePath: string) => { - await refreshSubtitleSidebarFromSource(sourcePath); + await 
subtitlePrefetchRuntime.refreshSubtitleSidebarFromSource(sourcePath); }, startTokenizationWarmups: async () => { await startTokenizationWarmups(); @@ -871,7 +891,10 @@ const youtubeFlowRuntime = createYoutubeFlowRuntime({ await Promise.race([ integration.waitUntilReady(), new Promise((_, reject) => { - setTimeout(() => reject(new Error('Timed out waiting for AnkiConnect integration')), 2500); + setTimeout( + () => reject(new Error('Timed out waiting for AnkiConnect integration')), + 2500, + ); }), ]); } catch (error) { @@ -962,114 +985,51 @@ const waitForYoutubeMpvConnected = createWaitForMpvConnectedHandler({ now: () => Date.now(), sleep: (delayMs) => new Promise((resolve) => setTimeout(resolve, delayMs)), }); - -function clearYoutubePlayQuitOnDisconnectArmTimer(): void { - if (youtubePlayQuitOnDisconnectArmTimer) { - clearTimeout(youtubePlayQuitOnDisconnectArmTimer); - youtubePlayQuitOnDisconnectArmTimer = null; - } -} - -function invalidatePendingAutoplayReadyFallbacks(): void { - autoPlayReadySignalMediaPath = null; - autoPlayReadySignalGeneration += 1; -} - -async function runYoutubePlaybackFlowMain(request: { - url: string; - mode: NonNullable; - source: CliCommandSource; -}): Promise { - const flowGeneration = ++youtubePlaybackFlowGeneration; - invalidatePendingAutoplayReadyFallbacks(); - youtubePrimarySubtitleNotificationRuntime.setAppOwnedFlowInFlight(true); - let flowCompleted = false; - try { - clearYoutubePlayQuitOnDisconnectArmTimer(); - youtubePlayQuitOnDisconnectArmed = false; +const autoplayReadyGate = createAutoplayReadyGate({ + isAppOwnedFlowInFlight: () => youtubePrimarySubtitleNotificationRuntime.isAppOwnedFlowInFlight(), + getCurrentMediaPath: () => appState.currentMediaPath, + getCurrentVideoPath: () => appState.mpvClient?.currentVideoPath ?? 
null, + getPlaybackPaused: () => appState.playbackPaused, + getMpvClient: () => appState.mpvClient, + signalPluginAutoplayReady: () => { + sendMpvCommandRuntime(appState.mpvClient, ['script-message', 'subminer-autoplay-ready']); + }, + schedule: (callback, delayMs) => setTimeout(callback, delayMs), + logDebug: (message) => logger.debug(message), +}); +const youtubePlaybackRuntime = createYoutubePlaybackRuntime({ + platform: process.platform, + directPlaybackFormat: YOUTUBE_DIRECT_PLAYBACK_FORMAT, + mpvYtdlFormat: YOUTUBE_MPV_YTDL_FORMAT, + autoLaunchTimeoutMs: YOUTUBE_MPV_AUTO_LAUNCH_TIMEOUT_MS, + connectTimeoutMs: YOUTUBE_MPV_CONNECT_TIMEOUT_MS, + getSocketPath: () => appState.mpvSocketPath, + getMpvConnected: () => Boolean(appState.mpvClient?.connected), + invalidatePendingAutoplayReadyFallbacks: () => + autoplayReadyGate.invalidatePendingAutoplayReadyFallbacks(), + setAppOwnedFlowInFlight: (next) => { + youtubePrimarySubtitleNotificationRuntime.setAppOwnedFlowInFlight(next); + }, + ensureYoutubePlaybackRuntimeReady: async () => { await ensureYoutubePlaybackRuntimeReady(); - let playbackUrl = request.url; - let launchedWindowsMpv = false; - if (process.platform === 'win32') { - try { - playbackUrl = await resolveYoutubePlaybackUrl(request.url, YOUTUBE_DIRECT_PLAYBACK_FORMAT); - logger.info('Resolved direct YouTube playback URL for Windows MPV startup.'); - } catch (error) { - logger.warn( - `Failed to resolve direct YouTube playback URL; falling back to page URL: ${ - error instanceof Error ? 
error.message : String(error) - }`, - ); - } - } - if (process.platform === 'win32' && !appState.mpvClient?.connected) { - const launchResult = launchWindowsMpv( - [playbackUrl], - createWindowsMpvLaunchDeps({ - showError: (title, content) => dialog.showErrorBox(title, content), - }), - [ - '--pause=yes', - '--ytdl=yes', - `--ytdl-format=${YOUTUBE_MPV_YTDL_FORMAT}`, - '--sub-auto=no', - '--sub-file-paths=.;subs;subtitles', - '--sid=auto', - '--secondary-sid=auto', - '--secondary-sub-visibility=no', - '--alang=ja,jp,jpn,japanese,en,eng,english,enus,en-us', - '--slang=ja,jp,jpn,japanese,en,eng,english,enus,en-us', - `--log-file=${DEFAULT_MPV_LOG_PATH}`, - `--input-ipc-server=${appState.mpvSocketPath}`, - ], - ); - launchedWindowsMpv = launchResult.ok; - if (launchResult.ok) { - logger.info(`Bootstrapping Windows mpv for YouTube playback via ${launchResult.mpvPath}`); - } - if (!launchResult.ok) { - logger.warn('Unable to bootstrap Windows mpv for YouTube playback.'); - } - } - const connected = await waitForYoutubeMpvConnected( - launchedWindowsMpv ? YOUTUBE_MPV_AUTO_LAUNCH_TIMEOUT_MS : YOUTUBE_MPV_CONNECT_TIMEOUT_MS, - ); - if (!connected) { - throw new Error( - launchedWindowsMpv - ? 'MPV not connected after auto-launch. Ensure mpv is installed and can open the requested YouTube URL.' - : 'MPV not connected. 
Start mpv with the SubMiner profile or retry after mpv finishes starting.', - ); - } - if (request.source === 'initial') { - youtubePlayQuitOnDisconnectArmTimer = setTimeout(() => { - if (youtubePlaybackFlowGeneration !== flowGeneration) { - return; - } - youtubePlayQuitOnDisconnectArmed = true; - youtubePlayQuitOnDisconnectArmTimer = null; - }, 3000); - } - const mediaReady = await prepareYoutubePlaybackInMpv({ url: playbackUrl }); - if (!mediaReady) { - throw new Error('Timed out waiting for mpv to load the requested YouTube URL.'); - } - await youtubeFlowRuntime.runYoutubePlaybackFlow({ - url: request.url, - mode: request.mode, - }); - flowCompleted = true; - logger.info(`YouTube playback flow completed from ${request.source}.`); - } finally { - if (youtubePlaybackFlowGeneration === flowGeneration) { - if (!flowCompleted) { - clearYoutubePlayQuitOnDisconnectArmTimer(); - youtubePlayQuitOnDisconnectArmed = false; - } - youtubePrimarySubtitleNotificationRuntime.setAppOwnedFlowInFlight(false); - } - } -} + }, + resolveYoutubePlaybackUrl: (url, format) => resolveYoutubePlaybackUrl(url, format), + launchWindowsMpv: (playbackUrl, args) => + launchWindowsMpv( + [playbackUrl], + createWindowsMpvLaunchDeps({ + showError: (title, content) => dialog.showErrorBox(title, content), + }), + [...args, `--log-file=${DEFAULT_MPV_LOG_PATH}`], + ), + waitForYoutubeMpvConnected: (timeoutMs) => waitForYoutubeMpvConnected(timeoutMs), + prepareYoutubePlaybackInMpv: (request) => prepareYoutubePlaybackInMpv(request), + runYoutubePlaybackFlow: (request) => youtubeFlowRuntime.runYoutubePlaybackFlow(request), + logInfo: (message) => logger.info(message), + logWarn: (message) => logger.warn(message), + schedule: (callback, delayMs) => setTimeout(callback, delayMs), + clearScheduled: (timer) => clearTimeout(timer), +}); let firstRunSetupMessage: string | null = null; const resolveWindowsMpvShortcutRuntimePaths = () => @@ -1077,6 +1037,12 @@ const resolveWindowsMpvShortcutRuntimePaths = () => 
appDataDir: app.getPath('appData'), desktopDir: app.getPath('desktop'), }); +syncInstalledFirstRunPluginBinaryPath({ + platform: process.platform, + homeDir: os.homedir(), + xdgConfigHome: process.env.XDG_CONFIG_HOME, + binaryPath: process.execPath, +}); const firstRunSetupService = createFirstRunSetupService({ platform: process.platform, configDir: CONFIG_DIR, @@ -1106,6 +1072,7 @@ const firstRunSetupService = createFirstRunSetupService({ dirname: __dirname, appPath: app.getAppPath(), resourcesPath: process.resourcesPath, + binaryPath: process.execPath, }), detectWindowsMpvShortcuts: () => { if (process.platform !== 'win32') { @@ -1141,60 +1108,21 @@ const firstRunSetupService = createFirstRunSetupService({ }); const discordPresenceSessionStartedAtMs = Date.now(); let discordPresenceMediaDurationSec: number | null = null; - -function refreshDiscordPresenceMediaDuration(): void { - const client = appState.mpvClient; - if (!client || !client.connected) return; - void client - .requestProperty('duration') - .then((value) => { - const numeric = Number(value); - discordPresenceMediaDurationSec = Number.isFinite(numeric) && numeric > 0 ? numeric : null; - }) - .catch(() => { - discordPresenceMediaDurationSec = null; - }); -} - -function publishDiscordPresence(): void { - const discordPresenceService = appState.discordPresenceService; - if (!discordPresenceService || getResolvedConfig().discordPresence.enabled !== true) { - return; - } - - refreshDiscordPresenceMediaDuration(); - discordPresenceService.publish({ - mediaTitle: appState.currentMediaTitle, - mediaPath: appState.currentMediaPath, - subtitleText: appState.currentSubText, - currentTimeSec: appState.mpvClient?.currentTimePos ?? null, - mediaDurationSec: - discordPresenceMediaDurationSec ?? 
anilistMediaGuessRuntimeState.mediaDurationSec, - paused: appState.playbackPaused, - connected: Boolean(appState.mpvClient?.connected), - sessionStartedAtMs: discordPresenceSessionStartedAtMs, - }); -} - -function createDiscordRpcClient() { - const discordRpc = require('discord-rpc') as { - Client: new (opts: { transport: 'ipc' }) => { - login: (opts: { clientId: string }) => Promise; - setActivity: (activity: Record) => Promise; - clearActivity: () => Promise; - destroy: () => void; - }; - }; - const client = new discordRpc.Client({ transport: 'ipc' }); - - return { - login: () => client.login({ clientId: DISCORD_PRESENCE_APP_ID }), - setActivity: (activity: unknown) => - client.setActivity(activity as unknown as Record), - clearActivity: () => client.clearActivity(), - destroy: () => client.destroy(), - }; -} +const discordPresenceRuntime = createDiscordPresenceRuntime({ + getDiscordPresenceService: () => appState.discordPresenceService, + isDiscordPresenceEnabled: () => getResolvedConfig().discordPresence.enabled === true, + getMpvClient: () => appState.mpvClient, + getCurrentMediaTitle: () => appState.currentMediaTitle, + getCurrentMediaPath: () => appState.currentMediaPath, + getCurrentSubtitleText: () => appState.currentSubText, + getPlaybackPaused: () => appState.playbackPaused, + getFallbackMediaDurationSec: () => anilistMediaGuessRuntimeState.mediaDurationSec, + getSessionStartedAtMs: () => discordPresenceSessionStartedAtMs, + getMediaDurationSec: () => discordPresenceMediaDurationSec, + setMediaDurationSec: (next) => { + discordPresenceMediaDurationSec = next; + }, +}); async function initializeDiscordPresenceService(): Promise { if (getResolvedConfig().discordPresence.enabled !== true) { @@ -1204,11 +1132,11 @@ async function initializeDiscordPresenceService(): Promise { appState.discordPresenceService = createDiscordPresenceService({ config: getResolvedConfig().discordPresence, - createClient: () => createDiscordRpcClient(), + createClient: () => 
createDiscordRpcClient(DISCORD_PRESENCE_APP_ID), logDebug: (message, meta) => logger.debug(message, meta), }); await appState.discordPresenceService.start(); - publishDiscordPresence(); + discordPresenceRuntime.publishDiscordPresence(); } const ensureOverlayMpvSubtitlesHidden = createEnsureOverlayMpvSubtitlesHiddenHandler({ getMpvClient: () => appState.mpvClient, @@ -1318,8 +1246,6 @@ const statsCoverArtFetcher = createCoverArtFetcher( const anilistStateRuntime = createAnilistStateRuntime(buildAnilistStateRuntimeMainDepsHandler()); const configDerivedRuntime = createConfigDerivedRuntime(buildConfigDerivedRuntimeMainDepsHandler()); const subsyncRuntime = createMainSubsyncRuntime(buildMainSubsyncRuntimeMainDepsHandler()); -let autoPlayReadySignalMediaPath: string | null = null; -let autoPlayReadySignalGeneration = 0; const currentMediaTokenizationGate = createCurrentMediaTokenizationGate(); const startupOsdSequencer = createStartupOsdSequencer({ showOsd: (message) => showMpvOsd(message), @@ -1368,110 +1294,6 @@ async function openYoutubeTrackPickerFromPlayback(): Promise { }); } -function maybeSignalPluginAutoplayReady( - payload: SubtitleData, - options?: { forceWhilePaused?: boolean }, -): void { - if (youtubePrimarySubtitleNotificationRuntime.isAppOwnedFlowInFlight()) { - logger.debug('[autoplay-ready] suppressed while app-owned YouTube flow is active'); - return; - } - if (!payload.text.trim()) { - return; - } - const mediaPath = - appState.currentMediaPath?.trim() || - appState.mpvClient?.currentVideoPath?.trim() || - '__unknown__'; - const duplicateMediaSignal = autoPlayReadySignalMediaPath === mediaPath; - const allowDuplicateWhilePaused = - options?.forceWhilePaused === true && appState.playbackPaused !== false; - if (duplicateMediaSignal && !allowDuplicateWhilePaused) { - return; - } - const signalPluginAutoplayReady = (): void => { - logger.debug(`[autoplay-ready] signaling mpv for media: ${mediaPath}`); - sendMpvCommandRuntime(appState.mpvClient, 
['script-message', 'subminer-autoplay-ready']); - }; - if (duplicateMediaSignal && allowDuplicateWhilePaused) { - // Keep re-notifying the plugin while paused (for startup visibility sync), but - // do not run local unpause fallback on duplicates to avoid resuming user-paused playback. - signalPluginAutoplayReady(); - return; - } - autoPlayReadySignalMediaPath = mediaPath; - const playbackGeneration = ++autoPlayReadySignalGeneration; - signalPluginAutoplayReady(); - const isPlaybackPaused = async (client: { - requestProperty: (property: string) => Promise; - }): Promise => { - try { - const pauseProperty = await client.requestProperty('pause'); - if (typeof pauseProperty === 'boolean') { - return pauseProperty; - } - if (typeof pauseProperty === 'string') { - return pauseProperty.toLowerCase() !== 'no' && pauseProperty !== '0'; - } - if (typeof pauseProperty === 'number') { - return pauseProperty !== 0; - } - logger.debug( - `[autoplay-ready] unrecognized pause property for media ${mediaPath}: ${String(pauseProperty)}`, - ); - } catch (error) { - logger.debug( - `[autoplay-ready] failed to read pause property for media ${mediaPath}: ${(error as Error).message}`, - ); - } - return true; - }; - - // Fallback: repeatedly try to release pause for a short window in case startup - // gate arming and tokenization-ready signal arrive out of order. 
- const releaseRetryDelayMs = 200; - const maxReleaseAttempts = resolveAutoplayReadyMaxReleaseAttempts({ - forceWhilePaused: options?.forceWhilePaused === true, - retryDelayMs: releaseRetryDelayMs, - }); - const attemptRelease = (attempt: number): void => { - void (async () => { - if ( - autoPlayReadySignalMediaPath !== mediaPath || - playbackGeneration !== autoPlayReadySignalGeneration - ) { - return; - } - - const mpvClient = appState.mpvClient; - if (!mpvClient?.connected) { - if (attempt < maxReleaseAttempts) { - setTimeout(() => attemptRelease(attempt + 1), releaseRetryDelayMs); - } - return; - } - - const shouldUnpause = await isPlaybackPaused(mpvClient); - logger.debug( - `[autoplay-ready] mpv paused before fallback attempt ${attempt} for ${mediaPath}: ${shouldUnpause}`, - ); - if (!shouldUnpause) { - if (attempt === 0) { - logger.debug('[autoplay-ready] mpv already playing; no fallback unpause needed'); - } - return; - } - - signalPluginAutoplayReady(); - mpvClient.send({ command: ['set_property', 'pause', false] }); - if (attempt < maxReleaseAttempts) { - setTimeout(() => attemptRelease(attempt + 1), releaseRetryDelayMs); - } - })(); - }; - attemptRelease(0); -} - let appTray: Tray | null = null; let tokenizeSubtitleDeferred: ((text: string) => Promise) | null = null; function withCurrentSubtitleTiming(payload: SubtitleData): SubtitleData { @@ -1545,6 +1367,11 @@ const subtitlePrefetchInitController = createSubtitlePrefetchInitController({ appState.activeParsedSubtitleSource = sourceKey; }, }); +const resolveActiveSubtitleSidebarSourceHandler = createResolveActiveSubtitleSidebarSourceHandler({ + getFfmpegPath: () => getResolvedConfig().subsync.ffmpeg_path.trim() || 'ffmpeg', + extractInternalSubtitleTrack: (ffmpegPath, videoPath, track) => + extractInternalSubtitleTrackToTempFile(ffmpegPath, videoPath, track), +}); async function refreshSubtitleSidebarFromSource(sourcePath: string): Promise { const normalizedSourcePath = 
resolveSubtitleSourcePath(sourcePath.trim()); @@ -1557,60 +1384,30 @@ async function refreshSubtitleSidebarFromSource(sourcePath: string): Promise { - const client = appState.mpvClient; - if (!client?.connected) { - return; - } - - try { - const [currentExternalFilenameRaw, currentTrackRaw, trackListRaw, sidRaw, videoPathRaw] = - await Promise.all([ - client.requestProperty('current-tracks/sub/external-filename').catch(() => null), - client.requestProperty('current-tracks/sub').catch(() => null), - client.requestProperty('track-list'), - client.requestProperty('sid'), - client.requestProperty('path'), - ]); - const videoPath = typeof videoPathRaw === 'string' ? videoPathRaw : ''; - if (!videoPath) { - subtitlePrefetchInitController.cancelPendingInit(); - return; - } - - const resolvedSource = await resolveActiveSubtitleSidebarSource( - currentExternalFilenameRaw, - currentTrackRaw, - trackListRaw, - sidRaw, - videoPath, - ); - if (!resolvedSource) { - subtitlePrefetchInitController.cancelPendingInit(); - return; - } - try { - await subtitlePrefetchInitController.initSubtitlePrefetch( - resolvedSource.path, - lastObservedTimePos, - resolvedSource.sourceKey, - ); - } finally { - await resolvedSource.cleanup?.(); - } - } catch { - // Track list query failed; skip subtitle prefetch refresh. 
- } -} +const refreshSubtitlePrefetchFromActiveTrackHandler = + createRefreshSubtitlePrefetchFromActiveTrackHandler({ + getMpvClient: () => appState.mpvClient, + getLastObservedTimePos: () => lastObservedTimePos, + subtitlePrefetchInitController, + resolveActiveSubtitleSidebarSource: (input) => + resolveActiveSubtitleSidebarSourceHandler(input), + }); function scheduleSubtitlePrefetchRefresh(delayMs = 0): void { clearScheduledSubtitlePrefetchRefresh(); subtitlePrefetchRefreshTimer = setTimeout(() => { subtitlePrefetchRefreshTimer = null; - void refreshSubtitlePrefetchFromActiveTrack(); + void refreshSubtitlePrefetchFromActiveTrackHandler(); }, delayMs); } +const subtitlePrefetchRuntime = { + cancelPendingInit: () => subtitlePrefetchInitController.cancelPendingInit(), + initSubtitlePrefetch: subtitlePrefetchInitController.initSubtitlePrefetch, + refreshSubtitleSidebarFromSource: (sourcePath: string) => refreshSubtitleSidebarFromSource(sourcePath), + refreshSubtitlePrefetchFromActiveTrack: () => refreshSubtitlePrefetchFromActiveTrackHandler(), + scheduleSubtitlePrefetchRefresh: (delayMs?: number) => scheduleSubtitlePrefetchRefresh(delayMs), + clearScheduledSubtitlePrefetchRefresh: () => clearScheduledSubtitlePrefetchRefresh(), +} as const; const overlayShortcutsRuntime = createOverlayShortcutsRuntimeService( createBuildOverlayShortcutsRuntimeMainDepsHandler({ @@ -2002,7 +1799,7 @@ const characterDictionaryAutoSyncRuntime = createCharacterDictionaryAutoSyncRunt const overlayVisibilityRuntime = createOverlayVisibilityRuntimeService( createBuildOverlayVisibilityRuntimeMainDepsHandler({ getMainWindow: () => overlayManager.getMainWindow(), - getModalActive: () => overlayModalInputExclusive, + getModalActive: () => overlayModalInputState.getModalInputExclusive(), getVisibleOverlayVisible: () => overlayManager.getVisibleOverlayVisible(), getForceMousePassthrough: () => appState.statsOverlayVisible, getWindowTracker: () => appState.windowTracker, @@ -2041,12 +1838,10 @@ 
const overlayVisibilityRuntime = createOverlayVisibilityRuntimeService( }, })(), ); - -const buildGetRuntimeOptionsStateMainDepsHandler = createBuildGetRuntimeOptionsStateMainDepsHandler( - { +const buildGetRuntimeOptionsStateMainDepsHandler = + createBuildGetRuntimeOptionsStateMainDepsHandler({ getRuntimeOptionsManager: () => appState.runtimeOptionsManager, - }, -); + }); const getRuntimeOptionsStateMainDeps = buildGetRuntimeOptionsStateMainDepsHandler(); const getRuntimeOptionsStateHandler = createGetRuntimeOptionsStateHandler( getRuntimeOptionsStateMainDeps, @@ -2067,14 +1862,6 @@ const buildRestorePreviousSecondarySubVisibilityMainDepsHandler = syncOverlayVisibilityForModal = () => { overlayVisibilityRuntime.updateVisibleOverlayVisibility(); }; -const restorePreviousSecondarySubVisibilityMainDeps = - buildRestorePreviousSecondarySubVisibilityMainDepsHandler(); -const restorePreviousSecondarySubVisibilityHandler = - createRestorePreviousSecondarySubVisibilityHandler(restorePreviousSecondarySubVisibilityMainDeps); - -function restorePreviousSecondarySubVisibility(): void { - restorePreviousSecondarySubVisibilityHandler(); -} function broadcastToOverlayWindows(channel: string, ...args: unknown[]): void { overlayManager.broadcastToOverlayWindows(channel, ...args); @@ -2086,32 +1873,12 @@ const buildBroadcastRuntimeOptionsChangedMainDepsHandler = getRuntimeOptionsState: () => getRuntimeOptionsState(), broadcastToOverlayWindows: (channel, ...args) => broadcastToOverlayWindows(channel, ...args), }); -const broadcastRuntimeOptionsChangedMainDeps = buildBroadcastRuntimeOptionsChangedMainDepsHandler(); -const broadcastRuntimeOptionsChangedHandler = createBroadcastRuntimeOptionsChangedHandler( - broadcastRuntimeOptionsChangedMainDeps, -); - -function broadcastRuntimeOptionsChanged(): void { - broadcastRuntimeOptionsChangedHandler(); -} const buildSendToActiveOverlayWindowMainDepsHandler = createBuildSendToActiveOverlayWindowMainDepsHandler({ 
sendToActiveOverlayWindowRuntime: (channel, payload, runtimeOptions) => overlayModalRuntime.sendToActiveOverlayWindow(channel, payload, runtimeOptions), }); -const sendToActiveOverlayWindowMainDeps = buildSendToActiveOverlayWindowMainDepsHandler(); -const sendToActiveOverlayWindowHandler = createSendToActiveOverlayWindowHandler( - sendToActiveOverlayWindowMainDeps, -); - -function sendToActiveOverlayWindow( - channel: string, - payload?: unknown, - runtimeOptions?: { restoreOnModalClose?: OverlayHostedModal }, -): boolean { - return sendToActiveOverlayWindowHandler(channel, payload, runtimeOptions); -} const buildSetOverlayDebugVisualizationEnabledMainDepsHandler = createBuildSetOverlayDebugVisualizationEnabledMainDepsHandler({ @@ -2121,27 +1888,44 @@ const buildSetOverlayDebugVisualizationEnabledMainDepsHandler = appState.overlayDebugVisualizationEnabled = next; }, }); -const setOverlayDebugVisualizationEnabledMainDeps = - buildSetOverlayDebugVisualizationEnabledMainDepsHandler(); -const setOverlayDebugVisualizationEnabledHandler = createSetOverlayDebugVisualizationEnabledHandler( - setOverlayDebugVisualizationEnabledMainDeps, -); - -function setOverlayDebugVisualizationEnabled(enabled: boolean): void { - setOverlayDebugVisualizationEnabledHandler(enabled); -} const buildOpenRuntimeOptionsPaletteMainDepsHandler = createBuildOpenRuntimeOptionsPaletteMainDepsHandler({ openRuntimeOptionsPaletteRuntime: () => overlayModalRuntime.openRuntimeOptionsPalette(), }); -const openRuntimeOptionsPaletteMainDeps = buildOpenRuntimeOptionsPaletteMainDepsHandler(); -const openRuntimeOptionsPaletteHandler = createOpenRuntimeOptionsPaletteHandler( - openRuntimeOptionsPaletteMainDeps, -); +const overlayVisibilityComposer = composeOverlayVisibilityRuntime({ + overlayVisibilityRuntime, + restorePreviousSecondarySubVisibilityMainDeps: + buildRestorePreviousSecondarySubVisibilityMainDepsHandler(), + broadcastRuntimeOptionsChangedMainDeps: 
buildBroadcastRuntimeOptionsChangedMainDepsHandler(), + sendToActiveOverlayWindowMainDeps: buildSendToActiveOverlayWindowMainDepsHandler(), + setOverlayDebugVisualizationEnabledMainDeps: + buildSetOverlayDebugVisualizationEnabledMainDepsHandler(), + openRuntimeOptionsPaletteMainDeps: buildOpenRuntimeOptionsPaletteMainDepsHandler(), +}); + +function restorePreviousSecondarySubVisibility(): void { + overlayVisibilityComposer.restorePreviousSecondarySubVisibility(); +} + +function broadcastRuntimeOptionsChanged(): void { + overlayVisibilityComposer.broadcastRuntimeOptionsChanged(); +} + +function sendToActiveOverlayWindow( + channel: string, + payload?: unknown, + runtimeOptions?: { restoreOnModalClose?: OverlayHostedModal }, +): boolean { + return overlayVisibilityComposer.sendToActiveOverlayWindow(channel, payload, runtimeOptions); +} + +function setOverlayDebugVisualizationEnabled(enabled: boolean): void { + overlayVisibilityComposer.setOverlayDebugVisualizationEnabled(enabled); +} function openRuntimeOptionsPalette(): void { - openRuntimeOptionsPaletteHandler(); + overlayVisibilityComposer.openRuntimeOptionsPalette(); } function getResolvedConfig() { @@ -2355,18 +2139,9 @@ const { getSetupWindow: () => appState.jellyfinSetupWindow, }, openJellyfinSetupWindowMainDeps: { - createSetupWindow: () => - new BrowserWindow({ - width: 520, - height: 560, - title: 'Jellyfin Setup', - show: true, - autoHideMenuBar: true, - webPreferences: { - nodeIntegration: false, - contextIsolation: true, - }, - }), + createSetupWindow: createCreateJellyfinSetupWindowHandler({ + createBrowserWindow: (options) => new BrowserWindow(options), + }), buildSetupFormHtml: (defaultServer, defaultUser) => buildJellyfinSetupFormHtml(defaultServer, defaultUser), parseSubmissionUrl: (rawUrl) => parseJellyfinSetupSubmissionUrl(rawUrl), @@ -2400,21 +2175,9 @@ const maybeFocusExistingFirstRunSetupWindow = createMaybeFocusExistingFirstRunSe }); const openFirstRunSetupWindowHandler = 
createOpenFirstRunSetupWindowHandler({ maybeFocusExistingSetupWindow: maybeFocusExistingFirstRunSetupWindow, - createSetupWindow: () => - new BrowserWindow({ - width: 480, - height: 460, - title: 'SubMiner Setup', - show: true, - autoHideMenuBar: true, - resizable: false, - minimizable: false, - maximizable: false, - webPreferences: { - nodeIntegration: false, - contextIsolation: true, - }, - }), + createSetupWindow: createCreateFirstRunSetupWindowHandler({ + createBrowserWindow: (options) => new BrowserWindow(options), + }), getSetupSnapshot: async () => { const snapshot = await firstRunSetupService.getSetupStatus(); return { @@ -2559,18 +2322,9 @@ const maybeFocusExistingAnilistSetupWindow = createMaybeFocusExistingAnilistSetu const buildOpenAnilistSetupWindowMainDepsHandler = createBuildOpenAnilistSetupWindowMainDepsHandler( { maybeFocusExistingSetupWindow: maybeFocusExistingAnilistSetupWindow, - createSetupWindow: () => - new BrowserWindow({ - width: 1000, - height: 760, - title: 'Anilist Setup', - show: true, - autoHideMenuBar: true, - webPreferences: { - nodeIntegration: false, - contextIsolation: true, - }, - }), + createSetupWindow: createCreateAnilistSetupWindowHandler({ + createBrowserWindow: (options) => new BrowserWindow(options), + }), buildAuthorizeUrl: () => buildAnilistSetupUrl({ authorizeUrl: ANILIST_SETUP_CLIENT_ID_URL, @@ -3027,7 +2781,8 @@ const ensureStatsServerStarted = (): string => { knownWordCachePath: path.join(USER_DATA_PATH, 'known-words-cache.json'), mpvSocketPath: appState.mpvSocketPath, ankiConnectConfig: getResolvedConfig().ankiConnect, - resolveAnkiNoteId: (noteId: number) => appState.ankiIntegration?.resolveCurrentNoteId(noteId) ?? noteId, + resolveAnkiNoteId: (noteId: number) => + appState.ankiIntegration?.resolveCurrentNoteId(noteId) ?? 
noteId, addYomitanNote: async (word: string) => { const ankiUrl = getResolvedConfig().ankiConnect.url || 'http://127.0.0.1:8765'; await syncYomitanDefaultAnkiServerCore(ankiUrl, yomitanDeps, yomitanLogger, { @@ -3201,9 +2956,10 @@ const ensureImmersionTrackerStarted = (): void => { hasAttemptedImmersionTrackerStartup = true; createImmersionTrackerStartup(); }; - -const runStatsCliCommand = createRunStatsCliCommandHandler({ - getResolvedConfig: () => getResolvedConfig(), +const statsStartupRuntime = { + ensureStatsServerStarted: () => ensureStatsServerStarted(), + ensureBackgroundStatsServerStarted: () => ensureBackgroundStatsServerStarted(), + stopBackgroundStatsServer: () => stopBackgroundStatsServer(), ensureImmersionTrackerStarted: () => { appState.statsStartupInProgress = true; try { @@ -3212,13 +2968,18 @@ const runStatsCliCommand = createRunStatsCliCommandHandler({ appState.statsStartupInProgress = false; } }, +} as const; + +const runStatsCliCommand = createRunStatsCliCommandHandler({ + getResolvedConfig: () => getResolvedConfig(), + ensureImmersionTrackerStarted: () => statsStartupRuntime.ensureImmersionTrackerStarted(), ensureVocabularyCleanupTokenizerReady: async () => { await createMecabTokenizerAndCheck(); }, getImmersionTracker: () => appState.immersionTracker, - ensureStatsServerStarted: () => ensureStatsServerStarted(), - ensureBackgroundStatsServerStarted: () => ensureBackgroundStatsServerStarted(), - stopBackgroundStatsServer: () => stopBackgroundStatsServer(), + ensureStatsServerStarted: () => statsStartupRuntime.ensureStatsServerStarted(), + ensureBackgroundStatsServerStarted: () => statsStartupRuntime.ensureBackgroundStatsServerStarted(), + stopBackgroundStatsServer: () => statsStartupRuntime.stopBackgroundStatsServer(), openExternal: (url: string) => shell.openExternal(url), writeResponse: (responsePath, payload) => { writeStatsCliCommandResponse(responsePath, payload); @@ -3432,21 +3193,9 @@ const { appReadyRuntimeRunner } = 
composeAppReadyRuntime({ shouldRunHeadlessInitialCommand: () => Boolean(appState.initialArgs && isHeadlessInitialCommand(appState.initialArgs)), shouldUseMinimalStartup: () => - Boolean( - appState.initialArgs?.texthooker || - (appState.initialArgs?.stats && - (appState.initialArgs?.statsCleanup || - appState.initialArgs?.statsBackground || - appState.initialArgs?.statsStop)), - ), + getStartupModeFlags(appState.initialArgs).shouldUseMinimalStartup, shouldSkipHeavyStartup: () => - Boolean( - appState.initialArgs && - (shouldRunSettingsOnlyStartup(appState.initialArgs) || - appState.initialArgs.stats || - appState.initialArgs.dictionary || - appState.initialArgs.setup), - ), + getStartupModeFlags(appState.initialArgs).shouldSkipHeavyStartup, createImmersionTracker: () => { ensureImmersionTrackerStarted(); }, @@ -3458,88 +3207,6 @@ const { appReadyRuntimeRunner } = composeAppReadyRuntime({ immersionTrackerStartupMainDeps, }); -const { runAndApplyStartupState } = runtimeRegistry.startup.createStartupRuntimeHandlers< - CliArgs, - StartupState, - ReturnType ->({ - appLifecycleRuntimeRunnerMainDeps: { - app: appLifecycleApp, - platform: process.platform, - shouldStartApp: (nextArgs: CliArgs) => shouldStartApp(nextArgs), - parseArgs: (argv: string[]) => parseArgs(argv), - handleCliCommand: (nextArgs: CliArgs, source: CliCommandSource) => - handleCliCommand(nextArgs, source), - printHelp: () => printHelp(DEFAULT_TEXTHOOKER_PORT), - logNoRunningInstance: () => appLogger.logNoRunningInstance(), - onReady: appReadyRuntimeRunner, - onWillQuitCleanup: () => onWillQuitCleanupHandler(), - shouldRestoreWindowsOnActivate: () => shouldRestoreWindowsOnActivateHandler(), - restoreWindowsOnActivate: () => restoreWindowsOnActivateHandler(), - shouldQuitOnWindowAllClosed: () => !appState.backgroundMode, - }, - createAppLifecycleRuntimeRunner: (params) => createAppLifecycleRuntimeRunner(params), - buildStartupBootstrapMainDeps: (startAppLifecycle) => ({ - argv: process.argv, - parseArgs: 
(argv: string[]) => parseArgs(argv), - setLogLevel: (level: string, source: LogLevelSource) => { - setLogLevel(level, source); - }, - forceX11Backend: (args: CliArgs) => { - forceX11Backend(args); - }, - enforceUnsupportedWaylandMode: (args: CliArgs) => { - enforceUnsupportedWaylandMode(args); - }, - shouldStartApp: (args: CliArgs) => shouldStartApp(args), - getDefaultSocketPath: () => getDefaultSocketPath(), - defaultTexthookerPort: DEFAULT_TEXTHOOKER_PORT, - configDir: CONFIG_DIR, - defaultConfig: DEFAULT_CONFIG, - generateConfigTemplate: (config: ResolvedConfig) => generateConfigTemplate(config), - generateDefaultConfigFile: ( - args: CliArgs, - options: { - configDir: string; - defaultConfig: unknown; - generateTemplate: (config: unknown) => string; - }, - ) => generateDefaultConfigFile(args, options), - setExitCode: (code) => { - process.exitCode = code; - }, - quitApp: () => requestAppQuit(), - logGenerateConfigError: (message) => logger.error(message), - startAppLifecycle, - }), - createStartupBootstrapRuntimeDeps: (deps) => createStartupBootstrapRuntimeDeps(deps), - runStartupBootstrapRuntime, - applyStartupState: (startupState) => applyStartupState(appState, startupState), -}); - -runAndApplyStartupState(); -if (isAnilistTrackingEnabled(getResolvedConfig())) { - void refreshAnilistClientSecretStateIfEnabled({ force: true }); - anilistStateRuntime.refreshRetryQueueState(); -} -void initializeDiscordPresenceService(); - -const handleCliCommand = createCliCommandRuntimeHandler({ - handleTexthookerOnlyModeTransitionMainDeps: { - isTexthookerOnlyMode: () => appState.texthookerOnlyMode, - ensureOverlayStartupPrereqs: () => ensureOverlayStartupPrereqs(), - setTexthookerOnlyMode: (enabled) => { - appState.texthookerOnlyMode = enabled; - }, - commandNeedsOverlayStartupPrereqs: (inputArgs) => commandNeedsOverlayStartupPrereqs(inputArgs), - startBackgroundWarmups: () => startBackgroundWarmups(), - logInfo: (message: string) => logger.info(message), - }, - 
createCliCommandContext: () => createCliCommandContextHandler(), - handleCliCommandRuntimeServiceWithContext: (args, source, cliContext) => - handleCliCommandRuntimeServiceWithContext(args, source, cliContext), -}); - function ensureOverlayStartupPrereqs(): void { if (appState.subtitlePosition === null) { loadSubtitlePosition(); @@ -3584,29 +3251,6 @@ async function ensureYoutubePlaybackRuntimeReady(): Promise { ensureOverlayWindowsReadyForVisibilityActions(); } -const handleInitialArgsRuntimeHandler = createInitialArgsRuntimeHandler({ - getInitialArgs: () => appState.initialArgs, - isBackgroundMode: () => appState.backgroundMode, - shouldEnsureTrayOnStartup: () => - shouldEnsureTrayOnStartupForInitialArgs(process.platform, appState.initialArgs), - shouldRunHeadlessInitialCommand: (args) => isHeadlessInitialCommand(args), - ensureTray: () => ensureTray(), - isTexthookerOnlyMode: () => appState.texthookerOnlyMode, - hasImmersionTracker: () => Boolean(appState.immersionTracker), - getMpvClient: () => appState.mpvClient, - commandNeedsOverlayStartupPrereqs: (args) => commandNeedsOverlayStartupPrereqs(args), - commandNeedsOverlayRuntime: (args) => commandNeedsOverlayRuntime(args), - ensureOverlayStartupPrereqs: () => ensureOverlayStartupPrereqs(), - isOverlayRuntimeInitialized: () => appState.overlayRuntimeInitialized, - initializeOverlayRuntime: () => initializeOverlayRuntime(), - logInfo: (message) => logger.info(message), - handleCliCommand: (args, source) => handleCliCommand(args, source), -}); - -function handleInitialArgs(): void { - handleInitialArgsRuntimeHandler(); -} - const { createMpvClientRuntimeService: createMpvClientRuntimeServiceHandler, updateMpvSubtitleRenderMetrics: updateMpvSubtitleRenderMetricsHandler, @@ -3624,7 +3268,7 @@ const { bindMpvMainEventHandlersMainDeps: { appState, getQuitOnDisconnectArmed: () => - jellyfinPlayQuitOnDisconnectArmed || youtubePlayQuitOnDisconnectArmed, + jellyfinPlayQuitOnDisconnectArmed || 
youtubePlaybackRuntime.getQuitOnDisconnectArmed(), scheduleQuitCheck: (callback) => { setTimeout(callback, 500); }, @@ -3646,7 +3290,7 @@ const { subtitleProcessingController.onSubtitleChange(text); }, refreshDiscordPresence: () => { - publishDiscordPresence(); + discordPresenceRuntime.publishDiscordPresence(); }, ensureImmersionTrackerInitialized: () => { ensureImmersionTrackerStarted(); @@ -3654,16 +3298,16 @@ const { tokenizeSubtitleForImmersion: async (text): Promise => tokenizeSubtitleDeferred ? await tokenizeSubtitleDeferred(text) : null, updateCurrentMediaPath: (path) => { - autoPlayReadySignalMediaPath = null; + autoplayReadyGate.invalidatePendingAutoplayReadyFallbacks(); currentMediaTokenizationGate.updateCurrentMediaPath(path); startupOsdSequencer.reset(); - clearScheduledSubtitlePrefetchRefresh(); - subtitlePrefetchInitController.cancelPendingInit(); + subtitlePrefetchRuntime.clearScheduledSubtitlePrefetchRefresh(); + subtitlePrefetchRuntime.cancelPendingInit(); youtubePrimarySubtitleNotificationRuntime.handleMediaPathChange(path); if (path) { ensureImmersionTrackerStarted(); // Delay slightly to allow MPV's track-list to be populated. 
- scheduleSubtitlePrefetchRefresh(500); + subtitlePrefetchRuntime.scheduleSubtitlePrefetchRefresh(500); } mediaRuntime.updateCurrentMediaPath(path); }, @@ -3690,7 +3334,7 @@ const { if (!isTokenizationWarmupReady()) { return; } - maybeSignalPluginAutoplayReady( + autoplayReadyGate.maybeSignalPluginAutoplayReady( { text: '__warm__', tokens: null }, { forceWhilePaused: true }, ); @@ -3811,7 +3455,10 @@ const { appState.currentMediaPath?.trim() || appState.mpvClient?.currentVideoPath?.trim() || null, ); startupOsdSequencer.markTokenizationReady(); - maybeSignalPluginAutoplayReady({ text, tokens: null }, { forceWhilePaused: true }); + autoplayReadyGate.maybeSignalPluginAutoplayReady( + { text, tokens: null }, + { forceWhilePaused: true }, + ); }, }, createTokenizerRuntimeDeps: (deps) => @@ -4381,48 +4028,6 @@ function parseTrackId(value: unknown): number | null { return null; } -function getActiveSubtitleTrack( - currentTrackRaw: unknown, - trackListRaw: unknown, - sidRaw: unknown, -): MpvSubtitleTrackLike | null { - if (currentTrackRaw && typeof currentTrackRaw === 'object') { - const track = currentTrackRaw as MpvSubtitleTrackLike; - if (track.type === undefined || track.type === 'sub') { - return track; - } - } - - const sid = parseTrackId(sidRaw); - if (!Array.isArray(trackListRaw)) { - return null; - } - - const bySid = - sid === null - ? null - : ((trackListRaw.find((entry: unknown) => { - if (!entry || typeof entry !== 'object') { - return false; - } - const track = entry as MpvSubtitleTrackLike; - return track.type === 'sub' && parseTrackId(track.id) === sid; - }) as MpvSubtitleTrackLike | undefined) ?? null); - if (bySid) { - return bySid; - } - - return ( - (trackListRaw.find((entry: unknown) => { - if (!entry || typeof entry !== 'object') { - return false; - } - const track = entry as MpvSubtitleTrackLike; - return track.type === 'sub' && track.selected === true; - }) as MpvSubtitleTrackLike | undefined) ?? 
null - ); -} - function buildFfmpegSubtitleExtractionArgs( videoPath: string, ffIndex: number, @@ -4495,41 +4100,6 @@ async function extractInternalSubtitleTrackToTempFile( }; } -async function resolveActiveSubtitleSidebarSource( - currentExternalFilenameRaw: unknown, - currentTrackRaw: unknown, - trackListRaw: unknown, - sidRaw: unknown, - videoPath: string, -): Promise<{ path: string; sourceKey: string; cleanup?: () => Promise } | null> { - const currentExternalFilename = - typeof currentExternalFilenameRaw === 'string' ? currentExternalFilenameRaw.trim() : ''; - if (currentExternalFilename) { - return { path: currentExternalFilename, sourceKey: currentExternalFilename }; - } - - const track = getActiveSubtitleTrack(currentTrackRaw, trackListRaw, sidRaw); - if (!track) { - return null; - } - - const externalFilename = - typeof track['external-filename'] === 'string' ? track['external-filename'].trim() : ''; - if (externalFilename) { - return { path: externalFilename, sourceKey: externalFilename }; - } - - const ffmpegPath = getResolvedConfig().subsync.ffmpeg_path.trim() || 'ffmpeg'; - const extracted = await extractInternalSubtitleTrackToTempFile(ffmpegPath, videoPath, track); - if (!extracted) { - return null; - } - return { - ...extracted, - sourceKey: buildSubtitleSidebarSourceKey(videoPath, track, extracted.path), - }; -} - const shiftSubtitleDelayToAdjacentCueHandler = createShiftSubtitleDelayToAdjacentCueHandler({ getMpvClient: () => appState.mpvClient, loadSubtitleSourceText, @@ -4589,7 +4159,8 @@ const { registerIpcRuntimeHandlers } = composeIpcRuntimeHandlers({ openYomitanSettings: () => openYomitanSettings(), quitApp: () => requestAppQuit(), toggleVisibleOverlay: () => toggleVisibleOverlay(), - tokenizeCurrentSubtitle: async () => withCurrentSubtitleTiming(await tokenizeSubtitle(appState.currentSubText)), + tokenizeCurrentSubtitle: async () => + withCurrentSubtitleTiming(await tokenizeSubtitle(appState.currentSubText)), getCurrentSubtitleRaw: () => 
appState.currentSubText, getCurrentSubtitleAss: () => appState.currentSubAssText, getSubtitleSidebarSnapshot: async () => { @@ -4611,19 +4182,14 @@ const { registerIpcRuntimeHandlers } = composeIpcRuntimeHandlers({ } try { - const [ - currentExternalFilenameRaw, - currentTrackRaw, - trackListRaw, - sidRaw, - videoPathRaw, - ] = await Promise.all([ - client.requestProperty('current-tracks/sub/external-filename').catch(() => null), - client.requestProperty('current-tracks/sub').catch(() => null), - client.requestProperty('track-list'), - client.requestProperty('sid'), - client.requestProperty('path'), - ]); + const [currentExternalFilenameRaw, currentTrackRaw, trackListRaw, sidRaw, videoPathRaw] = + await Promise.all([ + client.requestProperty('current-tracks/sub/external-filename').catch(() => null), + client.requestProperty('current-tracks/sub').catch(() => null), + client.requestProperty('track-list'), + client.requestProperty('sid'), + client.requestProperty('path'), + ]); const videoPath = typeof videoPathRaw === 'string' ? 
videoPathRaw : ''; if (!videoPath) { return { @@ -4634,13 +4200,13 @@ const { registerIpcRuntimeHandlers } = composeIpcRuntimeHandlers({ }; } - const resolvedSource = await resolveActiveSubtitleSidebarSource( + const resolvedSource = await resolveActiveSubtitleSidebarSourceHandler({ currentExternalFilenameRaw, currentTrackRaw, trackListRaw, sidRaw, videoPath, - ); + }); if (!resolvedSource) { return { cues: appState.activeParsedSubtitleCues, @@ -4650,16 +4216,16 @@ const { registerIpcRuntimeHandlers } = composeIpcRuntimeHandlers({ }; } - if (appState.activeParsedSubtitleSource === resolvedSource.sourceKey) { - return { - cues: appState.activeParsedSubtitleCues, - currentTimeSec, - currentSubtitle, - config, - }; - } - try { + if (appState.activeParsedSubtitleSource === resolvedSource.sourceKey) { + return { + cues: appState.activeParsedSubtitleCues, + currentTimeSec, + currentSubtitle, + config, + }; + } + const content = await loadSubtitleSourceText(resolvedSource.path); const cues = parseSubtitleCues(content, resolvedSource.path); appState.activeParsedSubtitleCues = cues; @@ -4761,58 +4327,168 @@ const { registerIpcRuntimeHandlers } = composeIpcRuntimeHandlers({ registerIpcRuntimeServices, }, }); -const createCliCommandContextHandler = createCliCommandContextFactory({ - appState, - setLogLevel: (level) => setLogLevel(level, 'cli'), - texthookerService, - getResolvedConfig: () => getResolvedConfig(), - openExternal: (url: string) => shell.openExternal(url), - logBrowserOpenError: (url: string, error: unknown) => - logger.error(`Failed to open browser for texthooker URL: ${url}`, error), - showMpvOsd: (text: string) => showMpvOsd(text), - initializeOverlayRuntime: () => initializeOverlayRuntime(), - toggleVisibleOverlay: () => toggleVisibleOverlay(), - openFirstRunSetupWindow: () => openFirstRunSetupWindow(), - setVisibleOverlayVisible: (visible: boolean) => setVisibleOverlayVisible(visible), - copyCurrentSubtitle: () => copyCurrentSubtitle(), - 
startPendingMultiCopy: (timeoutMs: number) => startPendingMultiCopy(timeoutMs), - mineSentenceCard: () => mineSentenceCard(), - startPendingMineSentenceMultiple: (timeoutMs: number) => - startPendingMineSentenceMultiple(timeoutMs), - updateLastCardFromClipboard: () => updateLastCardFromClipboard(), - refreshKnownWordCache: () => refreshKnownWordCache(), - triggerFieldGrouping: () => triggerFieldGrouping(), - triggerSubsyncFromConfig: () => triggerSubsyncFromConfig(), - markLastCardAsAudioCard: () => markLastCardAsAudioCard(), - getAnilistStatus: () => anilistStateRuntime.getStatusSnapshot(), - clearAnilistToken: () => anilistStateRuntime.clearTokenState(), - openAnilistSetupWindow: () => openAnilistSetupWindow(), - openJellyfinSetupWindow: () => openJellyfinSetupWindow(), - getAnilistQueueStatus: () => anilistStateRuntime.getQueueStatusSnapshot(), - processNextAnilistRetryUpdate: () => processNextAnilistRetryUpdate(), - generateCharacterDictionary: async (targetPath?: string) => { - const disabledReason = yomitanProfilePolicy.getCharacterDictionaryDisabledReason(); - if (disabledReason) { - throw new Error(disabledReason); - } - return await characterDictionaryRuntime.generateForCurrentMedia(targetPath); +const { handleCliCommand, handleInitialArgs } = composeCliStartupHandlers({ + cliCommandContextMainDeps: { + appState, + setLogLevel: (level) => setLogLevel(level, 'cli'), + texthookerService, + getResolvedConfig: () => getResolvedConfig(), + openExternal: (url: string) => shell.openExternal(url), + logBrowserOpenError: (url: string, error: unknown) => + logger.error(`Failed to open browser for texthooker URL: ${url}`, error), + showMpvOsd: (text: string) => showMpvOsd(text), + initializeOverlayRuntime: () => initializeOverlayRuntime(), + toggleVisibleOverlay: () => toggleVisibleOverlay(), + openFirstRunSetupWindow: () => openFirstRunSetupWindow(), + setVisibleOverlayVisible: (visible: boolean) => setVisibleOverlayVisible(visible), + copyCurrentSubtitle: () => 
copyCurrentSubtitle(), + startPendingMultiCopy: (timeoutMs: number) => startPendingMultiCopy(timeoutMs), + mineSentenceCard: () => mineSentenceCard(), + startPendingMineSentenceMultiple: (timeoutMs: number) => + startPendingMineSentenceMultiple(timeoutMs), + updateLastCardFromClipboard: () => updateLastCardFromClipboard(), + refreshKnownWordCache: () => refreshKnownWordCache(), + triggerFieldGrouping: () => triggerFieldGrouping(), + triggerSubsyncFromConfig: () => triggerSubsyncFromConfig(), + markLastCardAsAudioCard: () => markLastCardAsAudioCard(), + getAnilistStatus: () => anilistStateRuntime.getStatusSnapshot(), + clearAnilistToken: () => anilistStateRuntime.clearTokenState(), + openAnilistSetupWindow: () => openAnilistSetupWindow(), + openJellyfinSetupWindow: () => openJellyfinSetupWindow(), + getAnilistQueueStatus: () => anilistStateRuntime.getQueueStatusSnapshot(), + processNextAnilistRetryUpdate: () => processNextAnilistRetryUpdate(), + generateCharacterDictionary: async (targetPath?: string) => { + const disabledReason = yomitanProfilePolicy.getCharacterDictionaryDisabledReason(); + if (disabledReason) { + throw new Error(disabledReason); + } + return await characterDictionaryRuntime.generateForCurrentMedia(targetPath); + }, + runJellyfinCommand: (argsFromCommand: CliArgs) => runJellyfinCommand(argsFromCommand), + runStatsCommand: (argsFromCommand: CliArgs, source: CliCommandSource) => + runStatsCliCommand(argsFromCommand, source), + runYoutubePlaybackFlow: (request) => youtubePlaybackRuntime.runYoutubePlaybackFlow(request), + openYomitanSettings: () => openYomitanSettings(), + cycleSecondarySubMode: () => handleCycleSecondarySubMode(), + openRuntimeOptionsPalette: () => openRuntimeOptionsPalette(), + printHelp: () => printHelp(DEFAULT_TEXTHOOKER_PORT), + stopApp: () => requestAppQuit(), + hasMainWindow: () => Boolean(overlayManager.getMainWindow()), + getMultiCopyTimeoutMs: () => getConfiguredShortcuts().multiCopyTimeoutMs, + schedule: (fn: () => void, 
delayMs: number) => setTimeout(fn, delayMs), + logInfo: (message: string) => logger.info(message), + logWarn: (message: string) => logger.warn(message), + logError: (message: string, err: unknown) => logger.error(message, err), + }, + cliCommandRuntimeHandlerMainDeps: { + handleTexthookerOnlyModeTransitionMainDeps: { + isTexthookerOnlyMode: () => appState.texthookerOnlyMode, + ensureOverlayStartupPrereqs: () => ensureOverlayStartupPrereqs(), + setTexthookerOnlyMode: (enabled) => { + appState.texthookerOnlyMode = enabled; + }, + commandNeedsOverlayStartupPrereqs: (inputArgs) => + commandNeedsOverlayStartupPrereqs(inputArgs), + startBackgroundWarmups: () => startBackgroundWarmups(), + logInfo: (message: string) => logger.info(message), + }, + handleCliCommandRuntimeServiceWithContext: (args, source, cliContext) => + handleCliCommandRuntimeServiceWithContext(args, source, cliContext), + }, + initialArgsRuntimeHandlerMainDeps: { + getInitialArgs: () => appState.initialArgs, + isBackgroundMode: () => appState.backgroundMode, + shouldEnsureTrayOnStartup: () => + shouldEnsureTrayOnStartupForInitialArgs(process.platform, appState.initialArgs), + shouldRunHeadlessInitialCommand: (args) => isHeadlessInitialCommand(args), + ensureTray: () => ensureTray(), + isTexthookerOnlyMode: () => appState.texthookerOnlyMode, + hasImmersionTracker: () => Boolean(appState.immersionTracker), + getMpvClient: () => appState.mpvClient, + commandNeedsOverlayStartupPrereqs: (args) => commandNeedsOverlayStartupPrereqs(args), + commandNeedsOverlayRuntime: (args) => commandNeedsOverlayRuntime(args), + ensureOverlayStartupPrereqs: () => ensureOverlayStartupPrereqs(), + isOverlayRuntimeInitialized: () => appState.overlayRuntimeInitialized, + initializeOverlayRuntime: () => initializeOverlayRuntime(), + logInfo: (message) => logger.info(message), }, - runJellyfinCommand: (argsFromCommand: CliArgs) => runJellyfinCommand(argsFromCommand), - runStatsCommand: (argsFromCommand: CliArgs, source: 
CliCommandSource) => - runStatsCliCommand(argsFromCommand, source), - runYoutubePlaybackFlow: (request) => runYoutubePlaybackFlowMain(request), - openYomitanSettings: () => openYomitanSettings(), - cycleSecondarySubMode: () => handleCycleSecondarySubMode(), - openRuntimeOptionsPalette: () => openRuntimeOptionsPalette(), - printHelp: () => printHelp(DEFAULT_TEXTHOOKER_PORT), - stopApp: () => requestAppQuit(), - hasMainWindow: () => Boolean(overlayManager.getMainWindow()), - getMultiCopyTimeoutMs: () => getConfiguredShortcuts().multiCopyTimeoutMs, - schedule: (fn: () => void, delayMs: number) => setTimeout(fn, delayMs), - logInfo: (message: string) => logger.info(message), - logWarn: (message: string) => logger.warn(message), - logError: (message: string, err: unknown) => logger.error(message, err), }); +const { runAndApplyStartupState } = composeHeadlessStartupHandlers< + CliArgs, + StartupState, + ReturnType +>({ + startupRuntimeHandlersDeps: { + appLifecycleRuntimeRunnerMainDeps: { + app: appLifecycleApp, + platform: process.platform, + shouldStartApp: (nextArgs: CliArgs) => shouldStartApp(nextArgs), + parseArgs: (argv: string[]) => parseArgs(argv), + handleCliCommand: (nextArgs: CliArgs, source: CliCommandSource) => + handleCliCommand(nextArgs, source), + printHelp: () => printHelp(DEFAULT_TEXTHOOKER_PORT), + logNoRunningInstance: () => appLogger.logNoRunningInstance(), + onReady: appReadyRuntimeRunner, + onWillQuitCleanup: () => onWillQuitCleanupHandler(), + shouldRestoreWindowsOnActivate: () => shouldRestoreWindowsOnActivateHandler(), + restoreWindowsOnActivate: () => restoreWindowsOnActivateHandler(), + shouldQuitOnWindowAllClosed: () => !appState.backgroundMode, + }, + createAppLifecycleRuntimeRunner: (params) => createAppLifecycleRuntimeRunner(params), + buildStartupBootstrapMainDeps: (startAppLifecycle) => ({ + argv: process.argv, + parseArgs: (argv: string[]) => parseArgs(argv), + setLogLevel: (level: string, source: LogLevelSource) => { + 
setLogLevel(level, source); + }, + forceX11Backend: (args: CliArgs) => { + forceX11Backend(args); + }, + enforceUnsupportedWaylandMode: (args: CliArgs) => { + enforceUnsupportedWaylandMode(args); + }, + shouldStartApp: (args: CliArgs) => shouldStartApp(args), + getDefaultSocketPath: () => getDefaultSocketPath(), + defaultTexthookerPort: DEFAULT_TEXTHOOKER_PORT, + configDir: CONFIG_DIR, + defaultConfig: DEFAULT_CONFIG, + generateConfigTemplate: (config: ResolvedConfig) => generateConfigTemplate(config), + generateDefaultConfigFile: ( + args: CliArgs, + options: { + configDir: string; + defaultConfig: unknown; + generateTemplate: (config: unknown) => string; + }, + ) => generateDefaultConfigFile(args, options), + setExitCode: (code) => { + process.exitCode = code; + }, + quitApp: () => requestAppQuit(), + logGenerateConfigError: (message) => logger.error(message), + startAppLifecycle, + }), + createStartupBootstrapRuntimeDeps: (deps) => createStartupBootstrapRuntimeDeps(deps), + runStartupBootstrapRuntime, + applyStartupState: (startupState) => applyStartupState(appState, startupState), + }, +}); + +runAndApplyStartupState(); +const startupModeFlags = getStartupModeFlags(appState.initialArgs); +const shouldUseMinimalStartup = startupModeFlags.shouldUseMinimalStartup; +const shouldSkipHeavyStartup = startupModeFlags.shouldSkipHeavyStartup; +if (!appState.initialArgs || (!shouldUseMinimalStartup && !shouldSkipHeavyStartup)) { + if (isAnilistTrackingEnabled(getResolvedConfig())) { + void refreshAnilistClientSecretStateIfEnabled({ force: true }).catch((error) => { + logger.error('Failed to refresh AniList client secret state during startup', error); + }); + anilistStateRuntime.refreshRetryQueueState(); + } + void initializeDiscordPresenceService().catch((error) => { + logger.error('Failed to initialize Discord presence service during startup', error); + }); +} const { createMainWindow: createMainWindowHandler, createModalWindow: createModalWindowHandler } = 
createOverlayWindowRuntimeHandlers({ createOverlayWindowDeps: { diff --git a/src/main/boot/services.test.ts b/src/main/boot/services.test.ts new file mode 100644 index 0000000..c09e69b --- /dev/null +++ b/src/main/boot/services.test.ts @@ -0,0 +1,115 @@ +import assert from 'node:assert/strict'; +import test from 'node:test'; +import { createMainBootServices } from './services'; + +test('createMainBootServices builds boot-phase service bundle', () => { + type MockAppLifecycleApp = { + requestSingleInstanceLock: () => boolean; + quit: () => void; + on: (event: string, listener: (...args: unknown[]) => void) => MockAppLifecycleApp; + whenReady: () => Promise; + }; + + const calls: string[] = []; + let setPathValue: string | null = null; + const appOnCalls: string[] = []; + let secondInstanceHandlerRegistered = false; + + const services = createMainBootServices< + { configDir: string }, + { targetPath: string }, + { targetPath: string }, + { targetPath: string }, + { kind: string }, + { scope: string; warn: () => void; info: () => void; error: () => void }, + { registry: boolean }, + { getModalWindow: () => null }, + { inputState: boolean; getModalInputExclusive: () => boolean; handleModalInputStateChange: (isActive: boolean) => void }, + { measurementStore: boolean }, + { modalRuntime: boolean }, + { mpvSocketPath: string; texthookerPort: number }, + MockAppLifecycleApp + >({ + platform: 'linux', + argv: ['node', 'main.ts'], + appDataDir: undefined, + xdgConfigHome: undefined, + homeDir: '/home/tester', + defaultMpvLogFile: '/tmp/default.log', + envMpvLog: ' /tmp/custom.log ', + defaultTexthookerPort: 5174, + getDefaultSocketPath: () => '/tmp/subminer.sock', + resolveConfigDir: () => '/tmp/subminer-config', + existsSync: () => false, + mkdirSync: (targetPath) => { + calls.push(`mkdir:${targetPath}`); + }, + joinPath: (...parts) => parts.join('/'), + app: { + setPath: (_name, value) => { + setPathValue = value; + }, + quit: () => {}, + on: (event: string) => { + 
appOnCalls.push(event); + return {}; + }, + whenReady: async () => {}, + }, + shouldBypassSingleInstanceLock: () => false, + requestSingleInstanceLockEarly: () => true, + registerSecondInstanceHandlerEarly: () => { + secondInstanceHandlerRegistered = true; + }, + onConfigStartupParseError: () => { + throw new Error('unexpected parse failure'); + }, + createConfigService: (configDir) => ({ configDir }), + createAnilistTokenStore: (targetPath) => ({ targetPath }), + createJellyfinTokenStore: (targetPath) => ({ targetPath }), + createAnilistUpdateQueue: (targetPath) => ({ targetPath }), + createSubtitleWebSocket: () => ({ kind: 'ws' }), + createLogger: (scope) => + ({ + scope, + warn: () => {}, + info: () => {}, + error: () => {}, + }) as const, + createMainRuntimeRegistry: () => ({ registry: true }), + createOverlayManager: () => ({ + getModalWindow: () => null, + }), + createOverlayModalInputState: () => ({ inputState: true, getModalInputExclusive: () => false, handleModalInputStateChange: () => {} }), + createOverlayContentMeasurementStore: () => ({ measurementStore: true }), + getSyncOverlayShortcutsForModal: () => () => {}, + getSyncOverlayVisibilityForModal: () => () => {}, + createOverlayModalRuntime: () => ({ modalRuntime: true }), + createAppState: (input) => ({ ...input }), + }); + + assert.equal(services.configDir, '/tmp/subminer-config'); + assert.equal(services.userDataPath, '/tmp/subminer-config'); + assert.equal(services.defaultMpvLogPath, '/tmp/custom.log'); + assert.equal(services.defaultImmersionDbPath, '/tmp/subminer-config/immersion.sqlite'); + assert.deepEqual(services.configService, { configDir: '/tmp/subminer-config' }); + assert.deepEqual(services.anilistTokenStore, { + targetPath: '/tmp/subminer-config/anilist-token-store.json', + }); + assert.deepEqual(services.jellyfinTokenStore, { + targetPath: '/tmp/subminer-config/jellyfin-token-store.json', + }); + assert.deepEqual(services.anilistUpdateQueue, { + targetPath: 
'/tmp/subminer-config/anilist-retry-queue.json', + }); + assert.deepEqual(services.appState, { + mpvSocketPath: '/tmp/subminer.sock', + texthookerPort: 5174, + }); + assert.equal(services.appLifecycleApp.on('ready', () => {}), services.appLifecycleApp); + assert.equal(services.appLifecycleApp.on('second-instance', () => {}), services.appLifecycleApp); + assert.deepEqual(appOnCalls, ['ready']); + assert.equal(secondInstanceHandlerRegistered, true); + assert.deepEqual(calls, ['mkdir:/tmp/subminer-config']); + assert.equal(setPathValue, '/tmp/subminer-config'); +}); diff --git a/src/main/boot/services.ts b/src/main/boot/services.ts new file mode 100644 index 0000000..862c1fa --- /dev/null +++ b/src/main/boot/services.ts @@ -0,0 +1,279 @@ +import type { BrowserWindow } from 'electron'; +import { ConfigStartupParseError } from '../../config'; + +export interface AppLifecycleShape { + requestSingleInstanceLock: () => boolean; + quit: () => void; + on: (event: string, listener: (...args: unknown[]) => void) => unknown; + whenReady: () => Promise; +} + +export interface OverlayModalInputStateShape { + getModalInputExclusive: () => boolean; + handleModalInputStateChange: (isActive: boolean) => void; +} + +export interface MainBootServicesParams< + TConfigService, + TAnilistTokenStore, + TJellyfinTokenStore, + TAnilistUpdateQueue, + TSubtitleWebSocket, + TLogger, + TRuntimeRegistry, + TOverlayManager, + TOverlayModalInputState, + TOverlayContentMeasurementStore, + TOverlayModalRuntime, + TAppState, + TAppLifecycleApp, +> { + platform: NodeJS.Platform; + argv: string[]; + appDataDir: string | undefined; + xdgConfigHome: string | undefined; + homeDir: string; + defaultMpvLogFile: string; + envMpvLog: string | undefined; + defaultTexthookerPort: number; + getDefaultSocketPath: () => string; + resolveConfigDir: (input: { + platform: NodeJS.Platform; + appDataDir: string | undefined; + xdgConfigHome: string | undefined; + homeDir: string; + existsSync: (targetPath: string) => 
boolean; + }) => string; + existsSync: (targetPath: string) => boolean; + mkdirSync: (targetPath: string, options: { recursive: true }) => void; + joinPath: (...parts: string[]) => string; + app: { + setPath: (name: string, value: string) => void; + quit: () => void; + // eslint-disable-next-line @typescript-eslint/no-unsafe-function-type -- Electron App.on has 50+ overloaded signatures + on: Function; + whenReady: () => Promise; + }; + shouldBypassSingleInstanceLock: () => boolean; + requestSingleInstanceLockEarly: () => boolean; + registerSecondInstanceHandlerEarly: ( + listener: (_event: unknown, argv: string[]) => void, + ) => void; + onConfigStartupParseError: (error: ConfigStartupParseError) => void; + createConfigService: (configDir: string) => TConfigService; + createAnilistTokenStore: (targetPath: string) => TAnilistTokenStore; + createJellyfinTokenStore: (targetPath: string) => TJellyfinTokenStore; + createAnilistUpdateQueue: (targetPath: string) => TAnilistUpdateQueue; + createSubtitleWebSocket: () => TSubtitleWebSocket; + createLogger: (scope: string) => TLogger & { + warn: (message: string) => void; + info: (message: string) => void; + error: (message: string, details?: unknown) => void; + }; + createMainRuntimeRegistry: () => TRuntimeRegistry; + createOverlayManager: () => TOverlayManager; + createOverlayModalInputState: (params: { + getModalWindow: () => BrowserWindow | null; + syncOverlayShortcutsForModal: (isActive: boolean) => void; + syncOverlayVisibilityForModal: () => void; + }) => TOverlayModalInputState; + createOverlayContentMeasurementStore: (params: { + logger: TLogger; + }) => TOverlayContentMeasurementStore; + getSyncOverlayShortcutsForModal: () => (isActive: boolean) => void; + getSyncOverlayVisibilityForModal: () => () => void; + createOverlayModalRuntime: (params: { + overlayManager: TOverlayManager; + overlayModalInputState: TOverlayModalInputState; + onModalStateChange: (isActive: boolean) => void; + }) => TOverlayModalRuntime; + 
createAppState: (input: { + mpvSocketPath: string; + texthookerPort: number; + }) => TAppState; +} + +export interface MainBootServicesResult< + TConfigService, + TAnilistTokenStore, + TJellyfinTokenStore, + TAnilistUpdateQueue, + TSubtitleWebSocket, + TLogger, + TRuntimeRegistry, + TOverlayManager, + TOverlayModalInputState, + TOverlayContentMeasurementStore, + TOverlayModalRuntime, + TAppState, + TAppLifecycleApp, +> { + configDir: string; + userDataPath: string; + defaultMpvLogPath: string; + defaultImmersionDbPath: string; + configService: TConfigService; + anilistTokenStore: TAnilistTokenStore; + jellyfinTokenStore: TJellyfinTokenStore; + anilistUpdateQueue: TAnilistUpdateQueue; + subtitleWsService: TSubtitleWebSocket; + annotationSubtitleWsService: TSubtitleWebSocket; + logger: TLogger; + runtimeRegistry: TRuntimeRegistry; + overlayManager: TOverlayManager; + overlayModalInputState: TOverlayModalInputState; + overlayContentMeasurementStore: TOverlayContentMeasurementStore; + overlayModalRuntime: TOverlayModalRuntime; + appState: TAppState; + appLifecycleApp: TAppLifecycleApp; +} + +export function createMainBootServices< + TConfigService, + TAnilistTokenStore, + TJellyfinTokenStore, + TAnilistUpdateQueue, + TSubtitleWebSocket, + TLogger, + TRuntimeRegistry, + TOverlayManager extends { getModalWindow: () => BrowserWindow | null }, + TOverlayModalInputState extends OverlayModalInputStateShape, + TOverlayContentMeasurementStore, + TOverlayModalRuntime, + TAppState, + TAppLifecycleApp extends AppLifecycleShape, +>( + params: MainBootServicesParams< + TConfigService, + TAnilistTokenStore, + TJellyfinTokenStore, + TAnilistUpdateQueue, + TSubtitleWebSocket, + TLogger, + TRuntimeRegistry, + TOverlayManager, + TOverlayModalInputState, + TOverlayContentMeasurementStore, + TOverlayModalRuntime, + TAppState, + TAppLifecycleApp + >, +): MainBootServicesResult< + TConfigService, + TAnilistTokenStore, + TJellyfinTokenStore, + TAnilistUpdateQueue, + TSubtitleWebSocket, + 
TLogger, + TRuntimeRegistry, + TOverlayManager, + TOverlayModalInputState, + TOverlayContentMeasurementStore, + TOverlayModalRuntime, + TAppState, + TAppLifecycleApp +> { + const configDir = params.resolveConfigDir({ + platform: params.platform, + appDataDir: params.appDataDir, + xdgConfigHome: params.xdgConfigHome, + homeDir: params.homeDir, + existsSync: params.existsSync, + }); + const userDataPath = configDir; + const defaultMpvLogPath = params.envMpvLog?.trim() || params.defaultMpvLogFile; + const defaultImmersionDbPath = params.joinPath(userDataPath, 'immersion.sqlite'); + + const configService = (() => { + try { + return params.createConfigService(configDir); + } catch (error) { + if (error instanceof ConfigStartupParseError) { + params.onConfigStartupParseError(error); + } + throw error; + } + })(); + + const anilistTokenStore = params.createAnilistTokenStore( + params.joinPath(userDataPath, 'anilist-token-store.json'), + ); + const jellyfinTokenStore = params.createJellyfinTokenStore( + params.joinPath(userDataPath, 'jellyfin-token-store.json'), + ); + const anilistUpdateQueue = params.createAnilistUpdateQueue( + params.joinPath(userDataPath, 'anilist-retry-queue.json'), + ); + const subtitleWsService = params.createSubtitleWebSocket(); + const annotationSubtitleWsService = params.createSubtitleWebSocket(); + const logger = params.createLogger('main'); + const runtimeRegistry = params.createMainRuntimeRegistry(); + const overlayManager = params.createOverlayManager(); + const overlayModalInputState = params.createOverlayModalInputState({ + getModalWindow: () => overlayManager.getModalWindow(), + syncOverlayShortcutsForModal: (isActive: boolean) => { + params.getSyncOverlayShortcutsForModal()(isActive); + }, + syncOverlayVisibilityForModal: () => { + params.getSyncOverlayVisibilityForModal()(); + }, + }); + const overlayContentMeasurementStore = params.createOverlayContentMeasurementStore({ + logger, + }); + const overlayModalRuntime = 
params.createOverlayModalRuntime({ + overlayManager, + overlayModalInputState, + onModalStateChange: (isActive: boolean) => + overlayModalInputState.handleModalInputStateChange(isActive), + }); + const appState = params.createAppState({ + mpvSocketPath: params.getDefaultSocketPath(), + texthookerPort: params.defaultTexthookerPort, + }); + + if (!params.existsSync(userDataPath)) { + params.mkdirSync(userDataPath, { recursive: true }); + } + params.app.setPath('userData', userDataPath); + + const appLifecycleApp = { + requestSingleInstanceLock: () => + params.shouldBypassSingleInstanceLock() + ? true + : params.requestSingleInstanceLockEarly(), + quit: () => params.app.quit(), + on: (event: string, listener: (...args: unknown[]) => void) => { + if (event === 'second-instance') { + params.registerSecondInstanceHandlerEarly( + listener as (_event: unknown, argv: string[]) => void, + ); + return appLifecycleApp; + } + params.app.on(event, listener); + return appLifecycleApp; + }, + whenReady: () => params.app.whenReady(), + } satisfies AppLifecycleShape as TAppLifecycleApp; + + return { + configDir, + userDataPath, + defaultMpvLogPath, + defaultImmersionDbPath, + configService, + anilistTokenStore, + jellyfinTokenStore, + anilistUpdateQueue, + subtitleWsService, + annotationSubtitleWsService, + logger, + runtimeRegistry, + overlayManager, + overlayModalInputState, + overlayContentMeasurementStore, + overlayModalRuntime, + appState, + appLifecycleApp, + }; +} diff --git a/src/main/character-dictionary-runtime.ts b/src/main/character-dictionary-runtime.ts index dc21b3b..3480500 100644 --- a/src/main/character-dictionary-runtime.ts +++ b/src/main/character-dictionary-runtime.ts @@ -1,1028 +1,60 @@ import * as fs from 'fs'; import * as os from 'os'; import * as path from 'path'; -import { createHash } from 'node:crypto'; -import type { AnilistMediaGuess } from '../core/services/anilist/anilist-updater'; -import type { AnilistCharacterDictionaryCollapsibleSectionKey } from 
'../types'; import { hasVideoExtension } from '../shared/video-extensions'; +import { + applyCollapsibleOpenStatesToTermEntries, + buildDictionaryTitle, + buildDictionaryZip, + buildSnapshotFromCharacters, + buildSnapshotImagePath, + buildVaImagePath, +} from './character-dictionary-runtime/build'; +import { + buildMergedRevision, + getMergedZipPath, + getSnapshotPath, + normalizeMergedMediaIds, + readSnapshot, + writeSnapshot, +} from './character-dictionary-runtime/cache'; +import { + ANILIST_REQUEST_DELAY_MS, + CHARACTER_DICTIONARY_MERGED_TITLE, + CHARACTER_IMAGE_DOWNLOAD_DELAY_MS, +} from './character-dictionary-runtime/constants'; +import { + downloadCharacterImage, + fetchCharactersForMedia, + resolveAniListMediaIdFromGuess, +} from './character-dictionary-runtime/fetch'; +import type { + CharacterDictionaryBuildResult, + CharacterDictionaryGenerateOptions, + CharacterDictionaryRuntimeDeps, + CharacterDictionarySnapshotImage, + CharacterDictionarySnapshotProgress, + CharacterDictionarySnapshotProgressCallbacks, + CharacterDictionarySnapshotResult, + MergedCharacterDictionaryBuildResult, + ResolvedAniListMedia, +} from './character-dictionary-runtime/types'; -const ANILIST_GRAPHQL_URL = 'https://graphql.anilist.co'; -const ANILIST_REQUEST_DELAY_MS = 2000; -const CHARACTER_IMAGE_DOWNLOAD_DELAY_MS = 250; -const HONORIFIC_SUFFIXES = [ - { term: 'さん', reading: 'さん' }, - { term: '様', reading: 'さま' }, - { term: '先生', reading: 'せんせい' }, - { term: '先輩', reading: 'せんぱい' }, - { term: '後輩', reading: 'こうはい' }, - { term: '氏', reading: 'し' }, - { term: '君', reading: 'くん' }, - { term: 'くん', reading: 'くん' }, - { term: 'ちゃん', reading: 'ちゃん' }, - { term: 'たん', reading: 'たん' }, - { term: '坊', reading: 'ぼう' }, - { term: '殿', reading: 'どの' }, - { term: '博士', reading: 'はかせ' }, - { term: '社長', reading: 'しゃちょう' }, - { term: '部長', reading: 'ぶちょう' }, -] as const; -type CharacterDictionaryRole = 'main' | 'primary' | 'side' | 'appears'; - -type CharacterDictionaryGlossaryEntry = string | 
Record; -type CharacterDictionaryTermEntry = [ - string, - string, - string, - string, - number, - CharacterDictionaryGlossaryEntry[], - number, - string, -]; - -type CharacterDictionarySnapshotImage = { - path: string; - dataBase64: string; -}; - -type CharacterBirthday = [number, number]; - -type JapaneseNameParts = { - hasSpace: boolean; - original: string; - combined: string; - family: string | null; - given: string | null; -}; - -type NameReadings = { - hasSpace: boolean; - original: string; - full: string; - family: string; - given: string; -}; - -export type CharacterDictionarySnapshot = { - formatVersion: number; - mediaId: number; - mediaTitle: string; - entryCount: number; - updatedAt: number; - termEntries: CharacterDictionaryTermEntry[]; - images: CharacterDictionarySnapshotImage[]; -}; - -const CHARACTER_DICTIONARY_FORMAT_VERSION = 15; -const CHARACTER_DICTIONARY_MERGED_TITLE = 'SubMiner Character Dictionary'; - -type AniListSearchResponse = { - Page?: { - media?: Array<{ - id: number; - episodes?: number | null; - title?: { - romaji?: string | null; - english?: string | null; - native?: string | null; - }; - }>; - }; -}; - -type AniListCharacterPageResponse = { - Media?: { - title?: { - romaji?: string | null; - english?: string | null; - native?: string | null; - }; - characters?: { - pageInfo?: { - hasNextPage?: boolean | null; - }; - edges?: Array<{ - role?: string | null; - voiceActors?: Array<{ - id: number; - name?: { - full?: string | null; - native?: string | null; - } | null; - image?: { - large?: string | null; - medium?: string | null; - } | null; - }> | null; - node?: { - id: number; - description?: string | null; - image?: { - large?: string | null; - medium?: string | null; - } | null; - gender?: string | null; - age?: string | number | null; - dateOfBirth?: { - month?: number | null; - day?: number | null; - } | null; - bloodType?: string | null; - name?: { - first?: string | null; - full?: string | null; - last?: string | null; - 
native?: string | null; - alternative?: Array | null; - } | null; - } | null; - } | null>; - } | null; - } | null; -}; - -type VoiceActorRecord = { - id: number; - fullName: string; - nativeName: string; - imageUrl: string | null; -}; - -type CharacterRecord = { - id: number; - role: CharacterDictionaryRole; - firstNameHint: string; - fullName: string; - lastNameHint: string; - nativeName: string; - alternativeNames: string[]; - bloodType: string; - birthday: CharacterBirthday | null; - description: string; - imageUrl: string | null; - age: string; - sex: string; - voiceActors: VoiceActorRecord[]; -}; - -type ZipEntry = { - name: string; - data: Buffer; - crc32: number; - localHeaderOffset: number; -}; - -export type CharacterDictionaryBuildResult = { - zipPath: string; - fromCache: boolean; - mediaId: number; - mediaTitle: string; - entryCount: number; - dictionaryTitle?: string; - revision?: string; -}; - -export type CharacterDictionaryGenerateOptions = { - refreshTtlMs?: number; -}; - -export type CharacterDictionarySnapshotResult = { - mediaId: number; - mediaTitle: string; - entryCount: number; - fromCache: boolean; - updatedAt: number; -}; - -export type CharacterDictionarySnapshotProgress = { - mediaId: number; - mediaTitle: string; -}; - -export type CharacterDictionarySnapshotProgressCallbacks = { - onChecking?: (progress: CharacterDictionarySnapshotProgress) => void; - onGenerating?: (progress: CharacterDictionarySnapshotProgress) => void; -}; - -export type MergedCharacterDictionaryBuildResult = { - zipPath: string; - revision: string; - dictionaryTitle: string; - entryCount: number; -}; - -export interface CharacterDictionaryRuntimeDeps { - userDataPath: string; - getCurrentMediaPath: () => string | null; - getCurrentMediaTitle: () => string | null; - resolveMediaPathForJimaku: (mediaPath: string | null) => string | null; - guessAnilistMediaInfo: ( - mediaPath: string | null, - mediaTitle: string | null, - ) => Promise; - now: () => number; - sleep?: 
(ms: number) => Promise; - logInfo?: (message: string) => void; - logWarn?: (message: string) => void; - getCollapsibleSectionOpenState?: ( - section: AnilistCharacterDictionaryCollapsibleSectionKey, - ) => boolean; -} - -type ResolvedAniListMedia = { - id: number; - title: string; -}; +export type { + CharacterDictionaryBuildResult, + CharacterDictionaryGenerateOptions, + CharacterDictionaryRuntimeDeps, + CharacterDictionarySnapshot, + CharacterDictionarySnapshotProgress, + CharacterDictionarySnapshotProgressCallbacks, + CharacterDictionarySnapshotResult, + MergedCharacterDictionaryBuildResult, +} from './character-dictionary-runtime/types'; function sleep(ms: number): Promise { return new Promise((resolve) => setTimeout(resolve, ms)); } -function normalizeTitle(value: string): string { - return value.trim().toLowerCase().replace(/\s+/g, ' '); -} - -function pickAniListSearchResult( - title: string, - episode: number | null, - media: Array<{ - id: number; - episodes?: number | null; - title?: { - romaji?: string | null; - english?: string | null; - native?: string | null; - }; - }>, -): ResolvedAniListMedia | null { - if (media.length === 0) return null; - - const episodeFiltered = - typeof episode === 'number' && episode > 0 - ? media.filter((entry) => entry.episodes == null || entry.episodes >= episode) - : media; - const candidates = episodeFiltered.length > 0 ? episodeFiltered : media; - const normalizedInput = normalizeTitle(title); - const exact = candidates.find((entry) => { - const candidateTitles = [entry.title?.romaji, entry.title?.english, entry.title?.native] - .filter((value): value is string => typeof value === 'string' && value.trim().length > 0) - .map((value) => normalizeTitle(value)); - return candidateTitles.includes(normalizedInput); - }); - const selected = exact ?? 
candidates[0]!; - const selectedTitle = - selected.title?.english?.trim() || - selected.title?.romaji?.trim() || - selected.title?.native?.trim() || - title; - return { - id: selected.id, - title: selectedTitle, - }; -} - -function hasKanaOnly(value: string): boolean { - return /^[\u3040-\u309f\u30a0-\u30ffー]+$/.test(value); -} - -function katakanaToHiragana(value: string): string { - let output = ''; - for (const char of value) { - const code = char.charCodeAt(0); - if (code >= 0x30a1 && code <= 0x30f6) { - output += String.fromCharCode(code - 0x60); - continue; - } - output += char; - } - return output; -} - -function buildReading(term: string): string { - const compact = term.replace(/\s+/g, '').trim(); - if (!compact || !hasKanaOnly(compact)) { - return ''; - } - return katakanaToHiragana(compact); -} - -function containsKanji(value: string): boolean { - for (const char of value) { - const code = char.charCodeAt(0); - if ((code >= 0x4e00 && code <= 0x9fff) || (code >= 0x3400 && code <= 0x4dbf)) { - return true; - } - } - return false; -} - -function isRomanizedName(value: string): boolean { - return /^[A-Za-zĀĪŪĒŌÂÊÎÔÛāīūēōâêîôû'’.\-\s]+$/.test(value); -} - -function normalizeRomanizedName(value: string): string { - return value - .normalize('NFKC') - .toLowerCase() - .replace(/[’']/g, '') - .replace(/[.\-]/g, ' ') - .replace(/ā|â/g, 'aa') - .replace(/ī|î/g, 'ii') - .replace(/ū|û/g, 'uu') - .replace(/ē|ê/g, 'ei') - .replace(/ō|ô/g, 'ou') - .replace(/\s+/g, ' ') - .trim(); -} - -const ROMANIZED_KANA_DIGRAPHS: ReadonlyArray<[string, string]> = [ - ['kya', 'キャ'], - ['kyu', 'キュ'], - ['kyo', 'キョ'], - ['gya', 'ギャ'], - ['gyu', 'ギュ'], - ['gyo', 'ギョ'], - ['sha', 'シャ'], - ['shu', 'シュ'], - ['sho', 'ショ'], - ['sya', 'シャ'], - ['syu', 'シュ'], - ['syo', 'ショ'], - ['ja', 'ジャ'], - ['ju', 'ジュ'], - ['jo', 'ジョ'], - ['jya', 'ジャ'], - ['jyu', 'ジュ'], - ['jyo', 'ジョ'], - ['cha', 'チャ'], - ['chu', 'チュ'], - ['cho', 'チョ'], - ['tya', 'チャ'], - ['tyu', 'チュ'], - ['tyo', 'チョ'], - ['cya', 'チャ'], - 
['cyu', 'チュ'], - ['cyo', 'チョ'], - ['nya', 'ニャ'], - ['nyu', 'ニュ'], - ['nyo', 'ニョ'], - ['hya', 'ヒャ'], - ['hyu', 'ヒュ'], - ['hyo', 'ヒョ'], - ['bya', 'ビャ'], - ['byu', 'ビュ'], - ['byo', 'ビョ'], - ['pya', 'ピャ'], - ['pyu', 'ピュ'], - ['pyo', 'ピョ'], - ['mya', 'ミャ'], - ['myu', 'ミュ'], - ['myo', 'ミョ'], - ['rya', 'リャ'], - ['ryu', 'リュ'], - ['ryo', 'リョ'], - ['fa', 'ファ'], - ['fi', 'フィ'], - ['fe', 'フェ'], - ['fo', 'フォ'], - ['fyu', 'フュ'], - ['fyo', 'フョ'], - ['fya', 'フャ'], - ['va', 'ヴァ'], - ['vi', 'ヴィ'], - ['vu', 'ヴ'], - ['ve', 'ヴェ'], - ['vo', 'ヴォ'], - ['she', 'シェ'], - ['che', 'チェ'], - ['je', 'ジェ'], - ['tsi', 'ツィ'], - ['tse', 'ツェ'], - ['tsa', 'ツァ'], - ['tso', 'ツォ'], - ['thi', 'ティ'], - ['thu', 'テュ'], - ['dhi', 'ディ'], - ['dhu', 'デュ'], - ['wi', 'ウィ'], - ['we', 'ウェ'], - ['wo', 'ウォ'], -]; - -const ROMANIZED_KANA_MONOGRAPHS: ReadonlyArray<[string, string]> = [ - ['a', 'ア'], - ['i', 'イ'], - ['u', 'ウ'], - ['e', 'エ'], - ['o', 'オ'], - ['ka', 'カ'], - ['ki', 'キ'], - ['ku', 'ク'], - ['ke', 'ケ'], - ['ko', 'コ'], - ['ga', 'ガ'], - ['gi', 'ギ'], - ['gu', 'グ'], - ['ge', 'ゲ'], - ['go', 'ゴ'], - ['sa', 'サ'], - ['shi', 'シ'], - ['si', 'シ'], - ['su', 'ス'], - ['se', 'セ'], - ['so', 'ソ'], - ['za', 'ザ'], - ['ji', 'ジ'], - ['zi', 'ジ'], - ['zu', 'ズ'], - ['ze', 'ゼ'], - ['zo', 'ゾ'], - ['ta', 'タ'], - ['chi', 'チ'], - ['ti', 'チ'], - ['tsu', 'ツ'], - ['tu', 'ツ'], - ['te', 'テ'], - ['to', 'ト'], - ['da', 'ダ'], - ['de', 'デ'], - ['do', 'ド'], - ['na', 'ナ'], - ['ni', 'ニ'], - ['nu', 'ヌ'], - ['ne', 'ネ'], - ['no', 'ノ'], - ['ha', 'ハ'], - ['hi', 'ヒ'], - ['fu', 'フ'], - ['hu', 'フ'], - ['he', 'ヘ'], - ['ho', 'ホ'], - ['ba', 'バ'], - ['bi', 'ビ'], - ['bu', 'ブ'], - ['be', 'ベ'], - ['bo', 'ボ'], - ['pa', 'パ'], - ['pi', 'ピ'], - ['pu', 'プ'], - ['pe', 'ペ'], - ['po', 'ポ'], - ['ma', 'マ'], - ['mi', 'ミ'], - ['mu', 'ム'], - ['me', 'メ'], - ['mo', 'モ'], - ['ya', 'ヤ'], - ['yu', 'ユ'], - ['yo', 'ヨ'], - ['ra', 'ラ'], - ['ri', 'リ'], - ['ru', 'ル'], - ['re', 'レ'], - ['ro', 'ロ'], - ['wa', 'ワ'], - ['w', 'ウ'], - ['wo', 'ヲ'], - ['n', 'ン'], -]; - -function 
romanizedTokenToKatakana(token: string): string | null { - const normalized = normalizeRomanizedName(token).replace(/\s+/g, ''); - if (!normalized || !/^[a-z]+$/.test(normalized)) { - return null; - } - - let output = ''; - for (let i = 0; i < normalized.length; ) { - const current = normalized[i]!; - const next = normalized[i + 1] ?? ''; - - if ( - i + 1 < normalized.length && - current === next && - current !== 'n' && - !'aeiou'.includes(current) - ) { - output += 'ッ'; - i += 1; - continue; - } - - if (current === 'n' && next.length > 0 && next !== 'y' && !'aeiou'.includes(next)) { - output += 'ン'; - i += 1; - continue; - } - - const digraph = ROMANIZED_KANA_DIGRAPHS.find(([romaji]) => normalized.startsWith(romaji, i)); - if (digraph) { - output += digraph[1]; - i += digraph[0].length; - continue; - } - - const monograph = ROMANIZED_KANA_MONOGRAPHS.find(([romaji]) => - normalized.startsWith(romaji, i), - ); - if (monograph) { - output += monograph[1]; - i += monograph[0].length; - continue; - } - - return null; - } - - return output.length > 0 ? output : null; -} - -function buildReadingFromRomanized(value: string): string { - const katakana = romanizedTokenToKatakana(value); - return katakana ? 
katakanaToHiragana(katakana) : ''; -} - -function buildReadingFromHint(value: string): string { - return buildReading(value) || buildReadingFromRomanized(value); -} - -function scoreJapaneseNamePartLength(length: number): number { - if (length === 2) return 3; - if (length === 1 || length === 3) return 2; - if (length === 4) return 1; - return 0; -} - -function inferJapaneseNameSplitIndex( - nameOriginal: string, - firstNameHint: string, - lastNameHint: string, -): number | null { - const chars = [...nameOriginal]; - if (chars.length < 2) return null; - - const familyHintLength = [...buildReadingFromHint(lastNameHint)].length; - const givenHintLength = [...buildReadingFromHint(firstNameHint)].length; - const totalHintLength = familyHintLength + givenHintLength; - const defaultBoundary = Math.round(chars.length / 2); - let bestIndex: number | null = null; - let bestScore = Number.NEGATIVE_INFINITY; - - for (let index = 1; index < chars.length; index += 1) { - const familyLength = index; - const givenLength = chars.length - index; - let score = - scoreJapaneseNamePartLength(familyLength) + scoreJapaneseNamePartLength(givenLength); - - if (chars.length >= 4 && familyLength >= 2 && givenLength >= 2) { - score += 1; - } - - if (totalHintLength > 0) { - const expectedFamilyLength = (chars.length * familyHintLength) / totalHintLength; - score -= Math.abs(familyLength - expectedFamilyLength) * 1.5; - } else { - score -= Math.abs(familyLength - defaultBoundary) * 0.5; - } - - if (familyLength === givenLength) { - score += 0.25; - } - - if (score > bestScore) { - bestScore = score; - bestIndex = index; - } - } - - return bestIndex; -} - -function addRomanizedKanaAliases(values: Iterable): string[] { - const aliases = new Set(); - for (const value of values) { - const trimmed = value.trim(); - if (!trimmed || !isRomanizedName(trimmed)) continue; - const katakana = romanizedTokenToKatakana(trimmed); - if (katakana) { - aliases.add(katakana); - } - } - return [...aliases]; -} - 
-function splitJapaneseName( - nameOriginal: string, - firstNameHint?: string, - lastNameHint?: string, -): JapaneseNameParts { - const trimmed = nameOriginal.trim(); - if (!trimmed) { - return { - hasSpace: false, - original: '', - combined: '', - family: null, - given: null, - }; - } - - const normalizedSpace = trimmed.replace(/[\s\u3000]+/g, ' ').trim(); - const spaceParts = normalizedSpace.split(' ').filter((part) => part.length > 0); - if (spaceParts.length === 2) { - const family = spaceParts[0]!; - const given = spaceParts[1]!; - return { - hasSpace: true, - original: normalizedSpace, - combined: `${family}${given}`, - family, - given, - }; - } - - const middleDotParts = trimmed - .split(/[・・·•]/) - .map((part) => part.trim()) - .filter((part) => part.length > 0); - if (middleDotParts.length === 2) { - const family = middleDotParts[0]!; - const given = middleDotParts[1]!; - return { - hasSpace: true, - original: trimmed, - combined: `${family}${given}`, - family, - given, - }; - } - - const hintedFirst = firstNameHint?.trim() || ''; - const hintedLast = lastNameHint?.trim() || ''; - if (hintedFirst && hintedLast) { - const familyGiven = `${hintedLast}${hintedFirst}`; - if (trimmed === familyGiven) { - return { - hasSpace: true, - original: trimmed, - combined: familyGiven, - family: hintedLast, - given: hintedFirst, - }; - } - - const givenFamily = `${hintedFirst}${hintedLast}`; - if (trimmed === givenFamily) { - return { - hasSpace: true, - original: trimmed, - combined: givenFamily, - family: hintedFirst, - given: hintedLast, - }; - } - } - - if (hintedFirst && hintedLast && containsKanji(trimmed)) { - const splitIndex = inferJapaneseNameSplitIndex(trimmed, hintedFirst, hintedLast); - if (splitIndex != null) { - const chars = [...trimmed]; - const family = chars.slice(0, splitIndex).join(''); - const given = chars.slice(splitIndex).join(''); - if (family && given) { - return { - hasSpace: true, - original: trimmed, - combined: trimmed, - family, - given, - 
}; - } - } - } - - return { - hasSpace: false, - original: trimmed, - combined: trimmed, - family: null, - given: null, - }; -} - -function generateNameReadings( - nameOriginal: string, - romanizedName: string, - firstNameHint?: string, - lastNameHint?: string, -): NameReadings { - const trimmed = nameOriginal.trim(); - if (!trimmed) { - return { - hasSpace: false, - original: '', - full: '', - family: '', - given: '', - }; - } - - const nameParts = splitJapaneseName(trimmed, firstNameHint, lastNameHint); - if (!nameParts.hasSpace || !nameParts.family || !nameParts.given) { - const full = containsKanji(trimmed) - ? buildReadingFromRomanized(romanizedName) - : buildReading(trimmed); - return { - hasSpace: false, - original: trimmed, - full, - family: full, - given: full, - }; - } - - const romanizedParts = romanizedName - .trim() - .split(/\s+/) - .filter((part) => part.length > 0); - const familyFromHints = buildReadingFromHint(lastNameHint || ''); - const givenFromHints = buildReadingFromHint(firstNameHint || ''); - const familyRomajiFallback = romanizedParts[0] || ''; - const givenRomajiFallback = romanizedParts.slice(1).join(' '); - const family = - familyFromHints || - (containsKanji(nameParts.family) - ? buildReadingFromRomanized(familyRomajiFallback) - : buildReading(nameParts.family)); - const given = - givenFromHints || - (containsKanji(nameParts.given) - ? 
buildReadingFromRomanized(givenRomajiFallback) - : buildReading(nameParts.given)); - const full = - `${family}${given}` || buildReading(trimmed) || buildReadingFromRomanized(romanizedName); - - return { - hasSpace: true, - original: nameParts.original, - full, - family, - given, - }; -} - -function expandRawNameVariants(rawName: string): string[] { - const trimmed = rawName.trim(); - if (!trimmed) return []; - - const variants = new Set([trimmed]); - const outer = trimmed - .replace(/[((][^()()]+[))]/g, ' ') - .replace(/\s+/g, ' ') - .trim(); - if (outer && outer !== trimmed) { - variants.add(outer); - } - - for (const match of trimmed.matchAll(/[((]([^()()]+)[))]/g)) { - const inner = match[1]?.trim() || ''; - if (inner) { - variants.add(inner); - } - } - - return [...variants]; -} - -function buildNameTerms(character: CharacterRecord): string[] { - const base = new Set(); - const rawNames = [character.nativeName, character.fullName, ...character.alternativeNames]; - for (const rawName of rawNames) { - for (const name of expandRawNameVariants(rawName)) { - base.add(name); - - const compact = name.replace(/[\s\u3000]+/g, ''); - if (compact && compact !== name) { - base.add(compact); - } - - const noMiddleDots = compact.replace(/[・・·•]/g, ''); - if (noMiddleDots && noMiddleDots !== compact) { - base.add(noMiddleDots); - } - - const split = name.split(/[\s\u3000]+/).filter((part) => part.trim().length > 0); - if (split.length === 2) { - base.add(split[0]!); - base.add(split[1]!); - } - - const splitByMiddleDot = name - .split(/[・・·•]/) - .map((part) => part.trim()) - .filter((part) => part.length > 0); - if (splitByMiddleDot.length >= 2) { - for (const part of splitByMiddleDot) { - base.add(part); - } - } - } - } - - const nativeParts = splitJapaneseName( - character.nativeName, - character.firstNameHint, - character.lastNameHint, - ); - if (nativeParts.family) { - base.add(nativeParts.family); - } - if (nativeParts.given) { - base.add(nativeParts.given); - } - - 
const withHonorifics = new Set(); - for (const entry of base) { - withHonorifics.add(entry); - for (const suffix of HONORIFIC_SUFFIXES) { - withHonorifics.add(`${entry}${suffix.term}`); - } - } - - for (const alias of addRomanizedKanaAliases(withHonorifics)) { - withHonorifics.add(alias); - for (const suffix of HONORIFIC_SUFFIXES) { - withHonorifics.add(`${alias}${suffix.term}`); - } - } - - return [...withHonorifics].filter((entry) => entry.trim().length > 0); -} - -const MONTH_NAMES: ReadonlyArray<[number, string]> = [ - [1, 'January'], - [2, 'February'], - [3, 'March'], - [4, 'April'], - [5, 'May'], - [6, 'June'], - [7, 'July'], - [8, 'August'], - [9, 'September'], - [10, 'October'], - [11, 'November'], - [12, 'December'], -]; - -const SEX_DISPLAY: ReadonlyArray<[string, string]> = [ - ['m', '♂ Male'], - ['f', '♀ Female'], - ['male', '♂ Male'], - ['female', '♀ Female'], -]; - -function formatBirthday(birthday: CharacterBirthday | null): string { - if (!birthday) return ''; - const [month, day] = birthday; - const monthName = MONTH_NAMES.find(([m]) => m === month)?.[1] || 'Unknown'; - return `${monthName} ${day}`; -} - -function formatCharacterStats(character: CharacterRecord): string { - const parts: string[] = []; - const normalizedSex = character.sex.trim().toLowerCase(); - const sexDisplay = SEX_DISPLAY.find(([key]) => key === normalizedSex)?.[1]; - if (sexDisplay) parts.push(sexDisplay); - if (character.age.trim()) parts.push(`${character.age.trim()} years`); - if (character.bloodType.trim()) parts.push(`Blood Type ${character.bloodType.trim()}`); - const birthday = formatBirthday(character.birthday); - if (birthday) parts.push(`Birthday: ${birthday}`); - return parts.join(' • '); -} - -function buildReadingForTerm( - term: string, - character: CharacterRecord, - readings: NameReadings, - nameParts: JapaneseNameParts, -): string { - for (const suffix of HONORIFIC_SUFFIXES) { - if (term.endsWith(suffix.term) && term.length > suffix.term.length) { - const 
baseTerm = term.slice(0, -suffix.term.length); - const baseReading = buildReadingForTerm(baseTerm, character, readings, nameParts); - return baseReading ? `${baseReading}${suffix.reading}` : ''; - } - } - - const compactNative = character.nativeName.replace(/[\s\u3000]+/g, ''); - const noMiddleDotsNative = compactNative.replace(/[・・·•]/g, ''); - if ( - term === character.nativeName || - term === compactNative || - term === noMiddleDotsNative || - term === nameParts.original || - term === nameParts.combined - ) { - return readings.full; - } - - const familyCompact = nameParts.family?.replace(/[・・·•]/g, '') || ''; - if (nameParts.family && (term === nameParts.family || term === familyCompact)) { - return readings.family; - } - - const givenCompact = nameParts.given?.replace(/[・・·•]/g, '') || ''; - if (nameParts.given && (term === nameParts.given || term === givenCompact)) { - return readings.given; - } - - const compact = term.replace(/[\s\u3000]+/g, ''); - if (hasKanaOnly(compact)) { - return buildReading(compact); - } - - if (isRomanizedName(term)) { - return buildReadingFromRomanized(term) || readings.full; - } - - return ''; -} - -function parseCharacterDescription(raw: string): { - fields: Array<{ key: string; value: string }>; - text: string; -} { - const cleaned = raw.replace(//gi, '\n').replace(/<[^>]+>/g, ' '); - const lines = cleaned.split(/\n/); - const fields: Array<{ key: string; value: string }> = []; - const textLines: string[] = []; - - for (const line of lines) { - const trimmed = line.trim(); - if (!trimmed) continue; - const match = trimmed.match(/^__([^_]+):__\s*(.+)$/); - if (match) { - const value = match[2]! 
- .replace(/__([^_]+)__/g, '$1') - .replace(/\*\*([^*]+)\*\*/g, '$1') - .replace(/_([^_]+)_/g, '$1') - .replace(/\*([^*]+)\*/g, '$1') - .trim(); - fields.push({ key: match[1]!.trim(), value }); - } else { - textLines.push(trimmed); - } - } - - const text = textLines - .join(' ') - .replace(/\[([^\]]+)\]\((https?:\/\/[^)\s]+)\)/g, '$1') - .replace(/https?:\/\/\S+/g, '') - .replace(/__([^_]+)__/g, '$1') - .replace(/\*\*([^*]+)\*\*/g, '$1') - .replace(/~!/g, '') - .replace(/!~/g, '') - .replace(/\s+/g, ' ') - .trim(); - - return { fields, text }; -} - -function roleInfo(role: CharacterDictionaryRole): { tag: string; score: number } { - if (role === 'main') return { tag: 'main', score: 100 }; - if (role === 'primary') return { tag: 'primary', score: 75 }; - if (role === 'side') return { tag: 'side', score: 50 }; - return { tag: 'appears', score: 25 }; -} - -function mapRole(input: string | null | undefined): CharacterDictionaryRole { - const value = (input || '').trim().toUpperCase(); - if (value === 'MAIN') return 'main'; - if (value === 'SUPPORTING') return 'primary'; - if (value === 'BACKGROUND') return 'side'; - return 'side'; -} - -function roleLabel(role: CharacterDictionaryRole): string { - if (role === 'main') return 'Protagonist'; - if (role === 'primary') return 'Main Character'; - if (role === 'side') return 'Side Character'; - return 'Minor Role'; -} - -function inferImageExt(contentType: string | null): string { - const normalized = (contentType || '').toLowerCase(); - if (normalized.includes('png')) return 'png'; - if (normalized.includes('gif')) return 'gif'; - if (normalized.includes('webp')) return 'webp'; - return 'jpg'; -} - -function ensureDir(dirPath: string): void { - if (fs.existsSync(dirPath)) return; - fs.mkdirSync(dirPath, { recursive: true }); -} - function expandUserPath(input: string): string { if (input.startsWith('~')) { return path.join(os.homedir(), input.slice(1)); @@ -1098,898 +130,6 @@ function 
resolveDictionaryGuessInputs(targetPath: string): { throw new Error(`Dictionary target must be a file or directory path: ${targetPath}`); } -function getSnapshotsDir(outputDir: string): string { - return path.join(outputDir, 'snapshots'); -} - -function getSnapshotPath(outputDir: string, mediaId: number): string { - return path.join(getSnapshotsDir(outputDir), `anilist-${mediaId}.json`); -} - -function getMergedZipPath(outputDir: string): string { - return path.join(outputDir, 'merged.zip'); -} - -function readSnapshot(snapshotPath: string): CharacterDictionarySnapshot | null { - try { - const raw = fs.readFileSync(snapshotPath, 'utf8'); - const parsed = JSON.parse(raw) as Partial; - if (!parsed || typeof parsed !== 'object') { - return null; - } - if ( - parsed.formatVersion !== CHARACTER_DICTIONARY_FORMAT_VERSION || - typeof parsed.mediaId !== 'number' || - typeof parsed.mediaTitle !== 'string' || - typeof parsed.entryCount !== 'number' || - typeof parsed.updatedAt !== 'number' || - !Array.isArray(parsed.termEntries) || - !Array.isArray(parsed.images) - ) { - return null; - } - return { - formatVersion: parsed.formatVersion, - mediaId: parsed.mediaId, - mediaTitle: parsed.mediaTitle, - entryCount: parsed.entryCount, - updatedAt: parsed.updatedAt, - termEntries: parsed.termEntries as CharacterDictionaryTermEntry[], - images: parsed.images as CharacterDictionarySnapshotImage[], - }; - } catch { - return null; - } -} - -function writeSnapshot(snapshotPath: string, snapshot: CharacterDictionarySnapshot): void { - ensureDir(path.dirname(snapshotPath)); - fs.writeFileSync(snapshotPath, JSON.stringify(snapshot, null, 2), 'utf8'); -} - -function roleBadgeStyle(role: CharacterDictionaryRole): Record { - const base = { - borderRadius: '4px', - padding: '0.15em 0.5em', - fontSize: '0.8em', - fontWeight: 'bold', - color: '#fff', - }; - if (role === 'main') return { ...base, backgroundColor: '#4CAF50' }; - if (role === 'primary') return { ...base, backgroundColor: '#2196F3' 
}; - if (role === 'side') return { ...base, backgroundColor: '#FF9800' }; - return { ...base, backgroundColor: '#9E9E9E' }; -} - -function buildCollapsibleSection( - title: string, - open: boolean, - body: Array> | string | Record, -): Record { - return { - tag: 'details', - open, - style: { marginTop: '0.4em' }, - content: [ - { - tag: 'summary', - style: { fontWeight: 'bold', fontSize: '0.95em', cursor: 'pointer' }, - content: title, - }, - { - tag: 'div', - style: { padding: '0.25em 0 0 0.4em', fontSize: '0.9em' }, - content: body, - }, - ], - }; -} - -function buildVoicedByContent( - voiceActors: VoiceActorRecord[], - vaImagePaths: Map, -): Record { - if (voiceActors.length === 1) { - const va = voiceActors[0]!; - const vaImgPath = vaImagePaths.get(va.id); - const vaLabel = va.nativeName - ? va.fullName - ? `${va.nativeName} (${va.fullName})` - : va.nativeName - : va.fullName; - - if (vaImgPath) { - return { - tag: 'table', - content: { - tag: 'tr', - content: [ - { - tag: 'td', - style: { - verticalAlign: 'top', - padding: '0', - paddingRight: '0.4em', - borderWidth: '0', - }, - content: { - tag: 'img', - path: vaImgPath, - width: 3, - height: 3, - sizeUnits: 'em', - title: vaLabel, - alt: vaLabel, - collapsed: false, - collapsible: false, - background: true, - }, - }, - { - tag: 'td', - style: { verticalAlign: 'middle', padding: '0', borderWidth: '0' }, - content: vaLabel, - }, - ], - }, - }; - } - - return { tag: 'div', content: vaLabel }; - } - - const items: Array> = []; - for (const va of voiceActors) { - const vaLabel = va.nativeName - ? va.fullName - ? 
`${va.nativeName} (${va.fullName})` - : va.nativeName - : va.fullName; - items.push({ tag: 'li', content: vaLabel }); - } - return { tag: 'ul', style: { marginTop: '0.15em' }, content: items }; -} - -function createDefinitionGlossary( - character: CharacterRecord, - mediaTitle: string, - imagePath: string | null, - vaImagePaths: Map, - getCollapsibleSectionOpenState: ( - section: AnilistCharacterDictionaryCollapsibleSectionKey, - ) => boolean, -): CharacterDictionaryGlossaryEntry[] { - const displayName = character.nativeName || character.fullName || `Character ${character.id}`; - const secondaryName = - character.nativeName && character.fullName && character.fullName !== character.nativeName - ? character.fullName - : null; - const { fields, text: descriptionText } = parseCharacterDescription(character.description); - - const content: Array> = [ - { - tag: 'div', - style: { fontWeight: 'bold', fontSize: '1.1em', marginBottom: '0.1em' }, - content: displayName, - }, - ]; - - if (secondaryName) { - content.push({ - tag: 'div', - style: { fontSize: '0.85em', fontStyle: 'italic', color: '#b0b0b0', marginBottom: '0.2em' }, - content: secondaryName, - }); - } - - if (imagePath) { - content.push({ - tag: 'div', - style: { marginTop: '0.3em', marginBottom: '0.3em' }, - content: { - tag: 'img', - path: imagePath, - width: 8, - height: 11, - sizeUnits: 'em', - title: displayName, - alt: displayName, - description: `${displayName} · ${mediaTitle}`, - collapsed: false, - collapsible: false, - background: true, - }, - }); - } - - content.push({ - tag: 'div', - style: { fontSize: '0.8em', color: '#999', marginBottom: '0.2em' }, - content: `From: ${mediaTitle}`, - }); - - content.push({ - tag: 'div', - style: { marginBottom: '0.15em' }, - content: { - tag: 'span', - style: roleBadgeStyle(character.role), - content: roleLabel(character.role), - }, - }); - - const statsLine = formatCharacterStats(character); - if (descriptionText) { - content.push( - buildCollapsibleSection( - 
'Description', - getCollapsibleSectionOpenState('description'), - descriptionText, - ), - ); - } - - const fieldItems: Array> = []; - if (statsLine) { - fieldItems.push({ - tag: 'li', - style: { fontWeight: 'bold' }, - content: statsLine, - }); - } - fieldItems.push( - ...fields.map((f) => ({ - tag: 'li', - content: `${f.key}: ${f.value}`, - })), - ); - if (fieldItems.length > 0) { - content.push( - buildCollapsibleSection( - 'Character Information', - getCollapsibleSectionOpenState('characterInformation'), - { - tag: 'ul', - style: { marginTop: '0.15em' }, - content: fieldItems, - }, - ), - ); - } - - if (character.voiceActors.length > 0) { - content.push( - buildCollapsibleSection( - 'Voiced by', - getCollapsibleSectionOpenState('voicedBy'), - buildVoicedByContent(character.voiceActors, vaImagePaths), - ), - ); - } - - return [ - { - type: 'structured-content', - content: { tag: 'div', content }, - }, - ]; -} - -function buildSnapshotImagePath(mediaId: number, charId: number, ext: string): string { - return `img/m${mediaId}-c${charId}.${ext}`; -} - -function buildVaImagePath(mediaId: number, vaId: number, ext: string): string { - return `img/m${mediaId}-va${vaId}.${ext}`; -} - -function buildTermEntry( - term: string, - reading: string, - role: CharacterDictionaryRole, - glossary: CharacterDictionaryGlossaryEntry[], -): CharacterDictionaryTermEntry { - const { tag, score } = roleInfo(role); - return [term, reading, `name ${tag}`, '', score, glossary, 0, '']; -} - -const CRC32_TABLE = (() => { - const table = new Uint32Array(256); - for (let i = 0; i < 256; i += 1) { - let crc = i; - for (let j = 0; j < 8; j += 1) { - crc = (crc & 1) !== 0 ? 0xedb88320 ^ (crc >>> 1) : crc >>> 1; - } - table[i] = crc >>> 0; - } - return table; -})(); - -function crc32(data: Buffer): number { - let crc = 0xffffffff; - for (const byte of data) { - crc = CRC32_TABLE[(crc ^ byte) & 0xff]! 
^ (crc >>> 8); - } - return (crc ^ 0xffffffff) >>> 0; -} - -function createStoredZip(files: Array<{ name: string; data: Buffer }>): Buffer { - const chunks: Buffer[] = []; - const entries: ZipEntry[] = []; - let offset = 0; - - for (const file of files) { - const fileName = Buffer.from(file.name, 'utf8'); - const fileData = file.data; - const fileCrc32 = crc32(fileData); - const local = Buffer.alloc(30 + fileName.length); - let cursor = 0; - local.writeUInt32LE(0x04034b50, cursor); - cursor += 4; - local.writeUInt16LE(20, cursor); - cursor += 2; - local.writeUInt16LE(0, cursor); - cursor += 2; - local.writeUInt16LE(0, cursor); - cursor += 2; - local.writeUInt16LE(0, cursor); - cursor += 2; - local.writeUInt16LE(0, cursor); - cursor += 2; - local.writeUInt32LE(fileCrc32, cursor); - cursor += 4; - local.writeUInt32LE(fileData.length, cursor); - cursor += 4; - local.writeUInt32LE(fileData.length, cursor); - cursor += 4; - local.writeUInt16LE(fileName.length, cursor); - cursor += 2; - local.writeUInt16LE(0, cursor); - cursor += 2; - fileName.copy(local, cursor); - - chunks.push(local, fileData); - entries.push({ - name: file.name, - data: fileData, - crc32: fileCrc32, - localHeaderOffset: offset, - }); - offset += local.length + fileData.length; - } - - const centralStart = offset; - const centralChunks: Buffer[] = []; - for (const entry of entries) { - const fileName = Buffer.from(entry.name, 'utf8'); - const central = Buffer.alloc(46 + fileName.length); - let cursor = 0; - central.writeUInt32LE(0x02014b50, cursor); - cursor += 4; - central.writeUInt16LE(20, cursor); - cursor += 2; - central.writeUInt16LE(20, cursor); - cursor += 2; - central.writeUInt16LE(0, cursor); - cursor += 2; - central.writeUInt16LE(0, cursor); - cursor += 2; - central.writeUInt16LE(0, cursor); - cursor += 2; - central.writeUInt16LE(0, cursor); - cursor += 2; - central.writeUInt32LE(entry.crc32, cursor); - cursor += 4; - central.writeUInt32LE(entry.data.length, cursor); - cursor += 4; - 
central.writeUInt32LE(entry.data.length, cursor); - cursor += 4; - central.writeUInt16LE(fileName.length, cursor); - cursor += 2; - central.writeUInt16LE(0, cursor); - cursor += 2; - central.writeUInt16LE(0, cursor); - cursor += 2; - central.writeUInt16LE(0, cursor); - cursor += 2; - central.writeUInt16LE(0, cursor); - cursor += 2; - central.writeUInt32LE(0, cursor); - cursor += 4; - central.writeUInt32LE(entry.localHeaderOffset, cursor); - cursor += 4; - fileName.copy(central, cursor); - centralChunks.push(central); - offset += central.length; - } - - const centralSize = offset - centralStart; - const end = Buffer.alloc(22); - let cursor = 0; - end.writeUInt32LE(0x06054b50, cursor); - cursor += 4; - end.writeUInt16LE(0, cursor); - cursor += 2; - end.writeUInt16LE(0, cursor); - cursor += 2; - end.writeUInt16LE(entries.length, cursor); - cursor += 2; - end.writeUInt16LE(entries.length, cursor); - cursor += 2; - end.writeUInt32LE(centralSize, cursor); - cursor += 4; - end.writeUInt32LE(centralStart, cursor); - cursor += 4; - end.writeUInt16LE(0, cursor); - - return Buffer.concat([...chunks, ...centralChunks, end]); -} - -async function fetchAniList( - query: string, - variables: Record, - beforeRequest?: () => Promise, -): Promise { - if (beforeRequest) { - await beforeRequest(); - } - const response = await fetch(ANILIST_GRAPHQL_URL, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - query, - variables, - }), - }); - if (!response.ok) { - throw new Error(`AniList request failed (${response.status})`); - } - const payload = (await response.json()) as { - data?: T; - errors?: Array<{ message?: string }>; - }; - const firstError = payload.errors?.find((entry) => entry && typeof entry.message === 'string'); - if (firstError?.message) { - throw new Error(firstError.message); - } - if (!payload.data) { - throw new Error('AniList response missing data'); - } - return payload.data; -} - -async function 
resolveAniListMediaIdFromGuess( - guess: AnilistMediaGuess, - beforeRequest?: () => Promise, -): Promise { - const data = await fetchAniList( - ` - query($search: String!) { - Page(perPage: 10) { - media(search: $search, type: ANIME, sort: [SEARCH_MATCH, POPULARITY_DESC]) { - id - episodes - title { - romaji - english - native - } - } - } - } - `, - { - search: guess.title, - }, - beforeRequest, - ); - - const media = data.Page?.media ?? []; - const resolved = pickAniListSearchResult(guess.title, guess.episode, media); - if (!resolved) { - throw new Error(`No AniList media match found for "${guess.title}".`); - } - return resolved; -} - -async function fetchCharactersForMedia( - mediaId: number, - beforeRequest?: () => Promise, - onPageFetched?: (page: number) => void, -): Promise<{ - mediaTitle: string; - characters: CharacterRecord[]; -}> { - const characters: CharacterRecord[] = []; - let page = 1; - let mediaTitle = ''; - for (;;) { - const data = await fetchAniList( - ` - query($id: Int!, $page: Int!) { - Media(id: $id, type: ANIME) { - title { - romaji - english - native - } - characters(page: $page, perPage: 50, sort: [ROLE, RELEVANCE, ID]) { - pageInfo { - hasNextPage - } - edges { - role - voiceActors(language: JAPANESE) { - id - name { - full - native - } - image { - medium - } - } - node { - id - description(asHtml: false) - gender - age - dateOfBirth { - month - day - } - bloodType - image { - large - medium - } - name { - first - full - last - native - alternative - } - } - } - } - } - } - `, - { - id: mediaId, - page, - }, - beforeRequest, - ); - onPageFetched?.(page); - - const media = data.Media; - if (!media) { - throw new Error(`AniList media ${mediaId} not found.`); - } - if (!mediaTitle) { - mediaTitle = - media.title?.english?.trim() || - media.title?.romaji?.trim() || - media.title?.native?.trim() || - `AniList ${mediaId}`; - } - - const edges = media.characters?.edges ?? 
[]; - for (const edge of edges) { - const node = edge?.node; - if (!node || typeof node.id !== 'number') continue; - const firstNameHint = node.name?.first?.trim() || ''; - const fullName = node.name?.full?.trim() || ''; - const lastNameHint = node.name?.last?.trim() || ''; - const nativeName = node.name?.native?.trim() || ''; - const alternativeNames = [ - ...new Set( - (node.name?.alternative ?? []) - .filter((value): value is string => typeof value === 'string') - .map((value) => value.trim()) - .filter((value) => value.length > 0), - ), - ]; - if (!nativeName) continue; - const voiceActors: VoiceActorRecord[] = []; - for (const va of edge?.voiceActors ?? []) { - if (!va || typeof va.id !== 'number') continue; - const vaFull = va.name?.full?.trim() || ''; - const vaNative = va.name?.native?.trim() || ''; - if (!vaFull && !vaNative) continue; - voiceActors.push({ - id: va.id, - fullName: vaFull, - nativeName: vaNative, - imageUrl: va.image?.medium || null, - }); - } - characters.push({ - id: node.id, - role: mapRole(edge?.role), - firstNameHint, - fullName, - lastNameHint, - nativeName, - alternativeNames, - bloodType: node.bloodType?.trim() || '', - birthday: - typeof node.dateOfBirth?.month === 'number' && typeof node.dateOfBirth?.day === 'number' - ? [node.dateOfBirth.month, node.dateOfBirth.day] - : null, - description: node.description || '', - imageUrl: node.image?.large || node.image?.medium || null, - age: - typeof node.age === 'string' - ? node.age.trim() - : typeof node.age === 'number' - ? 
String(node.age) - : '', - sex: node.gender?.trim() || '', - voiceActors, - }); - } - - const hasNextPage = Boolean(media.characters?.pageInfo?.hasNextPage); - if (!hasNextPage) { - break; - } - page += 1; - } - - return { - mediaTitle, - characters, - }; -} - -async function downloadCharacterImage( - imageUrl: string, - charId: number, -): Promise<{ - filename: string; - ext: string; - bytes: Buffer; -} | null> { - try { - const response = await fetch(imageUrl); - if (!response.ok) return null; - const bytes = Buffer.from(await response.arrayBuffer()); - if (bytes.length === 0) return null; - const ext = inferImageExt(response.headers.get('content-type')); - return { - filename: `c${charId}.${ext}`, - ext, - bytes, - }; - } catch { - return null; - } -} - -function buildDictionaryTitle(mediaId: number): string { - return `SubMiner Character Dictionary (AniList ${mediaId})`; -} - -function createIndex( - dictionaryTitle: string, - description: string, - revision: string, -): Record { - return { - title: dictionaryTitle, - revision, - format: 3, - author: 'SubMiner', - description, - }; -} - -function createTagBank(): Array<[string, string, number, string, number]> { - return [ - ['name', 'partOfSpeech', 0, 'Character name', 0], - ['main', 'name', 0, 'Protagonist', 0], - ['primary', 'name', 0, 'Main character', 0], - ['side', 'name', 0, 'Side character', 0], - ['appears', 'name', 0, 'Minor appearance', 0], - ]; -} - -function buildSnapshotFromCharacters( - mediaId: number, - mediaTitle: string, - characters: CharacterRecord[], - imagesByCharacterId: Map, - imagesByVaId: Map, - updatedAt: number, - getCollapsibleSectionOpenState: ( - section: AnilistCharacterDictionaryCollapsibleSectionKey, - ) => boolean, -): CharacterDictionarySnapshot { - const termEntries: CharacterDictionaryTermEntry[] = []; - - for (const character of characters) { - const seenTerms = new Set(); - const imagePath = imagesByCharacterId.get(character.id)?.path ?? 
null; - const vaImagePaths = new Map(); - for (const va of character.voiceActors) { - const vaImg = imagesByVaId.get(va.id); - if (vaImg) vaImagePaths.set(va.id, vaImg.path); - } - const glossary = createDefinitionGlossary( - character, - mediaTitle, - imagePath, - vaImagePaths, - getCollapsibleSectionOpenState, - ); - const candidateTerms = buildNameTerms(character); - const nameParts = splitJapaneseName( - character.nativeName, - character.firstNameHint, - character.lastNameHint, - ); - const readings = generateNameReadings( - character.nativeName, - character.fullName, - character.firstNameHint, - character.lastNameHint, - ); - for (const term of candidateTerms) { - if (seenTerms.has(term)) continue; - seenTerms.add(term); - const reading = buildReadingForTerm(term, character, readings, nameParts); - termEntries.push(buildTermEntry(term, reading, character.role, glossary)); - } - } - - if (termEntries.length === 0) { - throw new Error('No dictionary entries generated from AniList character data.'); - } - - return { - formatVersion: CHARACTER_DICTIONARY_FORMAT_VERSION, - mediaId, - mediaTitle, - entryCount: termEntries.length, - updatedAt, - termEntries, - images: [...imagesByCharacterId.values(), ...imagesByVaId.values()], - }; -} - -function getCollapsibleSectionKeyFromTitle( - title: string, -): AnilistCharacterDictionaryCollapsibleSectionKey | null { - if (title === 'Description') return 'description'; - if (title === 'Character Information') return 'characterInformation'; - if (title === 'Voiced by') return 'voicedBy'; - return null; -} - -function applyCollapsibleOpenStatesToStructuredValue( - value: unknown, - getCollapsibleSectionOpenState: ( - section: AnilistCharacterDictionaryCollapsibleSectionKey, - ) => boolean, -): unknown { - if (Array.isArray(value)) { - return value.map((item) => - applyCollapsibleOpenStatesToStructuredValue(item, getCollapsibleSectionOpenState), - ); - } - if (!value || typeof value !== 'object') { - return value; - } - - const 
record = value as Record; - const next: Record = {}; - for (const [key, child] of Object.entries(record)) { - next[key] = applyCollapsibleOpenStatesToStructuredValue(child, getCollapsibleSectionOpenState); - } - - if (record.tag === 'details') { - const content = Array.isArray(record.content) ? record.content : []; - const summary = content[0]; - if (summary && typeof summary === 'object' && !Array.isArray(summary)) { - const summaryContent = (summary as Record).content; - if (typeof summaryContent === 'string') { - const section = getCollapsibleSectionKeyFromTitle(summaryContent); - if (section) { - next.open = getCollapsibleSectionOpenState(section); - } - } - } - } - - return next; -} - -function applyCollapsibleOpenStatesToTermEntries( - termEntries: CharacterDictionaryTermEntry[], - getCollapsibleSectionOpenState: ( - section: AnilistCharacterDictionaryCollapsibleSectionKey, - ) => boolean, -): CharacterDictionaryTermEntry[] { - return termEntries.map((entry) => { - const glossary = entry[5].map((item) => - applyCollapsibleOpenStatesToStructuredValue(item, getCollapsibleSectionOpenState), - ) as CharacterDictionaryGlossaryEntry[]; - return [...entry.slice(0, 5), glossary, ...entry.slice(6)] as CharacterDictionaryTermEntry; - }); -} - -function buildDictionaryZip( - outputPath: string, - dictionaryTitle: string, - description: string, - revision: string, - termEntries: CharacterDictionaryTermEntry[], - images: CharacterDictionarySnapshotImage[], -): { zipPath: string; entryCount: number } { - const zipFiles: Array<{ name: string; data: Buffer }> = [ - { - name: 'index.json', - data: Buffer.from( - JSON.stringify(createIndex(dictionaryTitle, description, revision), null, 2), - 'utf8', - ), - }, - { - name: 'tag_bank_1.json', - data: Buffer.from(JSON.stringify(createTagBank()), 'utf8'), - }, - ]; - - for (const image of images) { - zipFiles.push({ - name: image.path, - data: Buffer.from(image.dataBase64, 'base64'), - }); - } - - const entriesPerBank = 10_000; - 
for (let i = 0; i < termEntries.length; i += entriesPerBank) { - zipFiles.push({ - name: `term_bank_${Math.floor(i / entriesPerBank) + 1}.json`, - data: Buffer.from(JSON.stringify(termEntries.slice(i, i + entriesPerBank)), 'utf8'), - }); - } - - ensureDir(path.dirname(outputPath)); - fs.writeFileSync(outputPath, createStoredZip(zipFiles)); - return { zipPath: outputPath, entryCount: termEntries.length }; -} - -function buildMergedRevision(mediaIds: number[], snapshots: CharacterDictionarySnapshot[]): string { - const hash = createHash('sha1'); - hash.update( - JSON.stringify({ - mediaIds, - snapshots: snapshots.map((snapshot) => ({ - mediaId: snapshot.mediaId, - updatedAt: snapshot.updatedAt, - entryCount: snapshot.entryCount, - })), - }), - ); - return hash.digest('hex').slice(0, 12); -} - -function normalizeMergedMediaIds(mediaIds: number[]): number[] { - return [ - ...new Set( - mediaIds - .filter((mediaId) => Number.isFinite(mediaId) && mediaId > 0) - .map((mediaId) => Math.floor(mediaId)), - ), - ].sort((left, right) => left - right); -} - export function createCharacterDictionaryRuntimeService(deps: CharacterDictionaryRuntimeDeps): { getOrCreateCurrentSnapshot: ( targetPath?: string, diff --git a/src/main/character-dictionary-runtime/build.test.ts b/src/main/character-dictionary-runtime/build.test.ts new file mode 100644 index 0000000..caf0755 --- /dev/null +++ b/src/main/character-dictionary-runtime/build.test.ts @@ -0,0 +1,58 @@ +import assert from 'node:assert/strict'; +import test from 'node:test'; +import { applyCollapsibleOpenStatesToTermEntries } from './build'; +import type { CharacterDictionaryTermEntry } from './types'; + +test('applyCollapsibleOpenStatesToTermEntries reapplies configured details open states', () => { + const termEntries: CharacterDictionaryTermEntry[] = [ + [ + 'アルファ', + 'あるふぁ', + '', + '', + 0, + [ + { + type: 'structured-content', + content: { + tag: 'div', + content: [ + { + tag: 'details', + open: false, + content: [ + { tag: 
'summary', content: 'Description' }, + { tag: 'div', content: 'body' }, + ], + }, + { + tag: 'details', + open: false, + content: [ + { tag: 'summary', content: 'Voiced by' }, + { tag: 'div', content: 'cv' }, + ], + }, + ], + }, + }, + ], + 0, + 'name', + ], + ]; + + const [entry] = applyCollapsibleOpenStatesToTermEntries( + termEntries, + (section) => section === 'description', + ); + assert.ok(entry); + const glossaryEntry = entry[5][0] as { + content: { + content: Array<{ open?: boolean }>; + }; + }; + + assert.equal(glossaryEntry.content.content[0]?.open, true); + assert.equal(glossaryEntry.content.content[1]?.open, false); +}); diff --git a/src/main/character-dictionary-runtime/build.ts b/src/main/character-dictionary-runtime/build.ts new file mode 100644 index 0000000..2992eb8 --- /dev/null +++ b/src/main/character-dictionary-runtime/build.ts @@ -0,0 +1,7 @@ +export { + applyCollapsibleOpenStatesToTermEntries, + buildSnapshotFromCharacters, + buildSnapshotImagePath, + buildVaImagePath, +} from './snapshot'; +export { buildDictionaryTitle, buildDictionaryZip } from './zip'; diff --git a/src/main/character-dictionary-runtime/cache.test.ts b/src/main/character-dictionary-runtime/cache.test.ts new file mode 100644 index 0000000..6f739b0 --- /dev/null +++ b/src/main/character-dictionary-runtime/cache.test.ts @@ -0,0 +1,54 @@ +import assert from 'node:assert/strict'; +import * as fs from 'fs'; +import * as os from 'os'; +import * as path from 'path'; +import test from 'node:test'; +import { getSnapshotPath, readSnapshot, writeSnapshot } from './cache'; +import { CHARACTER_DICTIONARY_FORMAT_VERSION } from './constants'; +import type { CharacterDictionarySnapshot } from './types'; + +function makeTempDir(): string { + return fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-character-dictionary-cache-')); +} + +function createSnapshot(): CharacterDictionarySnapshot { + return { + formatVersion: CHARACTER_DICTIONARY_FORMAT_VERSION, + mediaId: 130298, + mediaTitle: 'The 
Eminence in Shadow', + entryCount: 1, + updatedAt: 1_700_000_000_000, + termEntries: [['アルファ', 'あるふぁ', '', '', 0, ['Alpha'], 0, 'name']], + images: [ + { + path: 'img/m130298-c1.png', + dataBase64: + 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR42mP8/x8AAwMCAO+nmX8AAAAASUVORK5CYII=', + }, + ], + }; +} + +test('writeSnapshot persists and readSnapshot restores current-format snapshots', () => { + const outputDir = makeTempDir(); + const snapshotPath = getSnapshotPath(outputDir, 130298); + const snapshot = createSnapshot(); + + writeSnapshot(snapshotPath, snapshot); + + assert.deepEqual(readSnapshot(snapshotPath), snapshot); +}); + +test('readSnapshot ignores snapshots written with an older format version', () => { + const outputDir = makeTempDir(); + const snapshotPath = getSnapshotPath(outputDir, 130298); + const staleSnapshot = { + ...createSnapshot(), + formatVersion: CHARACTER_DICTIONARY_FORMAT_VERSION - 1, + }; + + fs.mkdirSync(path.dirname(snapshotPath), { recursive: true }); + fs.writeFileSync(snapshotPath, JSON.stringify(staleSnapshot), 'utf8'); + + assert.equal(readSnapshot(snapshotPath), null); +}); diff --git a/src/main/character-dictionary-runtime/cache.ts b/src/main/character-dictionary-runtime/cache.ts new file mode 100644 index 0000000..db5d57a --- /dev/null +++ b/src/main/character-dictionary-runtime/cache.ts @@ -0,0 +1,87 @@ +import * as fs from 'fs'; +import * as path from 'path'; +import { createHash } from 'node:crypto'; +import { CHARACTER_DICTIONARY_FORMAT_VERSION } from './constants'; +import { ensureDir } from './fs-utils'; +import type { + CharacterDictionarySnapshot, + CharacterDictionarySnapshotImage, + CharacterDictionaryTermEntry, +} from './types'; + +function getSnapshotsDir(outputDir: string): string { + return path.join(outputDir, 'snapshots'); +} + +export function getSnapshotPath(outputDir: string, mediaId: number): string { + return path.join(getSnapshotsDir(outputDir), `anilist-${mediaId}.json`); +} + +export function 
getMergedZipPath(outputDir: string): string { + return path.join(outputDir, 'merged.zip'); +} + +export function readSnapshot(snapshotPath: string): CharacterDictionarySnapshot | null { + try { + const raw = fs.readFileSync(snapshotPath, 'utf8'); + const parsed = JSON.parse(raw) as Partial; + if (!parsed || typeof parsed !== 'object') { + return null; + } + if ( + parsed.formatVersion !== CHARACTER_DICTIONARY_FORMAT_VERSION || + typeof parsed.mediaId !== 'number' || + typeof parsed.mediaTitle !== 'string' || + typeof parsed.entryCount !== 'number' || + typeof parsed.updatedAt !== 'number' || + !Array.isArray(parsed.termEntries) || + !Array.isArray(parsed.images) + ) { + return null; + } + return { + formatVersion: parsed.formatVersion, + mediaId: parsed.mediaId, + mediaTitle: parsed.mediaTitle, + entryCount: parsed.entryCount, + updatedAt: parsed.updatedAt, + termEntries: parsed.termEntries as CharacterDictionaryTermEntry[], + images: parsed.images as CharacterDictionarySnapshotImage[], + }; + } catch { + return null; + } +} + +export function writeSnapshot(snapshotPath: string, snapshot: CharacterDictionarySnapshot): void { + ensureDir(path.dirname(snapshotPath)); + fs.writeFileSync(snapshotPath, JSON.stringify(snapshot, null, 2), 'utf8'); +} + +export function buildMergedRevision( + mediaIds: number[], + snapshots: CharacterDictionarySnapshot[], +): string { + const hash = createHash('sha1'); + hash.update( + JSON.stringify({ + mediaIds, + snapshots: snapshots.map((snapshot) => ({ + mediaId: snapshot.mediaId, + updatedAt: snapshot.updatedAt, + entryCount: snapshot.entryCount, + })), + }), + ); + return hash.digest('hex').slice(0, 12); +} + +export function normalizeMergedMediaIds(mediaIds: number[]): number[] { + return [ + ...new Set( + mediaIds + .filter((mediaId) => Number.isFinite(mediaId) && mediaId > 0) + .map((mediaId) => Math.floor(mediaId)), + ), + ].sort((left, right) => left - right); +} diff --git a/src/main/character-dictionary-runtime/constants.ts 
b/src/main/character-dictionary-runtime/constants.ts new file mode 100644 index 0000000..c4bef98 --- /dev/null +++ b/src/main/character-dictionary-runtime/constants.ts @@ -0,0 +1,23 @@ +export const ANILIST_GRAPHQL_URL = 'https://graphql.anilist.co'; +export const ANILIST_REQUEST_DELAY_MS = 2000; +export const CHARACTER_IMAGE_DOWNLOAD_DELAY_MS = 250; +export const CHARACTER_DICTIONARY_FORMAT_VERSION = 15; +export const CHARACTER_DICTIONARY_MERGED_TITLE = 'SubMiner Character Dictionary'; + +export const HONORIFIC_SUFFIXES = [ + { term: 'さん', reading: 'さん' }, + { term: '様', reading: 'さま' }, + { term: '先生', reading: 'せんせい' }, + { term: '先輩', reading: 'せんぱい' }, + { term: '後輩', reading: 'こうはい' }, + { term: '氏', reading: 'し' }, + { term: '君', reading: 'くん' }, + { term: 'くん', reading: 'くん' }, + { term: 'ちゃん', reading: 'ちゃん' }, + { term: 'たん', reading: 'たん' }, + { term: '坊', reading: 'ぼう' }, + { term: '殿', reading: 'どの' }, + { term: '博士', reading: 'はかせ' }, + { term: '社長', reading: 'しゃちょう' }, + { term: '部長', reading: 'ぶちょう' }, +] as const; diff --git a/src/main/character-dictionary-runtime/description.ts b/src/main/character-dictionary-runtime/description.ts new file mode 100644 index 0000000..7510a8c --- /dev/null +++ b/src/main/character-dictionary-runtime/description.ts @@ -0,0 +1,82 @@ +import type { CharacterBirthday, CharacterRecord } from './types'; + +const MONTH_NAMES: ReadonlyArray<[number, string]> = [ + [1, 'January'], + [2, 'February'], + [3, 'March'], + [4, 'April'], + [5, 'May'], + [6, 'June'], + [7, 'July'], + [8, 'August'], + [9, 'September'], + [10, 'October'], + [11, 'November'], + [12, 'December'], +]; + +const SEX_DISPLAY: ReadonlyArray<[string, string]> = [ + ['m', '♂ Male'], + ['f', '♀ Female'], + ['male', '♂ Male'], + ['female', '♀ Female'], +]; + +function formatBirthday(birthday: CharacterBirthday | null): string { + if (!birthday) return ''; + const [month, day] = birthday; + const monthName = MONTH_NAMES.find(([m]) => m === month)?.[1] || 
'Unknown'; + return `${monthName} ${day}`; +} + +export function formatCharacterStats(character: CharacterRecord): string { + const parts: string[] = []; + const normalizedSex = character.sex.trim().toLowerCase(); + const sexDisplay = SEX_DISPLAY.find(([key]) => key === normalizedSex)?.[1]; + if (sexDisplay) parts.push(sexDisplay); + if (character.age.trim()) parts.push(`${character.age.trim()} years`); + if (character.bloodType.trim()) parts.push(`Blood Type ${character.bloodType.trim()}`); + const birthday = formatBirthday(character.birthday); + if (birthday) parts.push(`Birthday: ${birthday}`); + return parts.join(' • '); +} + +export function parseCharacterDescription(raw: string): { + fields: Array<{ key: string; value: string }>; + text: string; +} { + const cleaned = raw.replace(//gi, '\n').replace(/<[^>]+>/g, ' '); + const lines = cleaned.split(/\n/); + const fields: Array<{ key: string; value: string }> = []; + const textLines: string[] = []; + + for (const line of lines) { + const trimmed = line.trim(); + if (!trimmed) continue; + const match = trimmed.match(/^__([^_]+):__\s*(.+)$/); + if (match) { + const value = match[2]! 
+ .replace(/__([^_]+)__/g, '$1') + .replace(/\*\*([^*]+)\*\*/g, '$1') + .replace(/_([^_]+)_/g, '$1') + .replace(/\*([^*]+)\*/g, '$1') + .trim(); + fields.push({ key: match[1]!.trim(), value }); + } else { + textLines.push(trimmed); + } + } + + const text = textLines + .join(' ') + .replace(/\[([^\]]+)\]\((https?:\/\/[^)\s]+)\)/g, '$1') + .replace(/https?:\/\/\S+/g, '') + .replace(/__([^_]+)__/g, '$1') + .replace(/\*\*([^*]+)\*\*/g, '$1') + .replace(/~!/g, '') + .replace(/!~/g, '') + .replace(/\s+/g, ' ') + .trim(); + + return { fields, text }; +} diff --git a/src/main/character-dictionary-runtime/fetch.ts b/src/main/character-dictionary-runtime/fetch.ts new file mode 100644 index 0000000..61ba245 --- /dev/null +++ b/src/main/character-dictionary-runtime/fetch.ts @@ -0,0 +1,386 @@ +import type { AnilistMediaGuess } from '../../core/services/anilist/anilist-updater'; +import { ANILIST_GRAPHQL_URL } from './constants'; +import type { + CharacterDictionaryRole, + CharacterRecord, + ResolvedAniListMedia, + VoiceActorRecord, +} from './types'; + +type AniListSearchResponse = { + Page?: { + media?: Array<{ + id: number; + episodes?: number | null; + title?: { + romaji?: string | null; + english?: string | null; + native?: string | null; + }; + }>; + }; +}; + +type AniListCharacterPageResponse = { + Media?: { + title?: { + romaji?: string | null; + english?: string | null; + native?: string | null; + }; + characters?: { + pageInfo?: { + hasNextPage?: boolean | null; + }; + edges?: Array<{ + role?: string | null; + voiceActors?: Array<{ + id: number; + name?: { + full?: string | null; + native?: string | null; + } | null; + image?: { + large?: string | null; + medium?: string | null; + } | null; + }> | null; + node?: { + id: number; + description?: string | null; + image?: { + large?: string | null; + medium?: string | null; + } | null; + gender?: string | null; + age?: string | number | null; + dateOfBirth?: { + month?: number | null; + day?: number | null; + } | null; + 
bloodType?: string | null; + name?: { + first?: string | null; + full?: string | null; + last?: string | null; + native?: string | null; + alternative?: Array | null; + } | null; + } | null; + } | null>; + } | null; + } | null; +}; + +function normalizeTitle(value: string): string { + return value.trim().toLowerCase().replace(/\s+/g, ' '); +} + +function pickAniListSearchResult( + title: string, + episode: number | null, + media: Array<{ + id: number; + episodes?: number | null; + title?: { + romaji?: string | null; + english?: string | null; + native?: string | null; + }; + }>, +): ResolvedAniListMedia | null { + if (media.length === 0) return null; + + const episodeFiltered = + episode && episode > 0 + ? media.filter((entry) => { + const totalEpisodes = entry.episodes; + return ( + typeof totalEpisodes !== 'number' || totalEpisodes <= 0 || episode <= totalEpisodes + ); + }) + : media; + const candidates = episodeFiltered.length > 0 ? episodeFiltered : media; + const normalizedTitle = normalizeTitle(title); + + const exact = candidates.find((entry) => { + const titles = [entry.title?.english, entry.title?.romaji, entry.title?.native] + .filter((value): value is string => typeof value === 'string') + .map((value) => normalizeTitle(value)); + return titles.includes(normalizedTitle); + }); + const selected = exact ?? candidates[0] ?? 
media[0]; + if (!selected) return null; + + const selectedTitle = + selected.title?.english?.trim() || + selected.title?.romaji?.trim() || + selected.title?.native?.trim() || + title.trim(); + return { + id: selected.id, + title: selectedTitle, + }; +} + +async function fetchAniList( + query: string, + variables: Record, + beforeRequest?: () => Promise, +): Promise { + if (beforeRequest) { + await beforeRequest(); + } + const response = await fetch(ANILIST_GRAPHQL_URL, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + query, + variables, + }), + }); + if (!response.ok) { + throw new Error(`AniList request failed (${response.status})`); + } + const payload = (await response.json()) as { + data?: T; + errors?: Array<{ message?: string }>; + }; + const firstError = payload.errors?.find((entry) => entry && typeof entry.message === 'string'); + if (firstError?.message) { + throw new Error(firstError.message); + } + if (!payload.data) { + throw new Error('AniList response missing data'); + } + return payload.data; +} + +function mapRole(input: string | null | undefined): CharacterDictionaryRole { + const value = (input || '').trim().toUpperCase(); + if (value === 'MAIN') return 'main'; + if (value === 'SUPPORTING') return 'primary'; + if (value === 'BACKGROUND') return 'side'; + return 'side'; +} + +function inferImageExt(contentType: string | null): string { + const normalized = (contentType || '').toLowerCase(); + if (normalized.includes('png')) return 'png'; + if (normalized.includes('gif')) return 'gif'; + if (normalized.includes('webp')) return 'webp'; + return 'jpg'; +} + +export async function resolveAniListMediaIdFromGuess( + guess: AnilistMediaGuess, + beforeRequest?: () => Promise, +): Promise { + const data = await fetchAniList( + ` + query($search: String!) 
{ + Page(perPage: 10) { + media(search: $search, type: ANIME, sort: [SEARCH_MATCH, POPULARITY_DESC]) { + id + episodes + title { + romaji + english + native + } + } + } + } + `, + { + search: guess.title, + }, + beforeRequest, + ); + + const media = data.Page?.media ?? []; + const resolved = pickAniListSearchResult(guess.title, guess.episode, media); + if (!resolved) { + throw new Error(`No AniList media match found for "${guess.title}".`); + } + return resolved; +} + +export async function fetchCharactersForMedia( + mediaId: number, + beforeRequest?: () => Promise, + onPageFetched?: (page: number) => void, +): Promise<{ + mediaTitle: string; + characters: CharacterRecord[]; +}> { + const characters: CharacterRecord[] = []; + let page = 1; + let mediaTitle = ''; + for (;;) { + const data = await fetchAniList( + ` + query($id: Int!, $page: Int!) { + Media(id: $id, type: ANIME) { + title { + romaji + english + native + } + characters(page: $page, perPage: 50, sort: [ROLE, RELEVANCE, ID]) { + pageInfo { + hasNextPage + } + edges { + role + voiceActors(language: JAPANESE) { + id + name { + full + native + } + image { + medium + } + } + node { + id + description(asHtml: false) + gender + age + dateOfBirth { + month + day + } + bloodType + image { + large + medium + } + name { + first + full + last + native + alternative + } + } + } + } + } + } + `, + { + id: mediaId, + page, + }, + beforeRequest, + ); + onPageFetched?.(page); + + const media = data.Media; + if (!media) { + throw new Error(`AniList media ${mediaId} not found.`); + } + if (!mediaTitle) { + mediaTitle = + media.title?.english?.trim() || + media.title?.romaji?.trim() || + media.title?.native?.trim() || + `AniList ${mediaId}`; + } + + const edges = media.characters?.edges ?? 
[]; + for (const edge of edges) { + const node = edge?.node; + if (!node || typeof node.id !== 'number') continue; + const firstNameHint = node.name?.first?.trim() || ''; + const fullName = node.name?.full?.trim() || ''; + const lastNameHint = node.name?.last?.trim() || ''; + const nativeName = node.name?.native?.trim() || ''; + const alternativeNames = [ + ...new Set( + (node.name?.alternative ?? []) + .filter((value): value is string => typeof value === 'string') + .map((value) => value.trim()) + .filter((value) => value.length > 0), + ), + ]; + if (!nativeName) continue; + const voiceActors: VoiceActorRecord[] = []; + for (const va of edge?.voiceActors ?? []) { + if (!va || typeof va.id !== 'number') continue; + const vaFull = va.name?.full?.trim() || ''; + const vaNative = va.name?.native?.trim() || ''; + if (!vaFull && !vaNative) continue; + voiceActors.push({ + id: va.id, + fullName: vaFull, + nativeName: vaNative, + imageUrl: va.image?.medium || null, + }); + } + characters.push({ + id: node.id, + role: mapRole(edge?.role), + firstNameHint, + fullName, + lastNameHint, + nativeName, + alternativeNames, + bloodType: node.bloodType?.trim() || '', + birthday: + typeof node.dateOfBirth?.month === 'number' && typeof node.dateOfBirth?.day === 'number' + ? [node.dateOfBirth.month, node.dateOfBirth.day] + : null, + description: node.description || '', + imageUrl: node.image?.large || node.image?.medium || null, + age: + typeof node.age === 'string' + ? node.age.trim() + : typeof node.age === 'number' + ? 
String(node.age) + : '', + sex: node.gender?.trim() || '', + voiceActors, + }); + } + + const hasNextPage = Boolean(media.characters?.pageInfo?.hasNextPage); + if (!hasNextPage) { + break; + } + page += 1; + } + + return { + mediaTitle, + characters, + }; +} + +export async function downloadCharacterImage( + imageUrl: string, + charId: number, +): Promise<{ + filename: string; + ext: string; + bytes: Buffer; +} | null> { + try { + const response = await fetch(imageUrl); + if (!response.ok) return null; + const bytes = Buffer.from(await response.arrayBuffer()); + if (bytes.length === 0) return null; + const ext = inferImageExt(response.headers.get('content-type')); + return { + filename: `c${charId}.${ext}`, + ext, + bytes, + }; + } catch { + return null; + } +} diff --git a/src/main/character-dictionary-runtime/fs-utils.ts b/src/main/character-dictionary-runtime/fs-utils.ts new file mode 100644 index 0000000..f53af46 --- /dev/null +++ b/src/main/character-dictionary-runtime/fs-utils.ts @@ -0,0 +1 @@ +export { ensureDir } from '../../shared/fs-utils'; diff --git a/src/main/character-dictionary-runtime/glossary.ts b/src/main/character-dictionary-runtime/glossary.ts new file mode 100644 index 0000000..c042b39 --- /dev/null +++ b/src/main/character-dictionary-runtime/glossary.ts @@ -0,0 +1,243 @@ +import type { AnilistCharacterDictionaryCollapsibleSectionKey } from '../../types'; +import { formatCharacterStats, parseCharacterDescription } from './description'; +import type { + CharacterDictionaryGlossaryEntry, + CharacterDictionaryRole, + CharacterRecord, + VoiceActorRecord, +} from './types'; + +function roleLabel(role: CharacterDictionaryRole): string { + if (role === 'main') return 'Protagonist'; + if (role === 'primary') return 'Main Character'; + if (role === 'side') return 'Side Character'; + return 'Minor Role'; +} + +function roleBadgeStyle(role: CharacterDictionaryRole): Record { + const base = { + borderRadius: '4px', + padding: '0.15em 0.5em', + fontSize: 
'0.8em', + fontWeight: 'bold', + color: '#fff', + }; + if (role === 'main') return { ...base, backgroundColor: '#4CAF50' }; + if (role === 'primary') return { ...base, backgroundColor: '#2196F3' }; + if (role === 'side') return { ...base, backgroundColor: '#FF9800' }; + return { ...base, backgroundColor: '#9E9E9E' }; +} + +function buildCollapsibleSection( + title: string, + open: boolean, + body: Array> | string | Record, +): Record { + return { + tag: 'details', + open, + style: { marginTop: '0.4em' }, + content: [ + { + tag: 'summary', + style: { fontWeight: 'bold', fontSize: '0.95em', cursor: 'pointer' }, + content: title, + }, + { + tag: 'div', + style: { padding: '0.25em 0 0 0.4em', fontSize: '0.9em' }, + content: body, + }, + ], + }; +} + +function buildVoicedByContent( + voiceActors: VoiceActorRecord[], + vaImagePaths: Map, +): Record { + if (voiceActors.length === 1) { + const va = voiceActors[0]!; + const vaImgPath = vaImagePaths.get(va.id); + const vaLabel = va.nativeName + ? va.fullName + ? `${va.nativeName} (${va.fullName})` + : va.nativeName + : va.fullName; + + if (vaImgPath) { + return { + tag: 'table', + content: { + tag: 'tr', + content: [ + { + tag: 'td', + style: { + verticalAlign: 'top', + padding: '0', + paddingRight: '0.4em', + borderWidth: '0', + }, + content: { + tag: 'img', + path: vaImgPath, + width: 3, + height: 3, + sizeUnits: 'em', + title: vaLabel, + alt: vaLabel, + collapsed: false, + collapsible: false, + background: true, + }, + }, + { + tag: 'td', + style: { verticalAlign: 'middle', padding: '0', borderWidth: '0' }, + content: vaLabel, + }, + ], + }, + }; + } + + return { tag: 'div', content: vaLabel }; + } + + const items: Array> = []; + for (const va of voiceActors) { + const vaLabel = va.nativeName + ? va.fullName + ? 
`${va.nativeName} (${va.fullName})` + : va.nativeName + : va.fullName; + items.push({ tag: 'li', content: vaLabel }); + } + return { tag: 'ul', style: { marginTop: '0.15em' }, content: items }; +} + +export function createDefinitionGlossary( + character: CharacterRecord, + mediaTitle: string, + imagePath: string | null, + vaImagePaths: Map, + getCollapsibleSectionOpenState: ( + section: AnilistCharacterDictionaryCollapsibleSectionKey, + ) => boolean, +): CharacterDictionaryGlossaryEntry[] { + const displayName = character.nativeName || character.fullName || `Character ${character.id}`; + const secondaryName = + character.nativeName && character.fullName && character.fullName !== character.nativeName + ? character.fullName + : null; + const { fields, text: descriptionText } = parseCharacterDescription(character.description); + + const content: Array> = [ + { + tag: 'div', + style: { fontWeight: 'bold', fontSize: '1.1em', marginBottom: '0.1em' }, + content: displayName, + }, + ]; + + if (secondaryName) { + content.push({ + tag: 'div', + style: { fontSize: '0.85em', fontStyle: 'italic', color: '#b0b0b0', marginBottom: '0.2em' }, + content: secondaryName, + }); + } + + if (imagePath) { + content.push({ + tag: 'div', + style: { marginTop: '0.3em', marginBottom: '0.3em' }, + content: { + tag: 'img', + path: imagePath, + width: 8, + height: 11, + sizeUnits: 'em', + title: displayName, + alt: displayName, + description: `${displayName} · ${mediaTitle}`, + collapsed: false, + collapsible: false, + background: true, + }, + }); + } + + content.push({ + tag: 'div', + style: { fontSize: '0.8em', color: '#999', marginBottom: '0.2em' }, + content: `From: ${mediaTitle}`, + }); + + content.push({ + tag: 'div', + style: { marginBottom: '0.15em' }, + content: { + tag: 'span', + style: roleBadgeStyle(character.role), + content: roleLabel(character.role), + }, + }); + + const statsLine = formatCharacterStats(character); + if (descriptionText) { + content.push( + 
buildCollapsibleSection( + 'Description', + getCollapsibleSectionOpenState('description'), + descriptionText, + ), + ); + } + + const fieldItems: Array> = []; + if (statsLine) { + fieldItems.push({ + tag: 'li', + style: { fontWeight: 'bold' }, + content: statsLine, + }); + } + fieldItems.push( + ...fields.map((field) => ({ + tag: 'li', + content: `${field.key}: ${field.value}`, + })), + ); + if (fieldItems.length > 0) { + content.push( + buildCollapsibleSection( + 'Character Information', + getCollapsibleSectionOpenState('characterInformation'), + { + tag: 'ul', + style: { marginTop: '0.15em' }, + content: fieldItems, + }, + ), + ); + } + + if (character.voiceActors.length > 0) { + content.push( + buildCollapsibleSection( + 'Voiced by', + getCollapsibleSectionOpenState('voicedBy'), + buildVoicedByContent(character.voiceActors, vaImagePaths), + ), + ); + } + + return [ + { + type: 'structured-content', + content: { tag: 'div', content }, + }, + ]; +} diff --git a/src/main/character-dictionary-runtime/name-reading.ts b/src/main/character-dictionary-runtime/name-reading.ts new file mode 100644 index 0000000..2876ef2 --- /dev/null +++ b/src/main/character-dictionary-runtime/name-reading.ts @@ -0,0 +1,496 @@ +import { HONORIFIC_SUFFIXES } from './constants'; +import type { JapaneseNameParts, NameReadings } from './types'; + +export function hasKanaOnly(value: string): boolean { + return /^[\u3040-\u309f\u30a0-\u30ffー]+$/.test(value); +} + +function katakanaToHiragana(value: string): string { + let output = ''; + for (const char of value) { + const code = char.charCodeAt(0); + if (code >= 0x30a1 && code <= 0x30f6) { + output += String.fromCharCode(code - 0x60); + continue; + } + output += char; + } + return output; +} + +export function buildReading(term: string): string { + const compact = term.replace(/\s+/g, '').trim(); + if (!compact || !hasKanaOnly(compact)) { + return ''; + } + return katakanaToHiragana(compact); +} + +export function containsKanji(value: string): 
boolean { + for (const char of value) { + const code = char.charCodeAt(0); + if ((code >= 0x4e00 && code <= 0x9fff) || (code >= 0x3400 && code <= 0x4dbf)) { + return true; + } + } + return false; +} + +export function isRomanizedName(value: string): boolean { + return /^[A-Za-zĀĪŪĒŌÂÊÎÔÛāīūēōâêîôû'’.\-\s]+$/.test(value); +} + +function normalizeRomanizedName(value: string): string { + return value + .normalize('NFKC') + .toLowerCase() + .replace(/[’']/g, '') + .replace(/[.\-]/g, ' ') + .replace(/ā|â/g, 'aa') + .replace(/ī|î/g, 'ii') + .replace(/ū|û/g, 'uu') + .replace(/ē|ê/g, 'ei') + .replace(/ō|ô/g, 'ou') + .replace(/\s+/g, ' ') + .trim(); +} + +const ROMANIZED_KANA_DIGRAPHS: ReadonlyArray<[string, string]> = [ + ['kya', 'キャ'], + ['kyu', 'キュ'], + ['kyo', 'キョ'], + ['gya', 'ギャ'], + ['gyu', 'ギュ'], + ['gyo', 'ギョ'], + ['sha', 'シャ'], + ['shu', 'シュ'], + ['sho', 'ショ'], + ['sya', 'シャ'], + ['syu', 'シュ'], + ['syo', 'ショ'], + ['ja', 'ジャ'], + ['ju', 'ジュ'], + ['jo', 'ジョ'], + ['jya', 'ジャ'], + ['jyu', 'ジュ'], + ['jyo', 'ジョ'], + ['cha', 'チャ'], + ['chu', 'チュ'], + ['cho', 'チョ'], + ['tya', 'チャ'], + ['tyu', 'チュ'], + ['tyo', 'チョ'], + ['cya', 'チャ'], + ['cyu', 'チュ'], + ['cyo', 'チョ'], + ['nya', 'ニャ'], + ['nyu', 'ニュ'], + ['nyo', 'ニョ'], + ['hya', 'ヒャ'], + ['hyu', 'ヒュ'], + ['hyo', 'ヒョ'], + ['bya', 'ビャ'], + ['byu', 'ビュ'], + ['byo', 'ビョ'], + ['pya', 'ピャ'], + ['pyu', 'ピュ'], + ['pyo', 'ピョ'], + ['mya', 'ミャ'], + ['myu', 'ミュ'], + ['myo', 'ミョ'], + ['rya', 'リャ'], + ['ryu', 'リュ'], + ['ryo', 'リョ'], + ['fa', 'ファ'], + ['fi', 'フィ'], + ['fe', 'フェ'], + ['fo', 'フォ'], + ['fyu', 'フュ'], + ['fyo', 'フョ'], + ['fya', 'フャ'], + ['va', 'ヴァ'], + ['vi', 'ヴィ'], + ['vu', 'ヴ'], + ['ve', 'ヴェ'], + ['vo', 'ヴォ'], + ['she', 'シェ'], + ['che', 'チェ'], + ['je', 'ジェ'], + ['tsi', 'ツィ'], + ['tse', 'ツェ'], + ['tsa', 'ツァ'], + ['tso', 'ツォ'], + ['thi', 'ティ'], + ['thu', 'テュ'], + ['dhi', 'ディ'], + ['dhu', 'デュ'], + ['wi', 'ウィ'], + ['we', 'ウェ'], + ['wo', 'ウォ'], +]; + +const ROMANIZED_KANA_MONOGRAPHS: ReadonlyArray<[string, string]> = [ + ['a', 
'ア'], + ['i', 'イ'], + ['u', 'ウ'], + ['e', 'エ'], + ['o', 'オ'], + ['ka', 'カ'], + ['ki', 'キ'], + ['ku', 'ク'], + ['ke', 'ケ'], + ['ko', 'コ'], + ['ga', 'ガ'], + ['gi', 'ギ'], + ['gu', 'グ'], + ['ge', 'ゲ'], + ['go', 'ゴ'], + ['sa', 'サ'], + ['shi', 'シ'], + ['si', 'シ'], + ['su', 'ス'], + ['se', 'セ'], + ['so', 'ソ'], + ['za', 'ザ'], + ['ji', 'ジ'], + ['zi', 'ジ'], + ['zu', 'ズ'], + ['ze', 'ゼ'], + ['zo', 'ゾ'], + ['ta', 'タ'], + ['chi', 'チ'], + ['ti', 'チ'], + ['tsu', 'ツ'], + ['tu', 'ツ'], + ['te', 'テ'], + ['to', 'ト'], + ['da', 'ダ'], + ['de', 'デ'], + ['do', 'ド'], + ['na', 'ナ'], + ['ni', 'ニ'], + ['nu', 'ヌ'], + ['ne', 'ネ'], + ['no', 'ノ'], + ['ha', 'ハ'], + ['hi', 'ヒ'], + ['fu', 'フ'], + ['hu', 'フ'], + ['he', 'ヘ'], + ['ho', 'ホ'], + ['ba', 'バ'], + ['bi', 'ビ'], + ['bu', 'ブ'], + ['be', 'ベ'], + ['bo', 'ボ'], + ['pa', 'パ'], + ['pi', 'ピ'], + ['pu', 'プ'], + ['pe', 'ペ'], + ['po', 'ポ'], + ['ma', 'マ'], + ['mi', 'ミ'], + ['mu', 'ム'], + ['me', 'メ'], + ['mo', 'モ'], + ['ya', 'ヤ'], + ['yu', 'ユ'], + ['yo', 'ヨ'], + ['ra', 'ラ'], + ['ri', 'リ'], + ['ru', 'ル'], + ['re', 'レ'], + ['ro', 'ロ'], + ['wa', 'ワ'], + ['w', 'ウ'], + ['wo', 'ヲ'], + ['n', 'ン'], +]; + +function romanizedTokenToKatakana(token: string): string | null { + const normalized = normalizeRomanizedName(token).replace(/\s+/g, ''); + if (!normalized || !/^[a-z]+$/.test(normalized)) { + return null; + } + + let output = ''; + for (let i = 0; i < normalized.length; ) { + const current = normalized[i]!; + const next = normalized[i + 1] ?? 
''; + + if ( + i + 1 < normalized.length && + current === next && + current !== 'n' && + !'aeiou'.includes(current) + ) { + output += 'ッ'; + i += 1; + continue; + } + + if (current === 'n' && next.length > 0 && next !== 'y' && !'aeiou'.includes(next)) { + output += 'ン'; + i += 1; + continue; + } + + const digraph = ROMANIZED_KANA_DIGRAPHS.find(([romaji]) => normalized.startsWith(romaji, i)); + if (digraph) { + output += digraph[1]; + i += digraph[0].length; + continue; + } + + const monograph = ROMANIZED_KANA_MONOGRAPHS.find(([romaji]) => + normalized.startsWith(romaji, i), + ); + if (monograph) { + output += monograph[1]; + i += monograph[0].length; + continue; + } + + return null; + } + + return output.length > 0 ? output : null; +} + +export function buildReadingFromRomanized(value: string): string { + const katakana = romanizedTokenToKatakana(value); + return katakana ? katakanaToHiragana(katakana) : ''; +} + +function buildReadingFromHint(value: string): string { + return buildReading(value) || buildReadingFromRomanized(value); +} + +function scoreJapaneseNamePartLength(length: number): number { + if (length === 2) return 3; + if (length === 1 || length === 3) return 2; + if (length === 4) return 1; + return 0; +} + +function inferJapaneseNameSplitIndex( + nameOriginal: string, + firstNameHint: string, + lastNameHint: string, +): number | null { + const chars = [...nameOriginal]; + if (chars.length < 2) return null; + + const familyHintLength = [...buildReadingFromHint(lastNameHint)].length; + const givenHintLength = [...buildReadingFromHint(firstNameHint)].length; + const totalHintLength = familyHintLength + givenHintLength; + const defaultBoundary = Math.round(chars.length / 2); + let bestIndex: number | null = null; + let bestScore = Number.NEGATIVE_INFINITY; + + for (let index = 1; index < chars.length; index += 1) { + const familyLength = index; + const givenLength = chars.length - index; + let score = + scoreJapaneseNamePartLength(familyLength) + 
scoreJapaneseNamePartLength(givenLength); + + if (chars.length >= 4 && familyLength >= 2 && givenLength >= 2) { + score += 1; + } + + if (totalHintLength > 0) { + const expectedFamilyLength = (chars.length * familyHintLength) / totalHintLength; + score -= Math.abs(familyLength - expectedFamilyLength) * 1.5; + } else { + score -= Math.abs(familyLength - defaultBoundary) * 0.5; + } + + if (familyLength === givenLength) { + score += 0.25; + } + + if (score > bestScore) { + bestScore = score; + bestIndex = index; + } + } + + return bestIndex; +} + +export function addRomanizedKanaAliases(values: Iterable): string[] { + const aliases = new Set(); + for (const value of values) { + const trimmed = value.trim(); + if (!trimmed || !isRomanizedName(trimmed)) continue; + const katakana = romanizedTokenToKatakana(trimmed); + if (katakana) { + aliases.add(katakana); + } + } + return [...aliases]; +} + +export function splitJapaneseName( + nameOriginal: string, + firstNameHint?: string, + lastNameHint?: string, +): JapaneseNameParts { + const trimmed = nameOriginal.trim(); + if (!trimmed) { + return { + hasSpace: false, + original: '', + combined: '', + family: null, + given: null, + }; + } + + const normalizedSpace = trimmed.replace(/[\s\u3000]+/g, ' ').trim(); + const spaceParts = normalizedSpace.split(' ').filter((part) => part.length > 0); + if (spaceParts.length === 2) { + const family = spaceParts[0]!; + const given = spaceParts[1]!; + return { + hasSpace: true, + original: normalizedSpace, + combined: `${family}${given}`, + family, + given, + }; + } + + const middleDotParts = trimmed + .split(/[・・·•]/) + .map((part) => part.trim()) + .filter((part) => part.length > 0); + if (middleDotParts.length === 2) { + const family = middleDotParts[0]!; + const given = middleDotParts[1]!; + return { + hasSpace: true, + original: trimmed, + combined: `${family}${given}`, + family, + given, + }; + } + + const hintedFirst = firstNameHint?.trim() || ''; + const hintedLast = 
lastNameHint?.trim() || ''; + if (hintedFirst && hintedLast) { + const familyGiven = `${hintedLast}${hintedFirst}`; + if (trimmed === familyGiven) { + return { + hasSpace: true, + original: trimmed, + combined: familyGiven, + family: hintedLast, + given: hintedFirst, + }; + } + + const givenFamily = `${hintedFirst}${hintedLast}`; + if (trimmed === givenFamily) { + return { + hasSpace: true, + original: trimmed, + combined: givenFamily, + family: hintedFirst, + given: hintedLast, + }; + } + } + + if (hintedFirst && hintedLast && containsKanji(trimmed)) { + const splitIndex = inferJapaneseNameSplitIndex(trimmed, hintedFirst, hintedLast); + if (splitIndex != null) { + const chars = [...trimmed]; + const family = chars.slice(0, splitIndex).join(''); + const given = chars.slice(splitIndex).join(''); + if (family && given) { + return { + hasSpace: true, + original: trimmed, + combined: trimmed, + family, + given, + }; + } + } + } + + return { + hasSpace: false, + original: trimmed, + combined: trimmed, + family: null, + given: null, + }; +} + +export function generateNameReadings( + nameOriginal: string, + romanizedName: string, + firstNameHint?: string, + lastNameHint?: string, +): NameReadings { + const trimmed = nameOriginal.trim(); + if (!trimmed) { + return { + hasSpace: false, + original: '', + full: '', + family: '', + given: '', + }; + } + + const nameParts = splitJapaneseName(trimmed, firstNameHint, lastNameHint); + if (!nameParts.hasSpace || !nameParts.family || !nameParts.given) { + const full = containsKanji(trimmed) + ? 
buildReadingFromRomanized(romanizedName) + : buildReading(trimmed); + return { + hasSpace: false, + original: trimmed, + full, + family: full, + given: full, + }; + } + + const romanizedParts = romanizedName + .trim() + .split(/\s+/) + .filter((part) => part.length > 0); + const familyFromHints = buildReadingFromHint(lastNameHint || ''); + const givenFromHints = buildReadingFromHint(firstNameHint || ''); + const familyRomajiFallback = romanizedParts[0] || ''; + const givenRomajiFallback = romanizedParts.slice(1).join(' '); + const family = + familyFromHints || + (containsKanji(nameParts.family) + ? buildReadingFromRomanized(familyRomajiFallback) + : buildReading(nameParts.family)); + const given = + givenFromHints || + (containsKanji(nameParts.given) + ? buildReadingFromRomanized(givenRomajiFallback) + : buildReading(nameParts.given)); + const full = + `${family}${given}` || buildReading(trimmed) || buildReadingFromRomanized(romanizedName); + + return { + hasSpace: true, + original: nameParts.original, + full, + family, + given, + }; +} + +export function buildHonorificAliases(value: string): string[] { + return HONORIFIC_SUFFIXES.map((suffix) => `${value}${suffix.term}`); +} diff --git a/src/main/character-dictionary-runtime/snapshot.ts b/src/main/character-dictionary-runtime/snapshot.ts new file mode 100644 index 0000000..a2e6a5a --- /dev/null +++ b/src/main/character-dictionary-runtime/snapshot.ts @@ -0,0 +1,144 @@ +import type { AnilistCharacterDictionaryCollapsibleSectionKey } from '../../types'; +import { CHARACTER_DICTIONARY_FORMAT_VERSION } from './constants'; +import { createDefinitionGlossary } from './glossary'; +import { generateNameReadings, splitJapaneseName } from './name-reading'; +import { buildNameTerms, buildReadingForTerm, buildTermEntry } from './term-building'; +import type { + CharacterDictionaryGlossaryEntry, + CharacterDictionarySnapshot, + CharacterDictionarySnapshotImage, + CharacterDictionaryTermEntry, + CharacterRecord, +} from 
'./types'; + +export function buildSnapshotImagePath(mediaId: number, charId: number, ext: string): string { + return `img/m${mediaId}-c${charId}.${ext}`; +} + +export function buildVaImagePath(mediaId: number, vaId: number, ext: string): string { + return `img/m${mediaId}-va${vaId}.${ext}`; +} + +export function buildSnapshotFromCharacters( + mediaId: number, + mediaTitle: string, + characters: CharacterRecord[], + imagesByCharacterId: Map, + imagesByVaId: Map, + updatedAt: number, + getCollapsibleSectionOpenState: ( + section: AnilistCharacterDictionaryCollapsibleSectionKey, + ) => boolean, +): CharacterDictionarySnapshot { + const termEntries: CharacterDictionaryTermEntry[] = []; + + for (const character of characters) { + const seenTerms = new Set(); + const imagePath = imagesByCharacterId.get(character.id)?.path ?? null; + const vaImagePaths = new Map(); + for (const va of character.voiceActors) { + const vaImg = imagesByVaId.get(va.id); + if (vaImg) vaImagePaths.set(va.id, vaImg.path); + } + const glossary = createDefinitionGlossary( + character, + mediaTitle, + imagePath, + vaImagePaths, + getCollapsibleSectionOpenState, + ); + const candidateTerms = buildNameTerms(character); + const nameParts = splitJapaneseName( + character.nativeName, + character.firstNameHint, + character.lastNameHint, + ); + const readings = generateNameReadings( + character.nativeName, + character.fullName, + character.firstNameHint, + character.lastNameHint, + ); + for (const term of candidateTerms) { + if (seenTerms.has(term)) continue; + seenTerms.add(term); + const reading = buildReadingForTerm(term, character, readings, nameParts); + termEntries.push(buildTermEntry(term, reading, character.role, glossary)); + } + } + + if (termEntries.length === 0) { + throw new Error('No dictionary entries generated from AniList character data.'); + } + + return { + formatVersion: CHARACTER_DICTIONARY_FORMAT_VERSION, + mediaId, + mediaTitle, + entryCount: termEntries.length, + updatedAt, + 
termEntries, + images: [...imagesByCharacterId.values(), ...imagesByVaId.values()], + }; +} + +function getCollapsibleSectionKeyFromTitle( + title: string, +): AnilistCharacterDictionaryCollapsibleSectionKey | null { + if (title === 'Description') return 'description'; + if (title === 'Character Information') return 'characterInformation'; + if (title === 'Voiced by') return 'voicedBy'; + return null; +} + +function applyCollapsibleOpenStatesToStructuredValue( + value: unknown, + getCollapsibleSectionOpenState: ( + section: AnilistCharacterDictionaryCollapsibleSectionKey, + ) => boolean, +): unknown { + if (Array.isArray(value)) { + return value.map((item) => + applyCollapsibleOpenStatesToStructuredValue(item, getCollapsibleSectionOpenState), + ); + } + if (!value || typeof value !== 'object') { + return value; + } + + const record = value as Record; + const next: Record = {}; + for (const [key, child] of Object.entries(record)) { + next[key] = applyCollapsibleOpenStatesToStructuredValue(child, getCollapsibleSectionOpenState); + } + + if (record.tag === 'details') { + const content = Array.isArray(record.content) ? 
record.content : []; + const summary = content[0]; + if (summary && typeof summary === 'object' && !Array.isArray(summary)) { + const summaryContent = (summary as Record).content; + if (typeof summaryContent === 'string') { + const section = getCollapsibleSectionKeyFromTitle(summaryContent); + if (section) { + next.open = getCollapsibleSectionOpenState(section); + } + } + } + } + + return next; +} + +export function applyCollapsibleOpenStatesToTermEntries( + termEntries: CharacterDictionaryTermEntry[], + getCollapsibleSectionOpenState: ( + section: AnilistCharacterDictionaryCollapsibleSectionKey, + ) => boolean, +): CharacterDictionaryTermEntry[] { + return termEntries.map((entry) => { + const glossary = entry[5].map((item) => + applyCollapsibleOpenStatesToStructuredValue(item, getCollapsibleSectionOpenState), + ) as CharacterDictionaryGlossaryEntry[]; + return [...entry.slice(0, 5), glossary, ...entry.slice(6)] as CharacterDictionaryTermEntry; + }); +} diff --git a/src/main/character-dictionary-runtime/term-building.ts b/src/main/character-dictionary-runtime/term-building.ts new file mode 100644 index 0000000..8b776c5 --- /dev/null +++ b/src/main/character-dictionary-runtime/term-building.ts @@ -0,0 +1,170 @@ +import { HONORIFIC_SUFFIXES } from './constants'; +import { + addRomanizedKanaAliases, + buildReading, + buildReadingFromRomanized, + hasKanaOnly, + isRomanizedName, + splitJapaneseName, +} from './name-reading'; +import type { + CharacterDictionaryGlossaryEntry, + CharacterDictionaryRole, + CharacterDictionaryTermEntry, + CharacterRecord, + JapaneseNameParts, + NameReadings, +} from './types'; + +function expandRawNameVariants(rawName: string): string[] { + const trimmed = rawName.trim(); + if (!trimmed) return []; + + const variants = new Set([trimmed]); + const outer = trimmed + .replace(/[((][^()()]+[))]/g, ' ') + .replace(/\s+/g, ' ') + .trim(); + if (outer && outer !== trimmed) { + variants.add(outer); + } + + for (const match of 
trimmed.matchAll(/[((]([^()()]+)[))]/g)) { + const inner = match[1]?.trim() || ''; + if (inner) { + variants.add(inner); + } + } + + return [...variants]; +} + +export function buildNameTerms(character: CharacterRecord): string[] { + const base = new Set(); + const rawNames = [character.nativeName, character.fullName, ...character.alternativeNames]; + for (const rawName of rawNames) { + for (const name of expandRawNameVariants(rawName)) { + base.add(name); + + const compact = name.replace(/[\s\u3000]+/g, ''); + if (compact && compact !== name) { + base.add(compact); + } + + const noMiddleDots = compact.replace(/[・・·•]/g, ''); + if (noMiddleDots && noMiddleDots !== compact) { + base.add(noMiddleDots); + } + + const split = name.split(/[\s\u3000]+/).filter((part) => part.trim().length > 0); + if (split.length === 2) { + base.add(split[0]!); + base.add(split[1]!); + } + + const splitByMiddleDot = name + .split(/[・・·•]/) + .map((part) => part.trim()) + .filter((part) => part.length > 0); + if (splitByMiddleDot.length >= 2) { + for (const part of splitByMiddleDot) { + base.add(part); + } + } + } + } + + const nativeParts = splitJapaneseName( + character.nativeName, + character.firstNameHint, + character.lastNameHint, + ); + if (nativeParts.family) { + base.add(nativeParts.family); + } + if (nativeParts.given) { + base.add(nativeParts.given); + } + + const withHonorifics = new Set(); + for (const entry of base) { + withHonorifics.add(entry); + for (const suffix of HONORIFIC_SUFFIXES) { + withHonorifics.add(`${entry}${suffix.term}`); + } + } + + for (const alias of addRomanizedKanaAliases(withHonorifics)) { + withHonorifics.add(alias); + for (const suffix of HONORIFIC_SUFFIXES) { + withHonorifics.add(`${alias}${suffix.term}`); + } + } + + return [...withHonorifics].filter((entry) => entry.trim().length > 0); +} + +export function buildReadingForTerm( + term: string, + character: CharacterRecord, + readings: NameReadings, + nameParts: JapaneseNameParts, +): string { + for 
(const suffix of HONORIFIC_SUFFIXES) { + if (term.endsWith(suffix.term) && term.length > suffix.term.length) { + const baseTerm = term.slice(0, -suffix.term.length); + const baseReading = buildReadingForTerm(baseTerm, character, readings, nameParts); + return baseReading ? `${baseReading}${suffix.reading}` : ''; + } + } + + const compactNative = character.nativeName.replace(/[\s\u3000]+/g, ''); + const noMiddleDotsNative = compactNative.replace(/[・・·•]/g, ''); + if ( + term === character.nativeName || + term === compactNative || + term === noMiddleDotsNative || + term === nameParts.original || + term === nameParts.combined + ) { + return readings.full; + } + + const familyCompact = nameParts.family?.replace(/[・・·•]/g, '') || ''; + if (nameParts.family && (term === nameParts.family || term === familyCompact)) { + return readings.family; + } + + const givenCompact = nameParts.given?.replace(/[・・·•]/g, '') || ''; + if (nameParts.given && (term === nameParts.given || term === givenCompact)) { + return readings.given; + } + + const compact = term.replace(/[\s\u3000]+/g, ''); + if (hasKanaOnly(compact)) { + return buildReading(compact); + } + + if (isRomanizedName(term)) { + return buildReadingFromRomanized(term) || readings.full; + } + + return ''; +} + +function roleInfo(role: CharacterDictionaryRole): { tag: string; score: number } { + if (role === 'main') return { tag: 'main', score: 100 }; + if (role === 'primary') return { tag: 'primary', score: 75 }; + if (role === 'side') return { tag: 'side', score: 50 }; + return { tag: 'appears', score: 25 }; +} + +export function buildTermEntry( + term: string, + reading: string, + role: CharacterDictionaryRole, + glossary: CharacterDictionaryGlossaryEntry[], +): CharacterDictionaryTermEntry { + const { tag, score } = roleInfo(role); + return [term, reading, `name ${tag}`, '', score, glossary, 0, '']; +} diff --git a/src/main/character-dictionary-runtime/types.ts b/src/main/character-dictionary-runtime/types.ts new file mode 
100644 index 0000000..81b057d --- /dev/null +++ b/src/main/character-dictionary-runtime/types.ts @@ -0,0 +1,136 @@ +import type { AnilistMediaGuess } from '../../core/services/anilist/anilist-updater'; +import type { AnilistCharacterDictionaryCollapsibleSectionKey } from '../../types'; + +export type CharacterDictionaryRole = 'main' | 'primary' | 'side' | 'appears'; + +export type CharacterDictionaryGlossaryEntry = string | Record; + +export type CharacterDictionaryTermEntry = [ + string, + string, + string, + string, + number, + CharacterDictionaryGlossaryEntry[], + number, + string, +]; + +export type CharacterDictionarySnapshotImage = { + path: string; + dataBase64: string; +}; + +export type CharacterBirthday = [number, number]; + +export type JapaneseNameParts = { + hasSpace: boolean; + original: string; + combined: string; + family: string | null; + given: string | null; +}; + +export type NameReadings = { + hasSpace: boolean; + original: string; + full: string; + family: string; + given: string; +}; + +export type CharacterDictionarySnapshot = { + formatVersion: number; + mediaId: number; + mediaTitle: string; + entryCount: number; + updatedAt: number; + termEntries: CharacterDictionaryTermEntry[]; + images: CharacterDictionarySnapshotImage[]; +}; + +export type VoiceActorRecord = { + id: number; + fullName: string; + nativeName: string; + imageUrl: string | null; +}; + +export type CharacterRecord = { + id: number; + role: CharacterDictionaryRole; + firstNameHint: string; + fullName: string; + lastNameHint: string; + nativeName: string; + alternativeNames: string[]; + bloodType: string; + birthday: CharacterBirthday | null; + description: string; + imageUrl: string | null; + age: string; + sex: string; + voiceActors: VoiceActorRecord[]; +}; + +export type CharacterDictionaryBuildResult = { + zipPath: string; + fromCache: boolean; + mediaId: number; + mediaTitle: string; + entryCount: number; + dictionaryTitle?: string; + revision?: string; +}; + +export 
type CharacterDictionaryGenerateOptions = { + refreshTtlMs?: number; +}; + +export type CharacterDictionarySnapshotResult = { + mediaId: number; + mediaTitle: string; + entryCount: number; + fromCache: boolean; + updatedAt: number; +}; + +export type CharacterDictionarySnapshotProgress = { + mediaId: number; + mediaTitle: string; +}; + +export type CharacterDictionarySnapshotProgressCallbacks = { + onChecking?: (progress: CharacterDictionarySnapshotProgress) => void; + onGenerating?: (progress: CharacterDictionarySnapshotProgress) => void; +}; + +export type MergedCharacterDictionaryBuildResult = { + zipPath: string; + revision: string; + dictionaryTitle: string; + entryCount: number; +}; + +export interface CharacterDictionaryRuntimeDeps { + userDataPath: string; + getCurrentMediaPath: () => string | null; + getCurrentMediaTitle: () => string | null; + resolveMediaPathForJimaku: (mediaPath: string | null) => string | null; + guessAnilistMediaInfo: ( + mediaPath: string | null, + mediaTitle: string | null, + ) => Promise; + now: () => number; + sleep?: (ms: number) => Promise; + logInfo?: (message: string) => void; + logWarn?: (message: string) => void; + getCollapsibleSectionOpenState?: ( + section: AnilistCharacterDictionaryCollapsibleSectionKey, + ) => boolean; +} + +export type ResolvedAniListMedia = { + id: number; + title: string; +}; diff --git a/src/main/character-dictionary-runtime/zip.test.ts b/src/main/character-dictionary-runtime/zip.test.ts new file mode 100644 index 0000000..a17f77a --- /dev/null +++ b/src/main/character-dictionary-runtime/zip.test.ts @@ -0,0 +1,104 @@ +import assert from 'node:assert/strict'; +import fs from 'node:fs'; +import os from 'node:os'; +import path from 'node:path'; +import test from 'node:test'; +import { buildDictionaryZip } from './zip'; +import type { CharacterDictionaryTermEntry } from './types'; + +function makeTempDir(): string { + return fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-character-zip-')); +} + 
+function cleanupDir(dirPath: string): void { + fs.rmSync(dirPath, { recursive: true, force: true }); +} + +function readStoredZipEntries(zipPath: string): Map { + const archive = fs.readFileSync(zipPath); + const entries = new Map(); + let cursor = 0; + + while (cursor + 4 <= archive.length) { + const signature = archive.readUInt32LE(cursor); + if (signature === 0x02014b50 || signature === 0x06054b50) { + break; + } + assert.equal(signature, 0x04034b50, `unexpected local file header at offset ${cursor}`); + + const compressedSize = archive.readUInt32LE(cursor + 18); + const fileNameLength = archive.readUInt16LE(cursor + 26); + const extraLength = archive.readUInt16LE(cursor + 28); + const fileNameStart = cursor + 30; + const dataStart = fileNameStart + fileNameLength + extraLength; + const fileName = archive.subarray(fileNameStart, fileNameStart + fileNameLength).toString( + 'utf8', + ); + const data = archive.subarray(dataStart, dataStart + compressedSize); + entries.set(fileName, Buffer.from(data)); + cursor = dataStart + compressedSize; + } + + return entries; +} + +test('buildDictionaryZip writes a valid stored zip without fs.writeFileSync', () => { + const tempDir = makeTempDir(); + const outputPath = path.join(tempDir, 'dictionary.zip'); + const termEntries: CharacterDictionaryTermEntry[] = [ + ['アルファ', 'あるふぁ', '', '', 0, ['Alpha entry'], 0, 'name'], + ]; + const originalWriteFileSync = fs.writeFileSync; + const originalBufferConcat = Buffer.concat; + + try { + fs.writeFileSync = ((..._args: unknown[]) => { + throw new Error('buildDictionaryZip should not call fs.writeFileSync'); + }) as typeof fs.writeFileSync; + + Buffer.concat = ((...args: Parameters) => { + throw new Error(`buildDictionaryZip should not Buffer.concat the full archive (${args[0].length} chunks)`); + }) as typeof Buffer.concat; + + const result = buildDictionaryZip( + outputPath, + 'Dictionary Title', + 'Dictionary Description', + '2026-03-27', + termEntries, + [{ path: 'images/alpha.bin', 
dataBase64: Buffer.from([1, 2, 3]).toString('base64') }], + ); + + assert.equal(result.zipPath, outputPath); + assert.equal(result.entryCount, 1); + + const entries = readStoredZipEntries(outputPath); + assert.deepEqual([...entries.keys()].sort(), [ + 'images/alpha.bin', + 'index.json', + 'tag_bank_1.json', + 'term_bank_1.json', + ]); + + const indexJson = JSON.parse(entries.get('index.json')!.toString('utf8')) as { + title: string; + description: string; + revision: string; + format: number; + }; + assert.equal(indexJson.title, 'Dictionary Title'); + assert.equal(indexJson.description, 'Dictionary Description'); + assert.equal(indexJson.revision, '2026-03-27'); + assert.equal(indexJson.format, 3); + + const termBank = JSON.parse(entries.get('term_bank_1.json')!.toString('utf8')) as + CharacterDictionaryTermEntry[]; + assert.equal(termBank.length, 1); + assert.equal(termBank[0]?.[0], 'アルファ'); + assert.deepEqual(entries.get('images/alpha.bin'), Buffer.from([1, 2, 3])); + } finally { + fs.writeFileSync = originalWriteFileSync; + Buffer.concat = originalBufferConcat; + cleanupDir(tempDir); + } +}); diff --git a/src/main/character-dictionary-runtime/zip.ts b/src/main/character-dictionary-runtime/zip.ts new file mode 100644 index 0000000..85bf34b --- /dev/null +++ b/src/main/character-dictionary-runtime/zip.ts @@ -0,0 +1,250 @@ +import * as fs from 'fs'; +import * as path from 'path'; +import { ensureDir } from './fs-utils'; +import type { CharacterDictionarySnapshotImage, CharacterDictionaryTermEntry } from './types'; + +type ZipEntry = { + name: string; + crc32: number; + size: number; + localHeaderOffset: number; +}; + +function writeUint32LE(buffer: Buffer, value: number, offset: number): number { + const normalized = value >>> 0; + buffer[offset] = normalized & 0xff; + buffer[offset + 1] = (normalized >>> 8) & 0xff; + buffer[offset + 2] = (normalized >>> 16) & 0xff; + buffer[offset + 3] = (normalized >>> 24) & 0xff; + return offset + 4; +} + +export function 
buildDictionaryTitle(mediaId: number): string { + return `SubMiner Character Dictionary (AniList ${mediaId})`; +} + +function createIndex( + dictionaryTitle: string, + description: string, + revision: string, +): Record { + return { + title: dictionaryTitle, + revision, + format: 3, + author: 'SubMiner', + description, + }; +} + +function createTagBank(): Array<[string, string, number, string, number]> { + return [ + ['name', 'partOfSpeech', 0, 'Character name', 0], + ['main', 'name', 0, 'Protagonist', 0], + ['primary', 'name', 0, 'Main character', 0], + ['side', 'name', 0, 'Side character', 0], + ['appears', 'name', 0, 'Minor appearance', 0], + ]; +} + +const CRC32_TABLE = (() => { + const table = new Uint32Array(256); + for (let i = 0; i < 256; i += 1) { + let crc = i; + for (let j = 0; j < 8; j += 1) { + crc = (crc & 1) !== 0 ? 0xedb88320 ^ (crc >>> 1) : crc >>> 1; + } + table[i] = crc >>> 0; + } + return table; +})(); + +function crc32(data: Buffer): number { + let crc = 0xffffffff; + for (const byte of data) { + crc = CRC32_TABLE[(crc ^ byte) & 0xff]! 
^ (crc >>> 8); + } + return (crc ^ 0xffffffff) >>> 0; +} + +function createLocalFileHeader(fileName: Buffer, fileCrc32: number, fileSize: number): Buffer { + const local = Buffer.alloc(30 + fileName.length); + let cursor = 0; + writeUint32LE(local, 0x04034b50, cursor); + cursor += 4; + local.writeUInt16LE(20, cursor); + cursor += 2; + local.writeUInt16LE(0, cursor); + cursor += 2; + local.writeUInt16LE(0, cursor); + cursor += 2; + local.writeUInt16LE(0, cursor); + cursor += 2; + local.writeUInt16LE(0, cursor); + cursor += 2; + writeUint32LE(local, fileCrc32, cursor); + cursor += 4; + writeUint32LE(local, fileSize, cursor); + cursor += 4; + writeUint32LE(local, fileSize, cursor); + cursor += 4; + local.writeUInt16LE(fileName.length, cursor); + cursor += 2; + local.writeUInt16LE(0, cursor); + cursor += 2; + fileName.copy(local, cursor); + return local; +} + +function createCentralDirectoryHeader(entry: ZipEntry): Buffer { + const fileName = Buffer.from(entry.name, 'utf8'); + const central = Buffer.alloc(46 + fileName.length); + let cursor = 0; + writeUint32LE(central, 0x02014b50, cursor); + cursor += 4; + central.writeUInt16LE(20, cursor); + cursor += 2; + central.writeUInt16LE(20, cursor); + cursor += 2; + central.writeUInt16LE(0, cursor); + cursor += 2; + central.writeUInt16LE(0, cursor); + cursor += 2; + central.writeUInt16LE(0, cursor); + cursor += 2; + central.writeUInt16LE(0, cursor); + cursor += 2; + writeUint32LE(central, entry.crc32, cursor); + cursor += 4; + writeUint32LE(central, entry.size, cursor); + cursor += 4; + writeUint32LE(central, entry.size, cursor); + cursor += 4; + central.writeUInt16LE(fileName.length, cursor); + cursor += 2; + central.writeUInt16LE(0, cursor); + cursor += 2; + central.writeUInt16LE(0, cursor); + cursor += 2; + central.writeUInt16LE(0, cursor); + cursor += 2; + central.writeUInt16LE(0, cursor); + cursor += 2; + writeUint32LE(central, 0, cursor); + cursor += 4; + writeUint32LE(central, entry.localHeaderOffset, cursor); + cursor 
+= 4; + fileName.copy(central, cursor); + return central; +} + +function createEndOfCentralDirectory(entriesLength: number, centralSize: number, centralStart: number): Buffer { + const end = Buffer.alloc(22); + let cursor = 0; + writeUint32LE(end, 0x06054b50, cursor); + cursor += 4; + end.writeUInt16LE(0, cursor); + cursor += 2; + end.writeUInt16LE(0, cursor); + cursor += 2; + end.writeUInt16LE(entriesLength, cursor); + cursor += 2; + end.writeUInt16LE(entriesLength, cursor); + cursor += 2; + writeUint32LE(end, centralSize, cursor); + cursor += 4; + writeUint32LE(end, centralStart, cursor); + cursor += 4; + end.writeUInt16LE(0, cursor); + return end; +} + +function writeBuffer(fd: number, buffer: Buffer): void { + let written = 0; + while (written < buffer.length) { + written += fs.writeSync(fd, buffer, written, buffer.length - written); + } +} + +function writeStoredZip(outputPath: string, files: Iterable<{ name: string; data: Buffer }>): void { + const entries: ZipEntry[] = []; + let offset = 0; + const fd = fs.openSync(outputPath, 'w'); + + try { + for (const file of files) { + const fileName = Buffer.from(file.name, 'utf8'); + const fileSize = file.data.length; + const fileCrc32 = crc32(file.data); + const localHeader = createLocalFileHeader(fileName, fileCrc32, fileSize); + writeBuffer(fd, localHeader); + writeBuffer(fd, file.data); + entries.push({ + name: file.name, + crc32: fileCrc32, + size: fileSize, + localHeaderOffset: offset, + }); + offset += localHeader.length + fileSize; + } + + const centralStart = offset; + for (const entry of entries) { + const centralHeader = createCentralDirectoryHeader(entry); + writeBuffer(fd, centralHeader); + offset += centralHeader.length; + } + + const centralSize = offset - centralStart; + writeBuffer(fd, createEndOfCentralDirectory(entries.length, centralSize, centralStart)); + } catch (error) { + fs.closeSync(fd); + fs.rmSync(outputPath, { force: true }); + throw error; + } + + fs.closeSync(fd); +} + +export function 
buildDictionaryZip( + outputPath: string, + dictionaryTitle: string, + description: string, + revision: string, + termEntries: CharacterDictionaryTermEntry[], + images: CharacterDictionarySnapshotImage[], +): { zipPath: string; entryCount: number } { + ensureDir(path.dirname(outputPath)); + + function* zipFiles(): Iterable<{ name: string; data: Buffer }> { + yield { + name: 'index.json', + data: Buffer.from( + JSON.stringify(createIndex(dictionaryTitle, description, revision), null, 2), + 'utf8', + ), + }; + yield { + name: 'tag_bank_1.json', + data: Buffer.from(JSON.stringify(createTagBank()), 'utf8'), + }; + + for (const image of images) { + yield { + name: image.path, + data: Buffer.from(image.dataBase64, 'base64'), + }; + } + + const entriesPerBank = 10_000; + for (let i = 0; i < termEntries.length; i += entriesPerBank) { + yield { + name: `term_bank_${Math.floor(i / entriesPerBank) + 1}.json`, + data: Buffer.from(JSON.stringify(termEntries.slice(i, i + entriesPerBank)), 'utf8'), + }; + } + } + + writeStoredZip(outputPath, zipFiles()); + return { zipPath: outputPath, entryCount: termEntries.length }; +} diff --git a/src/main/controller-config-update.test.ts b/src/main/controller-config-update.test.ts index 73d0ab4..a3ffeb0 100644 --- a/src/main/controller-config-update.test.ts +++ b/src/main/controller-config-update.test.ts @@ -1,8 +1,18 @@ import assert from 'node:assert/strict'; +import fs from 'node:fs'; +import path from 'node:path'; import test from 'node:test'; import { applyControllerConfigUpdate } from './controller-config-update.js'; +test('SM-012 controller config update path does not use JSON serialize-clone helpers', () => { + const source = fs.readFileSync( + path.join(process.cwd(), 'src/main/controller-config-update.ts'), + 'utf-8', + ); + assert.equal(source.includes('JSON.parse(JSON.stringify('), false); +}); + test('applyControllerConfigUpdate replaces binding descriptors instead of deep-merging them', () => { const next = 
applyControllerConfigUpdate( { @@ -52,3 +62,16 @@ test('applyControllerConfigUpdate merges buttonIndices while replacing only upda assert.deepEqual(next.bindings?.toggleLookup, { kind: 'button', buttonIndex: 0 }); assert.deepEqual(next.bindings?.closeLookup, { kind: 'none' }); }); + +test('applyControllerConfigUpdate detaches updated binding values from the patch object', () => { + const update = { + bindings: { + toggleLookup: { kind: 'button' as const, buttonIndex: 7 }, + }, + }; + + const next = applyControllerConfigUpdate(undefined, update); + update.bindings.toggleLookup.buttonIndex = 99; + + assert.deepEqual(next.bindings?.toggleLookup, { kind: 'button', buttonIndex: 7 }); +}); diff --git a/src/main/controller-config-update.ts b/src/main/controller-config-update.ts index de58a89..c854069 100644 --- a/src/main/controller-config-update.ts +++ b/src/main/controller-config-update.ts @@ -28,7 +28,7 @@ export function applyControllerConfigUpdate( [keyof RawControllerBindings, RawControllerBindings[keyof RawControllerBindings] | undefined] >) { if (value === undefined) continue; - (nextBindings as Record)[key] = JSON.parse(JSON.stringify(value)); + (nextBindings as Record)[key] = structuredClone(value); } nextController.bindings = nextBindings; diff --git a/src/main/overlay-runtime.test.ts b/src/main/overlay-runtime.test.ts index a5d67a8..82c8a64 100644 --- a/src/main/overlay-runtime.test.ts +++ b/src/main/overlay-runtime.test.ts @@ -287,10 +287,14 @@ test('sendToActiveOverlayWindow can prefer modal window even when main overlay i setModalWindowBounds: () => {}, }); - const sent = runtime.sendToActiveOverlayWindow('youtube:picker-open', { sessionId: 'yt-1' }, { - restoreOnModalClose: 'youtube-track-picker', - preferModalWindow: true, - }); + const sent = runtime.sendToActiveOverlayWindow( + 'youtube:picker-open', + { sessionId: 'yt-1' }, + { + restoreOnModalClose: 'youtube-track-picker', + preferModalWindow: true, + }, + ); assert.equal(sent, true); 
assert.deepEqual(mainWindow.sent, []); @@ -309,10 +313,14 @@ test('modal window path makes visible main overlay click-through until modal clo setModalWindowBounds: () => {}, }); - const sent = runtime.sendToActiveOverlayWindow('youtube:picker-open', { sessionId: 'yt-1' }, { - restoreOnModalClose: 'youtube-track-picker', - preferModalWindow: true, - }); + const sent = runtime.sendToActiveOverlayWindow( + 'youtube:picker-open', + { sessionId: 'yt-1' }, + { + restoreOnModalClose: 'youtube-track-picker', + preferModalWindow: true, + }, + ); runtime.notifyOverlayModalOpened('youtube-track-picker'); assert.equal(sent, true); @@ -336,10 +344,14 @@ test('modal window path hides visible main overlay until modal closes', () => { setModalWindowBounds: () => {}, }); - runtime.sendToActiveOverlayWindow('youtube:picker-open', { sessionId: 'yt-1' }, { - restoreOnModalClose: 'youtube-track-picker', - preferModalWindow: true, - }); + runtime.sendToActiveOverlayWindow( + 'youtube:picker-open', + { sessionId: 'yt-1' }, + { + restoreOnModalClose: 'youtube-track-picker', + preferModalWindow: true, + }, + ); runtime.notifyOverlayModalOpened('youtube-track-picker'); assert.equal(mainWindow.getHideCount(), 1); @@ -516,9 +528,13 @@ test('waitForModalOpen resolves true after modal acknowledgement', async () => { setModalWindowBounds: () => {}, }); - runtime.sendToActiveOverlayWindow('youtube:picker-open', { sessionId: 'yt-1' }, { - restoreOnModalClose: 'youtube-track-picker', - }); + runtime.sendToActiveOverlayWindow( + 'youtube:picker-open', + { sessionId: 'yt-1' }, + { + restoreOnModalClose: 'youtube-track-picker', + }, + ); const pending = runtime.waitForModalOpen('youtube-track-picker', 1000); runtime.notifyOverlayModalOpened('youtube-track-picker'); diff --git a/src/main/overlay-runtime.ts b/src/main/overlay-runtime.ts index f6334b7..527c7af 100644 --- a/src/main/overlay-runtime.ts +++ b/src/main/overlay-runtime.ts @@ -357,10 +357,7 @@ export function createOverlayModalRuntimeService( 
showModalWindow(targetWindow); }; - const waitForModalOpen = async ( - modal: OverlayHostedModal, - timeoutMs: number, - ): Promise => + const waitForModalOpen = async (modal: OverlayHostedModal, timeoutMs: number): Promise => await new Promise((resolve) => { const waiters = modalOpenWaiters.get(modal) ?? []; const finish = (opened: boolean): void => { diff --git a/src/main/runtime/anilist-post-watch-main-deps.test.ts b/src/main/runtime/anilist-post-watch-main-deps.test.ts index 7bb48e8..fc3c7fe 100644 --- a/src/main/runtime/anilist-post-watch-main-deps.test.ts +++ b/src/main/runtime/anilist-post-watch-main-deps.test.ts @@ -21,7 +21,7 @@ test('process next anilist retry update main deps builder maps callbacks', async now: () => 7, })(); - assert.deepEqual(deps.nextReady(), { key: 'k', title: 't', episode: 1 }); + assert.deepEqual(deps.nextReady(), { key: 'k', title: 't', season: null, episode: 1 }); deps.refreshRetryQueueState(); deps.setLastAttemptAt(1); deps.setLastError('x'); diff --git a/src/main/runtime/anilist-setup.test.ts b/src/main/runtime/anilist-setup.test.ts index 80e8247..7741735 100644 --- a/src/main/runtime/anilist-setup.test.ts +++ b/src/main/runtime/anilist-setup.test.ts @@ -84,51 +84,63 @@ test('findAnilistSetupDeepLinkArgvUrl returns null when missing', () => { }); test('consumeAnilistSetupCallbackUrl persists token and closes window for callback URL', () => { + const originalDateNow = Date.now; const events: string[] = []; - const handled = consumeAnilistSetupCallbackUrl({ - rawUrl: 'https://anilist.subminer.moe/#access_token=saved-token', - saveToken: (value: string) => events.push(`save:${value}`), - setCachedToken: (value: string) => events.push(`cache:${value}`), - setResolvedState: (timestampMs: number) => - events.push(`state:${timestampMs > 0 ? 
'ok' : 'bad'}`), - setSetupPageOpened: (opened: boolean) => events.push(`opened:${opened}`), - onSuccess: () => events.push('success'), - closeWindow: () => events.push('close'), - }); + try { + Date.now = () => 120_000; + const handled = consumeAnilistSetupCallbackUrl({ + rawUrl: 'https://anilist.subminer.moe/#access_token=saved-token', + saveToken: (value: string) => events.push(`save:${value}`), + setCachedToken: (value: string) => events.push(`cache:${value}`), + setResolvedState: (timestampMs: number) => + events.push(`state:${timestampMs > 0 ? 'ok' : 'bad'}`), + setSetupPageOpened: (opened: boolean) => events.push(`opened:${opened}`), + onSuccess: () => events.push('success'), + closeWindow: () => events.push('close'), + }); - assert.equal(handled, true); - assert.deepEqual(events, [ - 'save:saved-token', - 'cache:saved-token', - 'state:ok', - 'opened:false', - 'success', - 'close', - ]); + assert.equal(handled, true); + assert.deepEqual(events, [ + 'save:saved-token', + 'cache:saved-token', + 'state:ok', + 'opened:false', + 'success', + 'close', + ]); + } finally { + Date.now = originalDateNow; + } }); test('consumeAnilistSetupCallbackUrl persists token for subminer deep link URL', () => { + const originalDateNow = Date.now; const events: string[] = []; - const handled = consumeAnilistSetupCallbackUrl({ - rawUrl: 'subminer://anilist-setup?access_token=saved-token', - saveToken: (value: string) => events.push(`save:${value}`), - setCachedToken: (value: string) => events.push(`cache:${value}`), - setResolvedState: (timestampMs: number) => - events.push(`state:${timestampMs > 0 ? 
'ok' : 'bad'}`), - setSetupPageOpened: (opened: boolean) => events.push(`opened:${opened}`), - onSuccess: () => events.push('success'), - closeWindow: () => events.push('close'), - }); + try { + Date.now = () => 120_000; + const handled = consumeAnilistSetupCallbackUrl({ + rawUrl: 'subminer://anilist-setup?access_token=saved-token', + saveToken: (value: string) => events.push(`save:${value}`), + setCachedToken: (value: string) => events.push(`cache:${value}`), + setResolvedState: (timestampMs: number) => + events.push(`state:${timestampMs > 0 ? 'ok' : 'bad'}`), + setSetupPageOpened: (opened: boolean) => events.push(`opened:${opened}`), + onSuccess: () => events.push('success'), + closeWindow: () => events.push('close'), + }); - assert.equal(handled, true); - assert.deepEqual(events, [ - 'save:saved-token', - 'cache:saved-token', - 'state:ok', - 'opened:false', - 'success', - 'close', - ]); + assert.equal(handled, true); + assert.deepEqual(events, [ + 'save:saved-token', + 'cache:saved-token', + 'state:ok', + 'opened:false', + 'success', + 'close', + ]); + } finally { + Date.now = originalDateNow; + } }); test('consumeAnilistSetupCallbackUrl ignores non-callback URLs', () => { diff --git a/src/main/runtime/autoplay-ready-gate.test.ts b/src/main/runtime/autoplay-ready-gate.test.ts new file mode 100644 index 0000000..8ad5313 --- /dev/null +++ b/src/main/runtime/autoplay-ready-gate.test.ts @@ -0,0 +1,98 @@ +import assert from 'node:assert/strict'; +import test from 'node:test'; +import { createAutoplayReadyGate } from './autoplay-ready-gate'; + +test('autoplay ready gate suppresses duplicate media signals unless forced while paused', async () => { + const commands: Array> = []; + const scheduled: Array<() => void> = []; + + const gate = createAutoplayReadyGate({ + isAppOwnedFlowInFlight: () => false, + getCurrentMediaPath: () => '/media/video.mkv', + getCurrentVideoPath: () => null, + getPlaybackPaused: () => true, + getMpvClient: () => + ({ + connected: true, + 
requestProperty: async () => true, + send: ({ command }: { command: Array }) => { + commands.push(command); + }, + }) as never, + signalPluginAutoplayReady: () => { + commands.push(['script-message', 'subminer-autoplay-ready']); + }, + schedule: (callback) => { + scheduled.push(callback); + return 1 as never; + }, + logDebug: () => {}, + }); + + gate.maybeSignalPluginAutoplayReady({ text: '字幕', tokens: null }); + gate.maybeSignalPluginAutoplayReady({ text: '字幕', tokens: null }); + gate.maybeSignalPluginAutoplayReady({ text: '字幕', tokens: null }, { forceWhilePaused: true }); + + await new Promise((resolve) => setTimeout(resolve, 0)); + const firstScheduled = scheduled.shift(); + firstScheduled?.(); + await new Promise((resolve) => setTimeout(resolve, 0)); + + assert.deepEqual(commands.filter((command) => command[0] === 'script-message'), [ + ['script-message', 'subminer-autoplay-ready'], + ]); + assert.ok( + commands.some( + (command) => + command[0] === 'set_property' && command[1] === 'pause' && command[2] === false, + ), + ); + assert.equal(scheduled.length > 0, true); +}); + +test('autoplay ready gate retry loop does not re-signal plugin readiness', async () => { + const commands: Array> = []; + const scheduled: Array<() => void> = []; + + const gate = createAutoplayReadyGate({ + isAppOwnedFlowInFlight: () => false, + getCurrentMediaPath: () => '/media/video.mkv', + getCurrentVideoPath: () => null, + getPlaybackPaused: () => true, + getMpvClient: () => + ({ + connected: true, + requestProperty: async () => true, + send: ({ command }: { command: Array }) => { + commands.push(command); + }, + }) as never, + signalPluginAutoplayReady: () => { + commands.push(['script-message', 'subminer-autoplay-ready']); + }, + schedule: (callback) => { + scheduled.push(callback); + return 1 as never; + }, + logDebug: () => {}, + }); + + gate.maybeSignalPluginAutoplayReady({ text: '字幕', tokens: null }, { forceWhilePaused: true }); + + await new Promise((resolve) => 
setTimeout(resolve, 0)); + for (const callback of scheduled.splice(0, 3)) { + callback(); + await new Promise((resolve) => setTimeout(resolve, 0)); + } + + assert.deepEqual(commands.filter((command) => command[0] === 'script-message'), [ + ['script-message', 'subminer-autoplay-ready'], + ]); + assert.equal( + commands.filter( + (command) => + command[0] === 'set_property' && command[1] === 'pause' && command[2] === false, + ).length > 0, + true, + ); +}); diff --git a/src/main/runtime/autoplay-ready-gate.ts b/src/main/runtime/autoplay-ready-gate.ts new file mode 100644 index 0000000..cd916d6 --- /dev/null +++ b/src/main/runtime/autoplay-ready-gate.ts @@ -0,0 +1,128 @@ +import type { SubtitleData } from '../../types'; +import { resolveAutoplayReadyMaxReleaseAttempts } from './startup-autoplay-release-policy'; + +type MpvClientLike = { + connected?: boolean; + requestProperty: (property: string) => Promise; + send: (payload: { command: Array }) => void; +}; + +export type AutoplayReadyGateDeps = { + isAppOwnedFlowInFlight: () => boolean; + getCurrentMediaPath: () => string | null; + getCurrentVideoPath: () => string | null; + getPlaybackPaused: () => boolean | null; + getMpvClient: () => MpvClientLike | null; + signalPluginAutoplayReady: () => void; + schedule: (callback: () => void, delayMs: number) => ReturnType; + logDebug: (message: string) => void; +}; + +export function createAutoplayReadyGate(deps: AutoplayReadyGateDeps) { + let autoPlayReadySignalMediaPath: string | null = null; + let autoPlayReadySignalGeneration = 0; + + const invalidatePendingAutoplayReadyFallbacks = (): void => { + autoPlayReadySignalMediaPath = null; + autoPlayReadySignalGeneration += 1; + }; + + const maybeSignalPluginAutoplayReady = ( + payload: SubtitleData, + options?: { forceWhilePaused?: boolean }, + ): void => { + if (deps.isAppOwnedFlowInFlight()) { + deps.logDebug('[autoplay-ready] suppressed while app-owned YouTube flow is active'); + return; + } + if (!payload.text.trim()) { + 
return; + } + + const mediaPath = + deps.getCurrentMediaPath()?.trim() || + deps.getCurrentVideoPath()?.trim() || + '__unknown__'; + const duplicateMediaSignal = autoPlayReadySignalMediaPath === mediaPath; + const allowDuplicateWhilePaused = + options?.forceWhilePaused === true && deps.getPlaybackPaused() !== false; + const releaseRetryDelayMs = 200; + const maxReleaseAttempts = resolveAutoplayReadyMaxReleaseAttempts({ + forceWhilePaused: options?.forceWhilePaused === true, + retryDelayMs: releaseRetryDelayMs, + }); + + const isPlaybackPaused = async (client: MpvClientLike): Promise => { + try { + const pauseProperty = await client.requestProperty('pause'); + if (typeof pauseProperty === 'boolean') { + return pauseProperty; + } + if (typeof pauseProperty === 'string') { + return pauseProperty.toLowerCase() !== 'no' && pauseProperty !== '0'; + } + if (typeof pauseProperty === 'number') { + return pauseProperty !== 0; + } + } catch (error) { + deps.logDebug( + `[autoplay-ready] failed to read pause property for media ${mediaPath}: ${ + error instanceof Error ? 
error.message : String(error) + }`, + ); + } + + return true; + }; + + const attemptRelease = (playbackGeneration: number, attempt: number): void => { + void (async () => { + if ( + autoPlayReadySignalMediaPath !== mediaPath || + playbackGeneration !== autoPlayReadySignalGeneration + ) { + return; + } + + const mpvClient = deps.getMpvClient(); + if (!mpvClient?.connected) { + if (attempt < maxReleaseAttempts) { + deps.schedule(() => attemptRelease(playbackGeneration, attempt + 1), releaseRetryDelayMs); + } + return; + } + + const shouldUnpause = await isPlaybackPaused(mpvClient); + if (!shouldUnpause) { + return; + } + + mpvClient.send({ command: ['set_property', 'pause', false] }); + if (attempt < maxReleaseAttempts) { + deps.schedule(() => attemptRelease(playbackGeneration, attempt + 1), releaseRetryDelayMs); + } + })(); + }; + + if (duplicateMediaSignal && !allowDuplicateWhilePaused) { + return; + } + + if (!duplicateMediaSignal) { + autoPlayReadySignalMediaPath = mediaPath; + const playbackGeneration = ++autoPlayReadySignalGeneration; + deps.signalPluginAutoplayReady(); + attemptRelease(playbackGeneration, 0); + return; + } + + const playbackGeneration = ++autoPlayReadySignalGeneration; + attemptRelease(playbackGeneration, 0); + }; + + return { + getAutoPlayReadySignalMediaPath: (): string | null => autoPlayReadySignalMediaPath, + invalidatePendingAutoplayReadyFallbacks, + maybeSignalPluginAutoplayReady, + }; +} diff --git a/src/main/runtime/character-dictionary-auto-sync.ts b/src/main/runtime/character-dictionary-auto-sync.ts index c9b78a1..d65e1d6 100644 --- a/src/main/runtime/character-dictionary-auto-sync.ts +++ b/src/main/runtime/character-dictionary-auto-sync.ts @@ -1,5 +1,6 @@ import * as fs from 'fs'; import * as path from 'path'; +import { ensureDir } from '../../shared/fs-utils'; import type { AnilistCharacterDictionaryProfileScope } from '../../types'; import type { CharacterDictionarySnapshotProgressCallbacks, @@ -63,12 +64,6 @@ export interface 
CharacterDictionaryAutoSyncRuntimeDeps { onSyncComplete?: (result: { mediaId: number; mediaTitle: string; changed: boolean }) => void; } -function ensureDir(dirPath: string): void { - if (!fs.existsSync(dirPath)) { - fs.mkdirSync(dirPath, { recursive: true }); - } -} - function normalizeMediaId(rawMediaId: number): number | null { const mediaId = Math.max(1, Math.floor(rawMediaId)); return Number.isFinite(mediaId) ? mediaId : null; diff --git a/src/main/runtime/composers/anilist-setup-composer.test.ts b/src/main/runtime/composers/anilist-setup-composer.test.ts index df22dd9..3e81cac 100644 --- a/src/main/runtime/composers/anilist-setup-composer.test.ts +++ b/src/main/runtime/composers/anilist-setup-composer.test.ts @@ -3,11 +3,14 @@ import assert from 'node:assert/strict'; import { composeAnilistSetupHandlers } from './anilist-setup-composer'; test('composeAnilistSetupHandlers returns callable setup handlers', () => { + const calls: string[] = []; const composed = composeAnilistSetupHandlers({ notifyDeps: { hasMpvClient: () => false, showMpvOsd: () => {}, - showDesktopNotification: () => {}, + showDesktopNotification: (title, opts) => { + calls.push(`notify:${opts.body}`); + }, logInfo: () => {}, }, consumeTokenDeps: { @@ -37,4 +40,16 @@ test('composeAnilistSetupHandlers returns callable setup handlers', () => { assert.equal(typeof composed.consumeAnilistSetupTokenFromUrl, 'function'); assert.equal(typeof composed.handleAnilistSetupProtocolUrl, 'function'); assert.equal(typeof composed.registerSubminerProtocolClient, 'function'); + + // notifyAnilistSetup forwards to showDesktopNotification when no MPV client + composed.notifyAnilistSetup('Setup complete'); + assert.deepEqual(calls, ['notify:Setup complete']); + + // handleAnilistSetupProtocolUrl returns false for non-subminer URLs + const handled = composed.handleAnilistSetupProtocolUrl('https://other.example.com/'); + assert.equal(handled, false); + + // handleAnilistSetupProtocolUrl returns true for subminer:// 
URLs + const handledProtocol = composed.handleAnilistSetupProtocolUrl('subminer://anilist-setup?code=abc'); + assert.equal(handledProtocol, true); }); diff --git a/src/main/runtime/composers/app-ready-composer.test.ts b/src/main/runtime/composers/app-ready-composer.test.ts index 11b8641..d4e98eb 100644 --- a/src/main/runtime/composers/app-ready-composer.test.ts +++ b/src/main/runtime/composers/app-ready-composer.test.ts @@ -3,9 +3,13 @@ import test from 'node:test'; import { composeAppReadyRuntime } from './app-ready-composer'; test('composeAppReadyRuntime returns reload/critical/app-ready handlers', () => { + const calls: string[] = []; const composed = composeAppReadyRuntime({ reloadConfigMainDeps: { - reloadConfigStrict: () => ({ ok: true, path: '/tmp/config.jsonc', warnings: [] }), + reloadConfigStrict: () => { + calls.push('reloadConfigStrict'); + return { ok: true, path: '/tmp/config.jsonc', warnings: [] }; + }, logInfo: () => {}, logWarning: () => {}, showDesktopNotification: () => {}, @@ -79,4 +83,8 @@ test('composeAppReadyRuntime returns reload/critical/app-ready handlers', () => assert.equal(typeof composed.reloadConfig, 'function'); assert.equal(typeof composed.criticalConfigError, 'function'); assert.equal(typeof composed.appReadyRuntimeRunner, 'function'); + + // reloadConfig invokes the injected reloadConfigStrict dep + composed.reloadConfig(); + assert.deepEqual(calls, ['reloadConfigStrict']); }); diff --git a/src/main/runtime/composers/cli-startup-composer.test.ts b/src/main/runtime/composers/cli-startup-composer.test.ts new file mode 100644 index 0000000..071befc --- /dev/null +++ b/src/main/runtime/composers/cli-startup-composer.test.ts @@ -0,0 +1,91 @@ +import assert from 'node:assert/strict'; +import test from 'node:test'; +import type { CliArgs } from '../../../cli/args'; +import { composeCliStartupHandlers } from './cli-startup-composer'; + +test('composeCliStartupHandlers returns callable CLI startup handlers', () => { + const calls: string[] 
= []; + const handlers = composeCliStartupHandlers({ + cliCommandContextMainDeps: { + appState: {} as never, + setLogLevel: () => {}, + texthookerService: {} as never, + getResolvedConfig: () => ({}) as never, + openExternal: async () => {}, + logBrowserOpenError: () => {}, + showMpvOsd: () => {}, + initializeOverlayRuntime: () => {}, + toggleVisibleOverlay: () => {}, + openFirstRunSetupWindow: () => {}, + setVisibleOverlayVisible: () => {}, + copyCurrentSubtitle: () => {}, + startPendingMultiCopy: () => {}, + mineSentenceCard: async () => {}, + startPendingMineSentenceMultiple: () => {}, + updateLastCardFromClipboard: async () => {}, + refreshKnownWordCache: async () => {}, + triggerFieldGrouping: async () => {}, + triggerSubsyncFromConfig: async () => {}, + markLastCardAsAudioCard: async () => {}, + getAnilistStatus: () => ({}) as never, + clearAnilistToken: () => {}, + openAnilistSetupWindow: () => {}, + openJellyfinSetupWindow: () => {}, + getAnilistQueueStatus: () => ({}) as never, + processNextAnilistRetryUpdate: async () => ({ ok: true, message: 'done' }), + generateCharacterDictionary: async () => + ({ zipPath: '/tmp/test.zip', fromCache: false, mediaId: 1, mediaTitle: 'Test', entryCount: 1 }), + runJellyfinCommand: async () => {}, + runStatsCommand: async () => {}, + runYoutubePlaybackFlow: async () => {}, + openYomitanSettings: () => {}, + cycleSecondarySubMode: () => {}, + openRuntimeOptionsPalette: () => {}, + printHelp: () => {}, + stopApp: () => {}, + hasMainWindow: () => false, + getMultiCopyTimeoutMs: () => 0, + schedule: () => 0 as never, + logInfo: () => {}, + logWarn: () => {}, + logError: () => {}, + }, + cliCommandRuntimeHandlerMainDeps: { + handleTexthookerOnlyModeTransitionMainDeps: { + isTexthookerOnlyMode: () => false, + ensureOverlayStartupPrereqs: () => {}, + setTexthookerOnlyMode: () => {}, + commandNeedsOverlayStartupPrereqs: () => false, + startBackgroundWarmups: () => {}, + logInfo: () => {}, + }, + 
handleCliCommandRuntimeServiceWithContext: (args, _source, _ctx) => { + calls.push(`handleCommand:${(args as { command?: string }).command ?? 'unknown'}`); + }, + }, + initialArgsRuntimeHandlerMainDeps: { + getInitialArgs: () => null, + isBackgroundMode: () => false, + shouldEnsureTrayOnStartup: () => false, + shouldRunHeadlessInitialCommand: () => false, + ensureTray: () => {}, + isTexthookerOnlyMode: () => false, + hasImmersionTracker: () => false, + getMpvClient: () => null, + commandNeedsOverlayStartupPrereqs: () => false, + commandNeedsOverlayRuntime: () => false, + ensureOverlayStartupPrereqs: () => {}, + isOverlayRuntimeInitialized: () => false, + initializeOverlayRuntime: () => {}, + logInfo: () => {}, + }, + }); + + assert.equal(typeof handlers.createCliCommandContext, 'function'); + assert.equal(typeof handlers.handleCliCommand, 'function'); + assert.equal(typeof handlers.handleInitialArgs, 'function'); + + // handleCliCommand routes to the injected handleCliCommandRuntimeServiceWithContext dep + handlers.handleCliCommand({ command: 'start' } as unknown as CliArgs); + assert.deepEqual(calls, ['handleCommand:start']); +}); diff --git a/src/main/runtime/composers/cli-startup-composer.ts b/src/main/runtime/composers/cli-startup-composer.ts new file mode 100644 index 0000000..a473c0a --- /dev/null +++ b/src/main/runtime/composers/cli-startup-composer.ts @@ -0,0 +1,50 @@ +import type { CliArgs, CliCommandSource } from '../../../cli/args'; +import { createCliCommandContextFactory } from '../cli-command-context-factory'; +import { createCliCommandRuntimeHandler } from '../cli-command-runtime-handler'; +import { createInitialArgsRuntimeHandler } from '../initial-args-runtime-handler'; +import type { ComposerInputs, ComposerOutputs } from './contracts'; + +type CliCommandContextMainDeps = Parameters[0]; +type CliCommandContext = ReturnType>; +type CliCommandRuntimeHandlerMainDeps = Omit< + Parameters>[0], + 'createCliCommandContext' +>; +type 
InitialArgsRuntimeHandlerMainDeps = Omit< + Parameters[0], + 'handleCliCommand' +>; + +export type CliStartupComposerOptions = ComposerInputs<{ + cliCommandContextMainDeps: CliCommandContextMainDeps; + cliCommandRuntimeHandlerMainDeps: CliCommandRuntimeHandlerMainDeps; + initialArgsRuntimeHandlerMainDeps: InitialArgsRuntimeHandlerMainDeps; +}>; + +export type CliStartupComposerResult = ComposerOutputs<{ + createCliCommandContext: () => CliCommandContext; + handleCliCommand: (args: CliArgs, source?: CliCommandSource) => void; + handleInitialArgs: () => void; +}>; + +export function composeCliStartupHandlers( + options: CliStartupComposerOptions, +): CliStartupComposerResult { + const createCliCommandContext = createCliCommandContextFactory( + options.cliCommandContextMainDeps, + ); + const handleCliCommand = createCliCommandRuntimeHandler({ + ...options.cliCommandRuntimeHandlerMainDeps, + createCliCommandContext: () => createCliCommandContext(), + }); + const handleInitialArgs = createInitialArgsRuntimeHandler({ + ...options.initialArgsRuntimeHandlerMainDeps, + handleCliCommand: (args, source) => handleCliCommand(args, source), + }); + + return { + createCliCommandContext, + handleCliCommand, + handleInitialArgs, + }; +} diff --git a/src/main/runtime/composers/headless-startup-composer.test.ts b/src/main/runtime/composers/headless-startup-composer.test.ts new file mode 100644 index 0000000..2a2c9eb --- /dev/null +++ b/src/main/runtime/composers/headless-startup-composer.test.ts @@ -0,0 +1,66 @@ +import assert from 'node:assert/strict'; +import test from 'node:test'; +import type { CliArgs } from '../../../cli/args'; +import { composeHeadlessStartupHandlers } from './headless-startup-composer'; + +test('composeHeadlessStartupHandlers returns startup bootstrap handlers', () => { + const calls: string[] = []; + + const handlers = composeHeadlessStartupHandlers< + CliArgs, + { mode: string }, + { startAppLifecycle: (args: CliArgs) => void } + >({ + 
startupRuntimeHandlersDeps: { + appLifecycleRuntimeRunnerMainDeps: { + app: { on: () => {} } as never, + platform: 'darwin', + shouldStartApp: () => true, + parseArgs: () => ({}) as never, + handleCliCommand: () => {}, + printHelp: () => {}, + logNoRunningInstance: () => {}, + onReady: async () => {}, + onWillQuitCleanup: () => {}, + shouldRestoreWindowsOnActivate: () => false, + restoreWindowsOnActivate: () => {}, + shouldQuitOnWindowAllClosed: () => false, + }, + createAppLifecycleRuntimeRunner: () => (args) => { + calls.push(`lifecycle:${(args as { command?: string }).command ?? 'unknown'}`); + }, + buildStartupBootstrapMainDeps: (startAppLifecycle) => ({ + argv: ['node', 'main.js'], + parseArgs: () => ({ command: 'start' }) as never, + setLogLevel: () => {}, + forceX11Backend: () => {}, + enforceUnsupportedWaylandMode: () => {}, + shouldStartApp: () => true, + getDefaultSocketPath: () => '/tmp/mpv.sock', + defaultTexthookerPort: 5174, + configDir: '/tmp/config', + defaultConfig: {} as never, + generateConfigTemplate: () => 'template', + generateDefaultConfigFile: async () => 0, + setExitCode: () => {}, + quitApp: () => {}, + logGenerateConfigError: () => {}, + startAppLifecycle: (args) => startAppLifecycle(args as never), + }), + createStartupBootstrapRuntimeDeps: (deps) => ({ + startAppLifecycle: deps.startAppLifecycle, + }), + runStartupBootstrapRuntime: (deps) => { + deps.startAppLifecycle({ command: 'start' } as unknown as CliArgs); + return { mode: 'started' }; + }, + applyStartupState: (state) => { + calls.push(`apply:${state.mode}`); + }, + }, + }); + + assert.equal(typeof handlers.runAndApplyStartupState, 'function'); + assert.deepEqual(handlers.runAndApplyStartupState(), { mode: 'started' }); + assert.deepEqual(calls, ['lifecycle:start', 'apply:started']); +}); diff --git a/src/main/runtime/composers/headless-startup-composer.ts b/src/main/runtime/composers/headless-startup-composer.ts new file mode 100644 index 0000000..033c37f --- /dev/null +++ 
b/src/main/runtime/composers/headless-startup-composer.ts @@ -0,0 +1,49 @@ +import { createStartupRuntimeHandlers } from '../startup-runtime-handlers'; +import type { ComposerInputs, ComposerOutputs } from './contracts'; + +type StartupRuntimeHandlersDeps = Parameters< + typeof createStartupRuntimeHandlers +>[0]; +type StartupRuntimeHandlers = ReturnType< + typeof createStartupRuntimeHandlers +>; + +export type HeadlessStartupComposerOptions< + TCliArgs, + TStartupState, + TStartupBootstrapRuntimeDeps, +> = ComposerInputs<{ + startupRuntimeHandlersDeps: StartupRuntimeHandlersDeps< + TCliArgs, + TStartupState, + TStartupBootstrapRuntimeDeps + >; +}>; + +export type HeadlessStartupComposerResult< + TCliArgs, + TStartupState, + TStartupBootstrapRuntimeDeps, +> = ComposerOutputs< + Pick< + StartupRuntimeHandlers, + 'appLifecycleRuntimeRunner' | 'runAndApplyStartupState' + > +>; + +export function composeHeadlessStartupHandlers< + TCliArgs, + TStartupState, + TStartupBootstrapRuntimeDeps, +>( + options: HeadlessStartupComposerOptions, +): HeadlessStartupComposerResult { + const { appLifecycleRuntimeRunner, runAndApplyStartupState } = createStartupRuntimeHandlers( + options.startupRuntimeHandlersDeps, + ); + + return { + appLifecycleRuntimeRunner, + runAndApplyStartupState, + }; +} diff --git a/src/main/runtime/composers/index.ts b/src/main/runtime/composers/index.ts index 4506366..af44379 100644 --- a/src/main/runtime/composers/index.ts +++ b/src/main/runtime/composers/index.ts @@ -1,10 +1,13 @@ export * from './anilist-setup-composer'; export * from './anilist-tracking-composer'; export * from './app-ready-composer'; +export * from './cli-startup-composer'; export * from './contracts'; +export * from './headless-startup-composer'; export * from './ipc-runtime-composer'; export * from './jellyfin-remote-composer'; export * from './jellyfin-runtime-composer'; export * from './mpv-runtime-composer'; +export * from './overlay-visibility-runtime-composer'; export * from 
'./shortcuts-runtime-composer'; export * from './startup-lifecycle-composer'; diff --git a/src/main/runtime/composers/jellyfin-remote-composer.test.ts b/src/main/runtime/composers/jellyfin-remote-composer.test.ts index e3fa0f8..ec6525f 100644 --- a/src/main/runtime/composers/jellyfin-remote-composer.test.ts +++ b/src/main/runtime/composers/jellyfin-remote-composer.test.ts @@ -2,8 +2,11 @@ import test from 'node:test'; import assert from 'node:assert/strict'; import { composeJellyfinRemoteHandlers } from './jellyfin-remote-composer'; -test('composeJellyfinRemoteHandlers returns callable jellyfin remote handlers', () => { +test('composeJellyfinRemoteHandlers returns callable jellyfin remote handlers', async () => { let lastProgressAt = 0; + let activePlayback: unknown = { itemId: 'item-1', mediaSourceId: 'src-1', playMethod: 'DirectPlay', audioStreamIndex: null, subtitleStreamIndex: null }; + const calls: string[] = []; + const composed = composeJellyfinRemoteHandlers({ getConfiguredSession: () => null, getClientInfo: () => @@ -14,8 +17,11 @@ test('composeJellyfinRemoteHandlers returns callable jellyfin remote handlers', getMpvClient: () => null, sendMpvCommand: () => {}, jellyfinTicksToSeconds: () => 0, - getActivePlayback: () => null, - clearActivePlayback: () => {}, + getActivePlayback: () => activePlayback as never, + clearActivePlayback: () => { + activePlayback = null; + calls.push('clearActivePlayback'); + }, getSession: () => null, getNow: () => 0, getLastProgressAtMs: () => lastProgressAt, @@ -32,4 +38,9 @@ test('composeJellyfinRemoteHandlers returns callable jellyfin remote handlers', assert.equal(typeof composed.handleJellyfinRemotePlay, 'function'); assert.equal(typeof composed.handleJellyfinRemotePlaystate, 'function'); assert.equal(typeof composed.handleJellyfinRemoteGeneralCommand, 'function'); + + // reportJellyfinRemoteStopped clears active playback when there is no connected session + await composed.reportJellyfinRemoteStopped(); + 
assert.equal(activePlayback, null); + assert.deepEqual(calls, ['clearActivePlayback']); }); diff --git a/src/main/runtime/composers/jellyfin-runtime-composer.test.ts b/src/main/runtime/composers/jellyfin-runtime-composer.test.ts index ed68a1d..53b7486 100644 --- a/src/main/runtime/composers/jellyfin-runtime-composer.test.ts +++ b/src/main/runtime/composers/jellyfin-runtime-composer.test.ts @@ -190,4 +190,9 @@ test('composeJellyfinRuntimeHandlers returns callable jellyfin runtime handlers' assert.equal(typeof composed.stopJellyfinRemoteSession, 'function'); assert.equal(typeof composed.runJellyfinCommand, 'function'); assert.equal(typeof composed.openJellyfinSetupWindow, 'function'); + + // getResolvedJellyfinConfig forwards to the injected getResolvedConfig dep + const jellyfinConfig = composed.getResolvedJellyfinConfig(); + assert.equal(jellyfinConfig.enabled, false); + assert.equal(jellyfinConfig.serverUrl, ''); }); diff --git a/src/main/runtime/composers/mpv-runtime-composer.test.ts b/src/main/runtime/composers/mpv-runtime-composer.test.ts index 72a780e..dcbd375 100644 --- a/src/main/runtime/composers/mpv-runtime-composer.test.ts +++ b/src/main/runtime/composers/mpv-runtime-composer.test.ts @@ -30,37 +30,13 @@ function createDeferred(): { promise: Promise; resolve: () => void } { return { promise, resolve }; } -test('composeMpvRuntimeHandlers returns callable handlers and forwards to injected deps', async () => { - const calls: string[] = []; - let started = false; - let metrics = BASE_METRICS; - let mecabTokenizer: { id: string } | null = null; +class DefaultFakeMpvClient { + connect(): void {} + on(): void {} +} - class FakeMpvClient { - connected = false; - - constructor( - public socketPath: string, - public options: unknown, - ) { - const autoStartOverlay = (options as { autoStartOverlay: boolean }).autoStartOverlay; - calls.push(`create-client:${socketPath}`); - calls.push(`auto-start:${String(autoStartOverlay)}`); - } - - on(): void {} - - connect(): void 
{ - this.connected = true; - calls.push('client-connect'); - } - } - - const composed = composeMpvRuntimeHandlers< - FakeMpvClient, - { isKnownWord: (text: string) => boolean }, - { text: string } - >({ +function createDefaultMpvFixture() { + return { bindMpvMainEventHandlersMainDeps: { appState: { initialArgs: null, @@ -97,15 +73,119 @@ test('composeMpvRuntimeHandlers returns callable handlers and forwards to inject updateSubtitleRenderMetrics: () => {}, }, mpvClientRuntimeServiceFactoryMainDeps: { - createClient: FakeMpvClient, + createClient: DefaultFakeMpvClient, getSocketPath: () => '/tmp/mpv.sock', getResolvedConfig: () => ({ auto_start_overlay: false }), - isAutoStartOverlayEnabled: () => true, + isAutoStartOverlayEnabled: () => false, setOverlayVisible: () => {}, isVisibleOverlayVisible: () => false, getReconnectTimer: () => null, setReconnectTimer: () => {}, }, + updateMpvSubtitleRenderMetricsMainDeps: { + getCurrentMetrics: () => BASE_METRICS, + setCurrentMetrics: () => {}, + applyPatch: (current: MpvSubtitleRenderMetrics, patch: Partial) => ({ + next: { ...current, ...patch }, + changed: true, + }), + broadcastMetrics: () => {}, + }, + tokenizer: { + buildTokenizerDepsMainDeps: { + getYomitanExt: () => null, + getYomitanParserWindow: () => null, + setYomitanParserWindow: () => {}, + getYomitanParserReadyPromise: () => null, + setYomitanParserReadyPromise: () => {}, + getYomitanParserInitPromise: () => null, + setYomitanParserInitPromise: () => {}, + isKnownWord: () => false, + recordLookup: () => {}, + getKnownWordMatchMode: () => 'headword' as const, + getNPlusOneEnabled: () => false, + getMinSentenceWordsForNPlusOne: () => 3, + getJlptLevel: () => null, + getJlptEnabled: () => false, + getFrequencyDictionaryEnabled: () => false, + getFrequencyDictionaryMatchMode: () => 'headword' as const, + getFrequencyRank: () => null, + getYomitanGroupDebugEnabled: () => false, + getMecabTokenizer: () => null, + }, + createTokenizerRuntimeDeps: () => ({ isKnownWord: 
() => false }), + tokenizeSubtitle: async (text: string) => ({ text }), + createMecabTokenizerAndCheckMainDeps: { + getMecabTokenizer: () => null, + setMecabTokenizer: () => {}, + createMecabTokenizer: () => ({ id: 'mecab' }), + checkAvailability: async () => {}, + }, + prewarmSubtitleDictionariesMainDeps: { + ensureJlptDictionaryLookup: async () => {}, + ensureFrequencyDictionaryLookup: async () => {}, + }, + }, + warmups: { + launchBackgroundWarmupTaskMainDeps: { + now: () => 0, + logDebug: () => {}, + logWarn: () => {}, + }, + startBackgroundWarmupsMainDeps: { + getStarted: () => false, + setStarted: () => {}, + isTexthookerOnlyMode: () => false, + ensureYomitanExtensionLoaded: async () => {}, + shouldWarmupMecab: () => false, + shouldWarmupYomitanExtension: () => false, + shouldWarmupSubtitleDictionaries: () => false, + shouldWarmupJellyfinRemoteSession: () => false, + shouldAutoConnectJellyfinRemote: () => false, + startJellyfinRemoteSession: async () => {}, + }, + }, + }; +} + +test('composeMpvRuntimeHandlers returns callable handlers and forwards to injected deps', async () => { + const calls: string[] = []; + let started = false; + let metrics = BASE_METRICS; + let mecabTokenizer: { id: string } | null = null; + + class FakeMpvClient { + connected = false; + + constructor( + public socketPath: string, + public options: unknown, + ) { + const autoStartOverlay = (options as { autoStartOverlay: boolean }).autoStartOverlay; + calls.push(`create-client:${socketPath}`); + calls.push(`auto-start:${String(autoStartOverlay)}`); + } + + on(): void {} + + connect(): void { + this.connected = true; + calls.push('client-connect'); + } + } + + const fixture = createDefaultMpvFixture(); + const composed = composeMpvRuntimeHandlers< + FakeMpvClient, + { isKnownWord: (text: string) => boolean }, + { text: string } + >({ + ...fixture, + mpvClientRuntimeServiceFactoryMainDeps: { + ...fixture.mpvClientRuntimeServiceFactoryMainDeps, + createClient: FakeMpvClient, + 
isAutoStartOverlayEnabled: () => true, + }, updateMpvSubtitleRenderMetricsMainDeps: { getCurrentMetrics: () => metrics, setCurrentMetrics: (next) => { @@ -121,25 +201,12 @@ test('composeMpvRuntimeHandlers returns callable handlers and forwards to inject }, }, tokenizer: { + ...fixture.tokenizer, buildTokenizerDepsMainDeps: { - getYomitanExt: () => null, - getYomitanParserWindow: () => null, - setYomitanParserWindow: () => {}, - getYomitanParserReadyPromise: () => null, - setYomitanParserReadyPromise: () => {}, - getYomitanParserInitPromise: () => null, - setYomitanParserInitPromise: () => {}, + ...fixture.tokenizer.buildTokenizerDepsMainDeps, isKnownWord: (text) => text === 'known', - recordLookup: () => {}, - getKnownWordMatchMode: () => 'headword', - getMinSentenceWordsForNPlusOne: () => 3, - getJlptLevel: () => null, getJlptEnabled: () => true, getFrequencyDictionaryEnabled: () => true, - getFrequencyDictionaryMatchMode: () => 'headword', - getFrequencyRank: () => null, - getYomitanGroupDebugEnabled: () => false, - getMecabTokenizer: () => null, }, createTokenizerRuntimeDeps: (deps) => { calls.push('create-tokenizer-runtime-deps'); @@ -184,12 +251,12 @@ test('composeMpvRuntimeHandlers returns callable handlers and forwards to inject }, }, startBackgroundWarmupsMainDeps: { + ...fixture.warmups.startBackgroundWarmupsMainDeps, getStarted: () => started, setStarted: (next) => { started = next; calls.push(`set-started:${String(next)}`); }, - isTexthookerOnlyMode: () => false, ensureYomitanExtensionLoaded: async () => { calls.push('warmup-yomitan'); }, @@ -197,7 +264,6 @@ test('composeMpvRuntimeHandlers returns callable handlers and forwards to inject shouldWarmupYomitanExtension: () => true, shouldWarmupSubtitleDictionaries: () => true, shouldWarmupJellyfinRemoteSession: () => true, - shouldAutoConnectJellyfinRemote: () => false, startJellyfinRemoteSession: async () => { calls.push('warmup-jellyfin'); }, @@ -264,86 +330,20 @@ test('composeMpvRuntimeHandlers skips 
MeCab warmup when all POS-dependent annota } } + const fixture = createDefaultMpvFixture(); const composed = composeMpvRuntimeHandlers< FakeMpvClient, { isKnownWord: (text: string) => boolean }, { text: string } >({ - bindMpvMainEventHandlersMainDeps: { - appState: { - initialArgs: null, - overlayRuntimeInitialized: true, - mpvClient: null, - immersionTracker: null, - subtitleTimingTracker: null, - currentSubText: '', - currentSubAssText: '', - playbackPaused: null, - previousSecondarySubVisibility: null, - }, - getQuitOnDisconnectArmed: () => false, - scheduleQuitCheck: () => {}, - quitApp: () => {}, - reportJellyfinRemoteStopped: () => {}, - syncOverlayMpvSubtitleSuppression: () => {}, - maybeRunAnilistPostWatchUpdate: async () => {}, - logSubtitleTimingError: () => {}, - broadcastToOverlayWindows: () => {}, - onSubtitleChange: () => {}, - refreshDiscordPresence: () => {}, - ensureImmersionTrackerInitialized: () => {}, - updateCurrentMediaPath: () => {}, - restoreMpvSubVisibility: () => {}, - getCurrentAnilistMediaKey: () => null, - resetAnilistMediaTracking: () => {}, - maybeProbeAnilistDuration: () => {}, - ensureAnilistMediaGuess: () => {}, - syncImmersionMediaState: () => {}, - updateCurrentMediaTitle: () => {}, - resetAnilistMediaGuessState: () => {}, - reportJellyfinRemoteProgress: () => {}, - updateSubtitleRenderMetrics: () => {}, - }, + ...fixture, mpvClientRuntimeServiceFactoryMainDeps: { + ...fixture.mpvClientRuntimeServiceFactoryMainDeps, createClient: FakeMpvClient, - getSocketPath: () => '/tmp/mpv.sock', - getResolvedConfig: () => ({ auto_start_overlay: false }), isAutoStartOverlayEnabled: () => true, - setOverlayVisible: () => {}, - isVisibleOverlayVisible: () => false, - getReconnectTimer: () => null, - setReconnectTimer: () => {}, - }, - updateMpvSubtitleRenderMetricsMainDeps: { - getCurrentMetrics: () => BASE_METRICS, - setCurrentMetrics: () => {}, - applyPatch: (current, patch) => ({ next: { ...current, ...patch }, changed: true }), - 
broadcastMetrics: () => {}, }, tokenizer: { - buildTokenizerDepsMainDeps: { - getYomitanExt: () => null, - getYomitanParserWindow: () => null, - setYomitanParserWindow: () => {}, - getYomitanParserReadyPromise: () => null, - setYomitanParserReadyPromise: () => {}, - getYomitanParserInitPromise: () => null, - setYomitanParserInitPromise: () => {}, - isKnownWord: () => false, - recordLookup: () => {}, - getKnownWordMatchMode: () => 'headword', - getNPlusOneEnabled: () => false, - getMinSentenceWordsForNPlusOne: () => 3, - getJlptLevel: () => null, - getJlptEnabled: () => false, - getFrequencyDictionaryEnabled: () => false, - getFrequencyDictionaryMatchMode: () => 'headword', - getFrequencyRank: () => null, - getYomitanGroupDebugEnabled: () => false, - getMecabTokenizer: () => null, - }, - createTokenizerRuntimeDeps: () => ({ isKnownWord: () => false }), - tokenizeSubtitle: async (text) => ({ text }), + ...fixture.tokenizer, createMecabTokenizerAndCheckMainDeps: { getMecabTokenizer: () => mecabTokenizer, setMecabTokenizer: (next) => { @@ -358,29 +358,6 @@ test('composeMpvRuntimeHandlers skips MeCab warmup when all POS-dependent annota calls.push('check-mecab'); }, }, - prewarmSubtitleDictionariesMainDeps: { - ensureJlptDictionaryLookup: async () => {}, - ensureFrequencyDictionaryLookup: async () => {}, - }, - }, - warmups: { - launchBackgroundWarmupTaskMainDeps: { - now: () => 0, - logDebug: () => {}, - logWarn: () => {}, - }, - startBackgroundWarmupsMainDeps: { - getStarted: () => false, - setStarted: () => {}, - isTexthookerOnlyMode: () => false, - ensureYomitanExtensionLoaded: async () => {}, - shouldWarmupMecab: () => false, - shouldWarmupYomitanExtension: () => false, - shouldWarmupSubtitleDictionaries: () => false, - shouldWarmupJellyfinRemoteSession: () => false, - shouldAutoConnectJellyfinRemote: () => false, - startJellyfinRemoteSession: async () => {}, - }, }, }); @@ -395,98 +372,19 @@ test('composeMpvRuntimeHandlers runs tokenization warmup once across 
sequential let prewarmFrequencyCalls = 0; const tokenizeCalls: string[] = []; + const fixture = createDefaultMpvFixture(); const composed = composeMpvRuntimeHandlers< { connect: () => void; on: () => void }, { isKnownWord: () => boolean }, { text: string } >({ - bindMpvMainEventHandlersMainDeps: { - appState: { - initialArgs: null, - overlayRuntimeInitialized: true, - mpvClient: null, - immersionTracker: null, - subtitleTimingTracker: null, - currentSubText: '', - currentSubAssText: '', - playbackPaused: null, - previousSecondarySubVisibility: null, - }, - getQuitOnDisconnectArmed: () => false, - scheduleQuitCheck: () => {}, - quitApp: () => {}, - reportJellyfinRemoteStopped: () => {}, - syncOverlayMpvSubtitleSuppression: () => {}, - maybeRunAnilistPostWatchUpdate: async () => {}, - logSubtitleTimingError: () => {}, - broadcastToOverlayWindows: () => {}, - onSubtitleChange: () => {}, - refreshDiscordPresence: () => {}, - ensureImmersionTrackerInitialized: () => {}, - updateCurrentMediaPath: () => {}, - restoreMpvSubVisibility: () => {}, - getCurrentAnilistMediaKey: () => null, - resetAnilistMediaTracking: () => {}, - maybeProbeAnilistDuration: () => {}, - ensureAnilistMediaGuess: () => {}, - syncImmersionMediaState: () => {}, - updateCurrentMediaTitle: () => {}, - resetAnilistMediaGuessState: () => {}, - reportJellyfinRemoteProgress: () => {}, - updateSubtitleRenderMetrics: () => {}, - }, - mpvClientRuntimeServiceFactoryMainDeps: { - createClient: class { - connect(): void {} - on(): void {} - }, - getSocketPath: () => '/tmp/mpv.sock', - getResolvedConfig: () => ({ auto_start_overlay: false }), - isAutoStartOverlayEnabled: () => false, - setOverlayVisible: () => {}, - isVisibleOverlayVisible: () => false, - getReconnectTimer: () => null, - setReconnectTimer: () => {}, - }, - updateMpvSubtitleRenderMetricsMainDeps: { - getCurrentMetrics: () => BASE_METRICS, - setCurrentMetrics: () => {}, - applyPatch: (current, patch) => ({ next: { ...current, ...patch }, changed: 
true }), - broadcastMetrics: () => {}, - }, + ...fixture, tokenizer: { - buildTokenizerDepsMainDeps: { - getYomitanExt: () => null, - getYomitanParserWindow: () => null, - setYomitanParserWindow: () => {}, - getYomitanParserReadyPromise: () => null, - setYomitanParserReadyPromise: () => {}, - getYomitanParserInitPromise: () => null, - setYomitanParserInitPromise: () => {}, - isKnownWord: () => false, - recordLookup: () => {}, - getKnownWordMatchMode: () => 'headword', - getNPlusOneEnabled: () => false, - getMinSentenceWordsForNPlusOne: () => 3, - getJlptLevel: () => null, - getJlptEnabled: () => false, - getFrequencyDictionaryEnabled: () => false, - getFrequencyDictionaryMatchMode: () => 'headword', - getFrequencyRank: () => null, - getYomitanGroupDebugEnabled: () => false, - getMecabTokenizer: () => null, - }, - createTokenizerRuntimeDeps: () => ({ isKnownWord: () => false }), + ...fixture.tokenizer, tokenizeSubtitle: async (text) => { tokenizeCalls.push(text); return { text }; }, - createMecabTokenizerAndCheckMainDeps: { - getMecabTokenizer: () => null, - setMecabTokenizer: () => {}, - createMecabTokenizer: () => ({ id: 'mecab' }), - checkAvailability: async () => {}, - }, prewarmSubtitleDictionariesMainDeps: { ensureJlptDictionaryLookup: async () => { prewarmJlptCalls += 1; @@ -497,24 +395,12 @@ test('composeMpvRuntimeHandlers runs tokenization warmup once across sequential }, }, warmups: { - launchBackgroundWarmupTaskMainDeps: { - now: () => 0, - logDebug: () => {}, - logWarn: () => {}, - }, + ...fixture.warmups, startBackgroundWarmupsMainDeps: { - getStarted: () => false, - setStarted: () => {}, - isTexthookerOnlyMode: () => false, + ...fixture.warmups.startBackgroundWarmupsMainDeps, ensureYomitanExtensionLoaded: async () => { yomitanWarmupCalls += 1; }, - shouldWarmupMecab: () => false, - shouldWarmupYomitanExtension: () => false, - shouldWarmupSubtitleDictionaries: () => false, - shouldWarmupJellyfinRemoteSession: () => false, - 
shouldAutoConnectJellyfinRemote: () => false, - startJellyfinRemoteSession: async () => {}, }, }, }); @@ -534,93 +420,23 @@ test('composeMpvRuntimeHandlers does not block first tokenization on dictionary const mecabDeferred = createDeferred(); let tokenizeResolved = false; + const fixture = createDefaultMpvFixture(); const composed = composeMpvRuntimeHandlers< { connect: () => void; on: () => void }, { isKnownWord: () => boolean }, { text: string } >({ - bindMpvMainEventHandlersMainDeps: { - appState: { - initialArgs: null, - overlayRuntimeInitialized: true, - mpvClient: null, - immersionTracker: null, - subtitleTimingTracker: null, - currentSubText: '', - currentSubAssText: '', - playbackPaused: null, - previousSecondarySubVisibility: null, - }, - getQuitOnDisconnectArmed: () => false, - scheduleQuitCheck: () => {}, - quitApp: () => {}, - reportJellyfinRemoteStopped: () => {}, - syncOverlayMpvSubtitleSuppression: () => {}, - maybeRunAnilistPostWatchUpdate: async () => {}, - logSubtitleTimingError: () => {}, - broadcastToOverlayWindows: () => {}, - onSubtitleChange: () => {}, - refreshDiscordPresence: () => {}, - ensureImmersionTrackerInitialized: () => {}, - updateCurrentMediaPath: () => {}, - restoreMpvSubVisibility: () => {}, - getCurrentAnilistMediaKey: () => null, - resetAnilistMediaTracking: () => {}, - maybeProbeAnilistDuration: () => {}, - ensureAnilistMediaGuess: () => {}, - syncImmersionMediaState: () => {}, - updateCurrentMediaTitle: () => {}, - resetAnilistMediaGuessState: () => {}, - reportJellyfinRemoteProgress: () => {}, - updateSubtitleRenderMetrics: () => {}, - }, - mpvClientRuntimeServiceFactoryMainDeps: { - createClient: class { - connect(): void {} - on(): void {} - }, - getSocketPath: () => '/tmp/mpv.sock', - getResolvedConfig: () => ({ auto_start_overlay: false }), - isAutoStartOverlayEnabled: () => false, - setOverlayVisible: () => {}, - isVisibleOverlayVisible: () => false, - getReconnectTimer: () => null, - setReconnectTimer: () => {}, - }, 
- updateMpvSubtitleRenderMetricsMainDeps: { - getCurrentMetrics: () => BASE_METRICS, - setCurrentMetrics: () => {}, - applyPatch: (current, patch) => ({ next: { ...current, ...patch }, changed: true }), - broadcastMetrics: () => {}, - }, + ...fixture, tokenizer: { + ...fixture.tokenizer, buildTokenizerDepsMainDeps: { - getYomitanExt: () => null, - getYomitanParserWindow: () => null, - setYomitanParserWindow: () => {}, - getYomitanParserReadyPromise: () => null, - setYomitanParserReadyPromise: () => {}, - getYomitanParserInitPromise: () => null, - setYomitanParserInitPromise: () => {}, - isKnownWord: () => false, - recordLookup: () => {}, - getKnownWordMatchMode: () => 'headword', + ...fixture.tokenizer.buildTokenizerDepsMainDeps, getNPlusOneEnabled: () => true, - getMinSentenceWordsForNPlusOne: () => 3, - getJlptLevel: () => null, getJlptEnabled: () => true, getFrequencyDictionaryEnabled: () => true, - getFrequencyDictionaryMatchMode: () => 'headword', - getFrequencyRank: () => null, - getYomitanGroupDebugEnabled: () => false, - getMecabTokenizer: () => null, }, - createTokenizerRuntimeDeps: () => ({ isKnownWord: () => false }), - tokenizeSubtitle: async (text) => ({ text }), createMecabTokenizerAndCheckMainDeps: { - getMecabTokenizer: () => null, - setMecabTokenizer: () => {}, - createMecabTokenizer: () => ({ id: 'mecab' }), + ...fixture.tokenizer.createMecabTokenizerAndCheckMainDeps, checkAvailability: async () => mecabDeferred.promise, }, prewarmSubtitleDictionariesMainDeps: { @@ -628,25 +444,6 @@ test('composeMpvRuntimeHandlers does not block first tokenization on dictionary ensureFrequencyDictionaryLookup: async () => frequencyDeferred.promise, }, }, - warmups: { - launchBackgroundWarmupTaskMainDeps: { - now: () => 0, - logDebug: () => {}, - logWarn: () => {}, - }, - startBackgroundWarmupsMainDeps: { - getStarted: () => false, - setStarted: () => {}, - isTexthookerOnlyMode: () => false, - ensureYomitanExtensionLoaded: async () => undefined, - 
shouldWarmupMecab: () => false, - shouldWarmupYomitanExtension: () => false, - shouldWarmupSubtitleDictionaries: () => false, - shouldWarmupJellyfinRemoteSession: () => false, - shouldAutoConnectJellyfinRemote: () => false, - startJellyfinRemoteSession: async () => {}, - }, - }, }); const tokenizePromise = composed.tokenizeSubtitle('first line').then(() => { @@ -667,86 +464,19 @@ test('composeMpvRuntimeHandlers shows annotation loading OSD after tokenization- const frequencyDeferred = createDeferred(); const osdMessages: string[] = []; + const fixture = createDefaultMpvFixture(); const composed = composeMpvRuntimeHandlers< { connect: () => void; on: () => void }, { onTokenizationReady?: (text: string) => void }, { text: string } >({ - bindMpvMainEventHandlersMainDeps: { - appState: { - initialArgs: null, - overlayRuntimeInitialized: true, - mpvClient: null, - immersionTracker: null, - subtitleTimingTracker: null, - currentSubText: '', - currentSubAssText: '', - playbackPaused: null, - previousSecondarySubVisibility: null, - }, - getQuitOnDisconnectArmed: () => false, - scheduleQuitCheck: () => {}, - quitApp: () => {}, - reportJellyfinRemoteStopped: () => {}, - syncOverlayMpvSubtitleSuppression: () => {}, - maybeRunAnilistPostWatchUpdate: async () => {}, - logSubtitleTimingError: () => {}, - broadcastToOverlayWindows: () => {}, - onSubtitleChange: () => {}, - refreshDiscordPresence: () => {}, - ensureImmersionTrackerInitialized: () => {}, - updateCurrentMediaPath: () => {}, - restoreMpvSubVisibility: () => {}, - getCurrentAnilistMediaKey: () => null, - resetAnilistMediaTracking: () => {}, - maybeProbeAnilistDuration: () => {}, - ensureAnilistMediaGuess: () => {}, - syncImmersionMediaState: () => {}, - updateCurrentMediaTitle: () => {}, - resetAnilistMediaGuessState: () => {}, - reportJellyfinRemoteProgress: () => {}, - updateSubtitleRenderMetrics: () => {}, - }, - mpvClientRuntimeServiceFactoryMainDeps: { - createClient: class { - connect(): void {} - on(): void {} 
- }, - getSocketPath: () => '/tmp/mpv.sock', - getResolvedConfig: () => ({ auto_start_overlay: false }), - isAutoStartOverlayEnabled: () => false, - setOverlayVisible: () => {}, - isVisibleOverlayVisible: () => false, - getReconnectTimer: () => null, - setReconnectTimer: () => {}, - }, - updateMpvSubtitleRenderMetricsMainDeps: { - getCurrentMetrics: () => BASE_METRICS, - setCurrentMetrics: () => {}, - applyPatch: (current, patch) => ({ next: { ...current, ...patch }, changed: true }), - broadcastMetrics: () => {}, - }, + ...fixture, tokenizer: { + ...fixture.tokenizer, buildTokenizerDepsMainDeps: { - getYomitanExt: () => null, - getYomitanParserWindow: () => null, - setYomitanParserWindow: () => {}, - getYomitanParserReadyPromise: () => null, - setYomitanParserReadyPromise: () => {}, - getYomitanParserInitPromise: () => null, - setYomitanParserInitPromise: () => {}, - isKnownWord: () => false, - recordLookup: () => {}, - getKnownWordMatchMode: () => 'headword', - getNPlusOneEnabled: () => false, - getMinSentenceWordsForNPlusOne: () => 3, - getJlptLevel: () => null, + ...fixture.tokenizer.buildTokenizerDepsMainDeps, getJlptEnabled: () => true, getFrequencyDictionaryEnabled: () => true, - getFrequencyDictionaryMatchMode: () => 'headword', - getFrequencyRank: () => null, - getYomitanGroupDebugEnabled: () => false, - getMecabTokenizer: () => null, }, createTokenizerRuntimeDeps: (deps) => deps as unknown as { onTokenizationReady?: (text: string) => void }, @@ -754,12 +484,6 @@ test('composeMpvRuntimeHandlers shows annotation loading OSD after tokenization- deps.onTokenizationReady?.(text); return { text }; }, - createMecabTokenizerAndCheckMainDeps: { - getMecabTokenizer: () => null, - setMecabTokenizer: () => {}, - createMecabTokenizer: () => ({ id: 'mecab' }), - checkAvailability: async () => {}, - }, prewarmSubtitleDictionariesMainDeps: { ensureJlptDictionaryLookup: async () => jlptDeferred.promise, ensureFrequencyDictionaryLookup: async () => 
frequencyDeferred.promise, @@ -768,25 +492,6 @@ test('composeMpvRuntimeHandlers shows annotation loading OSD after tokenization- }, }, }, - warmups: { - launchBackgroundWarmupTaskMainDeps: { - now: () => 0, - logDebug: () => {}, - logWarn: () => {}, - }, - startBackgroundWarmupsMainDeps: { - getStarted: () => false, - setStarted: () => {}, - isTexthookerOnlyMode: () => false, - ensureYomitanExtensionLoaded: async () => undefined, - shouldWarmupMecab: () => false, - shouldWarmupYomitanExtension: () => false, - shouldWarmupSubtitleDictionaries: () => false, - shouldWarmupJellyfinRemoteSession: () => false, - shouldAutoConnectJellyfinRemote: () => false, - startJellyfinRemoteSession: async () => {}, - }, - }, }); const warmupPromise = composed.startTokenizationWarmups(); @@ -814,89 +519,22 @@ test('composeMpvRuntimeHandlers reuses completed background tokenization warmups let frequencyWarmupCalls = 0; let mecabTokenizer: { tokenize: () => Promise } | null = null; + const fixture = createDefaultMpvFixture(); const composed = composeMpvRuntimeHandlers< { connect: () => void; on: () => void }, { isKnownWord: () => boolean }, { text: string } >({ - bindMpvMainEventHandlersMainDeps: { - appState: { - initialArgs: null, - overlayRuntimeInitialized: true, - mpvClient: null, - immersionTracker: null, - subtitleTimingTracker: null, - currentSubText: '', - currentSubAssText: '', - playbackPaused: null, - previousSecondarySubVisibility: null, - }, - getQuitOnDisconnectArmed: () => false, - scheduleQuitCheck: () => {}, - quitApp: () => {}, - reportJellyfinRemoteStopped: () => {}, - syncOverlayMpvSubtitleSuppression: () => {}, - maybeRunAnilistPostWatchUpdate: async () => {}, - logSubtitleTimingError: () => {}, - broadcastToOverlayWindows: () => {}, - onSubtitleChange: () => {}, - refreshDiscordPresence: () => {}, - ensureImmersionTrackerInitialized: () => {}, - updateCurrentMediaPath: () => {}, - restoreMpvSubVisibility: () => {}, - getCurrentAnilistMediaKey: () => null, - 
resetAnilistMediaTracking: () => {}, - maybeProbeAnilistDuration: () => {}, - ensureAnilistMediaGuess: () => {}, - syncImmersionMediaState: () => {}, - updateCurrentMediaTitle: () => {}, - resetAnilistMediaGuessState: () => {}, - reportJellyfinRemoteProgress: () => {}, - updateSubtitleRenderMetrics: () => {}, - }, - mpvClientRuntimeServiceFactoryMainDeps: { - createClient: class { - connect(): void {} - on(): void {} - }, - getSocketPath: () => '/tmp/mpv.sock', - getResolvedConfig: () => ({ auto_start_overlay: false }), - isAutoStartOverlayEnabled: () => false, - setOverlayVisible: () => {}, - isVisibleOverlayVisible: () => false, - getReconnectTimer: () => null, - setReconnectTimer: () => {}, - }, - updateMpvSubtitleRenderMetricsMainDeps: { - getCurrentMetrics: () => BASE_METRICS, - setCurrentMetrics: () => {}, - applyPatch: (current, patch) => ({ next: { ...current, ...patch }, changed: true }), - broadcastMetrics: () => {}, - }, + ...fixture, tokenizer: { + ...fixture.tokenizer, buildTokenizerDepsMainDeps: { - getYomitanExt: () => null, - getYomitanParserWindow: () => null, - setYomitanParserWindow: () => {}, - getYomitanParserReadyPromise: () => null, - setYomitanParserReadyPromise: () => {}, - getYomitanParserInitPromise: () => null, - setYomitanParserInitPromise: () => {}, - isKnownWord: () => false, - recordLookup: () => {}, - getKnownWordMatchMode: () => 'headword', + ...fixture.tokenizer.buildTokenizerDepsMainDeps, getNPlusOneEnabled: () => true, - getMinSentenceWordsForNPlusOne: () => 3, - getJlptLevel: () => null, getJlptEnabled: () => true, getFrequencyDictionaryEnabled: () => true, - getFrequencyDictionaryMatchMode: () => 'headword', - getFrequencyRank: () => null, - getYomitanGroupDebugEnabled: () => false, getMecabTokenizer: () => mecabTokenizer, }, - createTokenizerRuntimeDeps: () => ({ isKnownWord: () => false }), - tokenizeSubtitle: async (text) => ({ text }), createMecabTokenizerAndCheckMainDeps: { getMecabTokenizer: () => mecabTokenizer, 
setMecabTokenizer: (next) => { @@ -917,26 +555,19 @@ test('composeMpvRuntimeHandlers reuses completed background tokenization warmups }, }, warmups: { - launchBackgroundWarmupTaskMainDeps: { - now: () => 0, - logDebug: () => {}, - logWarn: () => {}, - }, + ...fixture.warmups, startBackgroundWarmupsMainDeps: { + ...fixture.warmups.startBackgroundWarmupsMainDeps, getStarted: () => started, setStarted: (next) => { started = next; }, - isTexthookerOnlyMode: () => false, ensureYomitanExtensionLoaded: async () => { yomitanWarmupCalls += 1; }, shouldWarmupMecab: () => true, shouldWarmupYomitanExtension: () => true, shouldWarmupSubtitleDictionaries: () => true, - shouldWarmupJellyfinRemoteSession: () => false, - shouldAutoConnectJellyfinRemote: () => false, - startJellyfinRemoteSession: async () => {}, }, }, }); diff --git a/src/main/runtime/composers/overlay-visibility-runtime-composer.test.ts b/src/main/runtime/composers/overlay-visibility-runtime-composer.test.ts new file mode 100644 index 0000000..59f7e67 --- /dev/null +++ b/src/main/runtime/composers/overlay-visibility-runtime-composer.test.ts @@ -0,0 +1,56 @@ +import assert from 'node:assert/strict'; +import test from 'node:test'; +import { composeOverlayVisibilityRuntime } from './overlay-visibility-runtime-composer'; + +test('composeOverlayVisibilityRuntime returns overlay visibility handlers', () => { + const calls: string[] = []; + const composed = composeOverlayVisibilityRuntime({ + overlayVisibilityRuntime: { + updateVisibleOverlayVisibility: () => { + calls.push('updateVisibleOverlayVisibility'); + }, + }, + restorePreviousSecondarySubVisibilityMainDeps: { + getMpvClient: () => null, + }, + broadcastRuntimeOptionsChangedMainDeps: { + broadcastRuntimeOptionsChangedRuntime: () => { + calls.push('broadcastRuntimeOptionsChangedRuntime'); + }, + getRuntimeOptionsState: () => [], + broadcastToOverlayWindows: () => {}, + }, + sendToActiveOverlayWindowMainDeps: { + sendToActiveOverlayWindowRuntime: () => true, + }, + 
setOverlayDebugVisualizationEnabledMainDeps: { + setOverlayDebugVisualizationEnabledRuntime: () => {}, + getCurrentEnabled: () => false, + setCurrentEnabled: () => {}, + }, + openRuntimeOptionsPaletteMainDeps: { + openRuntimeOptionsPaletteRuntime: () => { + calls.push('openRuntimeOptionsPaletteRuntime'); + }, + }, + }); + + assert.equal(typeof composed.updateVisibleOverlayVisibility, 'function'); + assert.equal(typeof composed.restorePreviousSecondarySubVisibility, 'function'); + assert.equal(typeof composed.broadcastRuntimeOptionsChanged, 'function'); + assert.equal(typeof composed.sendToActiveOverlayWindow, 'function'); + assert.equal(typeof composed.setOverlayDebugVisualizationEnabled, 'function'); + assert.equal(typeof composed.openRuntimeOptionsPalette, 'function'); + + // updateVisibleOverlayVisibility passes through to the injected runtime dep + composed.updateVisibleOverlayVisibility(); + assert.deepEqual(calls, ['updateVisibleOverlayVisibility']); + + // openRuntimeOptionsPalette forwards to the injected runtime dep + composed.openRuntimeOptionsPalette(); + assert.deepEqual(calls, ['updateVisibleOverlayVisibility', 'openRuntimeOptionsPaletteRuntime']); + + // broadcastRuntimeOptionsChanged forwards to the injected runtime dep + composed.broadcastRuntimeOptionsChanged(); + assert.ok(calls.includes('broadcastRuntimeOptionsChangedRuntime')); +}); diff --git a/src/main/runtime/composers/overlay-visibility-runtime-composer.ts b/src/main/runtime/composers/overlay-visibility-runtime-composer.ts new file mode 100644 index 0000000..b2daeca --- /dev/null +++ b/src/main/runtime/composers/overlay-visibility-runtime-composer.ts @@ -0,0 +1,88 @@ +import { + createBroadcastRuntimeOptionsChangedHandler, + createOpenRuntimeOptionsPaletteHandler, + createRestorePreviousSecondarySubVisibilityHandler, + createSendToActiveOverlayWindowHandler, + createSetOverlayDebugVisualizationEnabledHandler, +} from '../overlay-runtime-main-actions'; +import { + 
createBuildBroadcastRuntimeOptionsChangedMainDepsHandler, + createBuildOpenRuntimeOptionsPaletteMainDepsHandler, + createBuildRestorePreviousSecondarySubVisibilityMainDepsHandler, + createBuildSendToActiveOverlayWindowMainDepsHandler, + createBuildSetOverlayDebugVisualizationEnabledMainDepsHandler, +} from '../overlay-runtime-main-actions-main-deps'; +import type { ComposerInputs, ComposerOutputs } from './contracts'; + +type RestorePreviousSecondarySubVisibilityMainDeps = Parameters< + typeof createBuildRestorePreviousSecondarySubVisibilityMainDepsHandler +>[0]; +type BroadcastRuntimeOptionsChangedMainDeps = Parameters< + typeof createBuildBroadcastRuntimeOptionsChangedMainDepsHandler +>[0]; +type SendToActiveOverlayWindowMainDeps = Parameters< + typeof createBuildSendToActiveOverlayWindowMainDepsHandler +>[0]; +type SetOverlayDebugVisualizationEnabledMainDeps = Parameters< + typeof createBuildSetOverlayDebugVisualizationEnabledMainDepsHandler +>[0]; +type OpenRuntimeOptionsPaletteMainDeps = Parameters< + typeof createBuildOpenRuntimeOptionsPaletteMainDepsHandler +>[0]; + +export type OverlayVisibilityRuntimeComposerOptions = ComposerInputs<{ + overlayVisibilityRuntime: { + updateVisibleOverlayVisibility: () => void; + }; + restorePreviousSecondarySubVisibilityMainDeps: RestorePreviousSecondarySubVisibilityMainDeps; + broadcastRuntimeOptionsChangedMainDeps: BroadcastRuntimeOptionsChangedMainDeps; + sendToActiveOverlayWindowMainDeps: SendToActiveOverlayWindowMainDeps; + setOverlayDebugVisualizationEnabledMainDeps: SetOverlayDebugVisualizationEnabledMainDeps; + openRuntimeOptionsPaletteMainDeps: OpenRuntimeOptionsPaletteMainDeps; +}>; + +export type OverlayVisibilityRuntimeComposerResult = ComposerOutputs<{ + updateVisibleOverlayVisibility: () => void; + restorePreviousSecondarySubVisibility: ReturnType< + typeof createRestorePreviousSecondarySubVisibilityHandler + >; + broadcastRuntimeOptionsChanged: ReturnType; + sendToActiveOverlayWindow: ReturnType; + 
setOverlayDebugVisualizationEnabled: ReturnType< + typeof createSetOverlayDebugVisualizationEnabledHandler + >; + openRuntimeOptionsPalette: ReturnType; +}>; + +export function composeOverlayVisibilityRuntime( + options: OverlayVisibilityRuntimeComposerOptions, +): OverlayVisibilityRuntimeComposerResult { + return { + updateVisibleOverlayVisibility: () => options.overlayVisibilityRuntime.updateVisibleOverlayVisibility(), + restorePreviousSecondarySubVisibility: createRestorePreviousSecondarySubVisibilityHandler( + createBuildRestorePreviousSecondarySubVisibilityMainDepsHandler( + options.restorePreviousSecondarySubVisibilityMainDeps, + )(), + ), + broadcastRuntimeOptionsChanged: createBroadcastRuntimeOptionsChangedHandler( + createBuildBroadcastRuntimeOptionsChangedMainDepsHandler( + options.broadcastRuntimeOptionsChangedMainDeps, + )(), + ), + sendToActiveOverlayWindow: createSendToActiveOverlayWindowHandler( + createBuildSendToActiveOverlayWindowMainDepsHandler( + options.sendToActiveOverlayWindowMainDeps, + )(), + ), + setOverlayDebugVisualizationEnabled: createSetOverlayDebugVisualizationEnabledHandler( + createBuildSetOverlayDebugVisualizationEnabledMainDepsHandler( + options.setOverlayDebugVisualizationEnabledMainDeps, + )(), + ), + openRuntimeOptionsPalette: createOpenRuntimeOptionsPaletteHandler( + createBuildOpenRuntimeOptionsPaletteMainDepsHandler( + options.openRuntimeOptionsPaletteMainDeps, + )(), + ), + }; +} diff --git a/src/main/runtime/composers/shortcuts-runtime-composer.test.ts b/src/main/runtime/composers/shortcuts-runtime-composer.test.ts index a93b547..ad87e36 100644 --- a/src/main/runtime/composers/shortcuts-runtime-composer.test.ts +++ b/src/main/runtime/composers/shortcuts-runtime-composer.test.ts @@ -3,6 +3,7 @@ import test from 'node:test'; import { composeShortcutRuntimes } from './shortcuts-runtime-composer'; test('composeShortcutRuntimes returns callable shortcut runtime handlers', () => { + const calls: string[] = []; const composed = 
composeShortcutRuntimes({ globalShortcuts: { getConfiguredShortcutsMainDeps: { @@ -39,9 +40,13 @@ test('composeShortcutRuntimes returns callable shortcut runtime handlers', () => }, overlayShortcutsRuntimeMainDeps: { overlayShortcutsRuntime: { - registerOverlayShortcuts: () => {}, + registerOverlayShortcuts: () => { + calls.push('registerOverlayShortcuts'); + }, unregisterOverlayShortcuts: () => {}, - syncOverlayShortcuts: () => {}, + syncOverlayShortcuts: () => { + calls.push('syncOverlayShortcuts'); + }, refreshOverlayShortcuts: () => {}, }, }, @@ -58,4 +63,12 @@ test('composeShortcutRuntimes returns callable shortcut runtime handlers', () => assert.equal(typeof composed.unregisterOverlayShortcuts, 'function'); assert.equal(typeof composed.syncOverlayShortcuts, 'function'); assert.equal(typeof composed.refreshOverlayShortcuts, 'function'); + + // registerOverlayShortcuts forwards to the injected overlayShortcutsRuntime dep + composed.registerOverlayShortcuts(); + assert.deepEqual(calls, ['registerOverlayShortcuts']); + + // syncOverlayShortcuts forwards to the injected overlayShortcutsRuntime dep + composed.syncOverlayShortcuts(); + assert.deepEqual(calls, ['registerOverlayShortcuts', 'syncOverlayShortcuts']); }); diff --git a/src/main/runtime/composers/startup-lifecycle-composer.test.ts b/src/main/runtime/composers/startup-lifecycle-composer.test.ts index 60eb4a7..0c2cf22 100644 --- a/src/main/runtime/composers/startup-lifecycle-composer.test.ts +++ b/src/main/runtime/composers/startup-lifecycle-composer.test.ts @@ -3,6 +3,7 @@ import test from 'node:test'; import { composeStartupLifecycleHandlers } from './startup-lifecycle-composer'; test('composeStartupLifecycleHandlers returns callable startup lifecycle handlers', () => { + const calls: string[] = []; const composed = composeStartupLifecycleHandlers({ registerProtocolUrlHandlersMainDeps: { registerOpenUrl: () => {}, @@ -51,7 +52,9 @@ test('composeStartupLifecycleHandlers returns callable startup lifecycle 
handler getAllWindowCount: () => 0, }, restoreWindowsOnActivateMainDeps: { - createMainWindow: () => {}, + createMainWindow: () => { + calls.push('createMainWindow'); + }, updateVisibleOverlayVisibility: () => {}, syncOverlayMpvSubtitleSuppression: () => {}, }, @@ -61,4 +64,11 @@ test('composeStartupLifecycleHandlers returns callable startup lifecycle handler assert.equal(typeof composed.onWillQuitCleanup, 'function'); assert.equal(typeof composed.shouldRestoreWindowsOnActivate, 'function'); assert.equal(typeof composed.restoreWindowsOnActivate, 'function'); + + // shouldRestoreWindowsOnActivate returns false when overlay runtime is not initialized + assert.equal(composed.shouldRestoreWindowsOnActivate(), false); + + // restoreWindowsOnActivate invokes the injected createMainWindow dep + composed.restoreWindowsOnActivate(); + assert.deepEqual(calls, ['createMainWindow']); }); diff --git a/src/main/runtime/discord-presence-runtime.test.ts b/src/main/runtime/discord-presence-runtime.test.ts new file mode 100644 index 0000000..45d089f --- /dev/null +++ b/src/main/runtime/discord-presence-runtime.test.ts @@ -0,0 +1,76 @@ +import assert from 'node:assert/strict'; +import test from 'node:test'; +import { createDiscordPresenceRuntime } from './discord-presence-runtime'; + +test('discord presence runtime refreshes duration and publishes the current snapshot', async () => { + const snapshots: Array> = []; + let mediaDurationSec: number | null = null; + + const runtime = createDiscordPresenceRuntime({ + getDiscordPresenceService: () => ({ + publish: (snapshot: Record) => { + snapshots.push(snapshot); + }, + }), + isDiscordPresenceEnabled: () => true, + getMpvClient: () => + ({ + connected: true, + currentTimePos: 12, + requestProperty: async (name: string) => { + assert.equal(name, 'duration'); + return 42; + }, + }) as never, + getCurrentMediaTitle: () => 'Episode 1', + getCurrentMediaPath: () => '/media/episode-1.mkv', + getCurrentSubtitleText: () => '字幕', + 
getPlaybackPaused: () => false, + getFallbackMediaDurationSec: () => 90, + getSessionStartedAtMs: () => 1_000, + getMediaDurationSec: () => mediaDurationSec, + setMediaDurationSec: (next) => { + mediaDurationSec = next; + }, + }); + + await runtime.refreshDiscordPresenceMediaDuration(); + runtime.publishDiscordPresence(); + + assert.equal(mediaDurationSec, 42); + assert.deepEqual(snapshots, [ + { + mediaTitle: 'Episode 1', + mediaPath: '/media/episode-1.mkv', + subtitleText: '字幕', + currentTimeSec: 12, + mediaDurationSec: 42, + paused: false, + connected: true, + sessionStartedAtMs: 1_000, + }, + ]); +}); + +test('discord presence runtime skips publish when disabled or service missing', () => { + let published = false; + const runtime = createDiscordPresenceRuntime({ + getDiscordPresenceService: () => null, + isDiscordPresenceEnabled: () => false, + getMpvClient: () => null, + getCurrentMediaTitle: () => null, + getCurrentMediaPath: () => null, + getCurrentSubtitleText: () => '', + getPlaybackPaused: () => null, + getFallbackMediaDurationSec: () => null, + getSessionStartedAtMs: () => 0, + getMediaDurationSec: () => null, + setMediaDurationSec: () => { + published = true; + }, + }); + + runtime.publishDiscordPresence(); + + assert.equal(published, false); +}); diff --git a/src/main/runtime/discord-presence-runtime.ts b/src/main/runtime/discord-presence-runtime.ts new file mode 100644 index 0000000..fcb6704 --- /dev/null +++ b/src/main/runtime/discord-presence-runtime.ts @@ -0,0 +1,74 @@ +type DiscordPresenceServiceLike = { + publish: (snapshot: { + mediaTitle: string | null; + mediaPath: string | null; + subtitleText: string; + currentTimeSec: number | null; + mediaDurationSec: number | null; + paused: boolean | null; + connected: boolean; + sessionStartedAtMs: number; + }) => void; +}; + +type MpvClientLike = { + connected?: boolean; + currentTimePos?: number | null; + requestProperty: (name: string) => Promise; +}; + +export type DiscordPresenceRuntimeDeps = { + 
getDiscordPresenceService: () => DiscordPresenceServiceLike | null; + isDiscordPresenceEnabled: () => boolean; + getMpvClient: () => MpvClientLike | null; + getCurrentMediaTitle: () => string | null; + getCurrentMediaPath: () => string | null; + getCurrentSubtitleText: () => string; + getPlaybackPaused: () => boolean | null; + getFallbackMediaDurationSec: () => number | null; + getSessionStartedAtMs: () => number; + getMediaDurationSec: () => number | null; + setMediaDurationSec: (durationSec: number | null) => void; +}; + +export function createDiscordPresenceRuntime(deps: DiscordPresenceRuntimeDeps) { + const refreshDiscordPresenceMediaDuration = async (): Promise => { + const client = deps.getMpvClient(); + if (!client?.connected) { + return; + } + + try { + const value = await client.requestProperty('duration'); + const numeric = Number(value); + deps.setMediaDurationSec(Number.isFinite(numeric) && numeric > 0 ? numeric : null); + } catch { + deps.setMediaDurationSec(null); + } + }; + + const publishDiscordPresence = (): void => { + const discordPresenceService = deps.getDiscordPresenceService(); + if (!discordPresenceService || deps.isDiscordPresenceEnabled() !== true) { + return; + } + + void refreshDiscordPresenceMediaDuration(); + const client = deps.getMpvClient(); + discordPresenceService.publish({ + mediaTitle: deps.getCurrentMediaTitle(), + mediaPath: deps.getCurrentMediaPath(), + subtitleText: deps.getCurrentSubtitleText(), + currentTimeSec: client?.currentTimePos ?? null, + mediaDurationSec: deps.getMediaDurationSec() ?? 
deps.getFallbackMediaDurationSec(), + paused: deps.getPlaybackPaused(), + connected: Boolean(client?.connected), + sessionStartedAtMs: deps.getSessionStartedAtMs(), + }); + }; + + return { + refreshDiscordPresenceMediaDuration, + publishDiscordPresence, + }; +} diff --git a/src/main/runtime/discord-rpc-client.test.ts b/src/main/runtime/discord-rpc-client.test.ts new file mode 100644 index 0000000..2c185bb --- /dev/null +++ b/src/main/runtime/discord-rpc-client.test.ts @@ -0,0 +1,38 @@ +import assert from 'node:assert/strict'; +import test from 'node:test'; + +import { createDiscordRpcClient } from './discord-rpc-client'; + +test('createDiscordRpcClient forwards rich presence calls through client.user', async () => { + const calls: Array = []; + const rpcClient = createDiscordRpcClient('123456789012345678', { + createClient: () => + ({ + login: async () => { + calls.push('login'); + }, + user: { + setActivity: async () => { + calls.push('setActivity'); + }, + clearActivity: async () => { + calls.push('clearActivity'); + }, + }, + destroy: async () => { + calls.push('destroy'); + }, + }) as never, + }); + + await rpcClient.login(); + await rpcClient.setActivity({ + details: 'Title', + state: 'Playing 00:01 / 00:02', + startTimestamp: 1_700_000_000, + }); + await rpcClient.clearActivity(); + await rpcClient.destroy(); + + assert.deepEqual(calls, ['login', 'setActivity', 'clearActivity', 'destroy']); +}); diff --git a/src/main/runtime/discord-rpc-client.ts b/src/main/runtime/discord-rpc-client.ts new file mode 100644 index 0000000..b50aa08 --- /dev/null +++ b/src/main/runtime/discord-rpc-client.ts @@ -0,0 +1,49 @@ +import { Client } from '@xhayper/discord-rpc'; + +import type { DiscordActivityPayload } from '../../core/services/discord-presence'; + +type DiscordRpcClientUserLike = { + setActivity: (activity: DiscordActivityPayload) => Promise; + clearActivity: () => Promise; +}; + +type DiscordRpcRawClient = { + login: () => Promise; + destroy: () => Promise; + user?: 
DiscordRpcClientUserLike; +}; + +export type DiscordRpcClient = { + login: () => Promise; + setActivity: (activity: DiscordActivityPayload) => Promise; + clearActivity: () => Promise; + destroy: () => Promise; +}; + +function requireUser(client: DiscordRpcRawClient): DiscordRpcClientUserLike { + if (!client.user) { + throw new Error('Discord RPC client user is unavailable'); + } + + return client.user; +} + +export function wrapDiscordRpcClient(client: DiscordRpcRawClient): DiscordRpcClient { + return { + login: () => client.login(), + setActivity: (activity) => requireUser(client).setActivity(activity).then(() => undefined), + clearActivity: () => requireUser(client).clearActivity(), + destroy: () => client.destroy(), + }; +} + +export function createDiscordRpcClient( + clientId: string, + deps?: { createClient?: (options: { clientId: string; transport: { type: 'ipc' } }) => DiscordRpcRawClient }, +): DiscordRpcClient { + const client = + deps?.createClient?.({ clientId, transport: { type: 'ipc' } }) ?? 
+ new Client({ clientId, transport: { type: 'ipc' } }); + + return wrapDiscordRpcClient(client); +} diff --git a/src/main/runtime/first-run-setup-plugin.test.ts b/src/main/runtime/first-run-setup-plugin.test.ts index 100c385..1eebb02 100644 --- a/src/main/runtime/first-run-setup-plugin.test.ts +++ b/src/main/runtime/first-run-setup-plugin.test.ts @@ -7,6 +7,7 @@ import { detectInstalledFirstRunPlugin, installFirstRunPluginToDefaultLocation, resolvePackagedFirstRunPluginAssets, + syncInstalledFirstRunPluginBinaryPath, } from './first-run-setup-plugin'; import { resolveDefaultMpvInstallPaths } from '../../shared/setup-state'; @@ -68,13 +69,17 @@ test('installFirstRunPluginToDefaultLocation installs plugin and backs up existi dirname: path.join(root, 'dist', 'main', 'runtime'), appPath: path.join(root, 'app'), resourcesPath, + binaryPath: '/Applications/SubMiner.app/Contents/MacOS/SubMiner', }); assert.equal(result.ok, true); assert.equal(result.pluginInstallStatus, 'installed'); assert.equal(detectInstalledFirstRunPlugin(installPaths), true); assert.equal(fs.readFileSync(installPaths.pluginEntrypointPath, 'utf8'), '-- packaged plugin'); - assert.equal(fs.readFileSync(installPaths.pluginConfigPath, 'utf8'), 'configured=true\n'); + assert.equal( + fs.readFileSync(installPaths.pluginConfigPath, 'utf8'), + 'configured=true\nbinary_path=/Applications/SubMiner.app/Contents/MacOS/SubMiner\n', + ); const scriptsDirEntries = fs.readdirSync(installPaths.scriptsDir); const scriptOptsEntries = fs.readdirSync(installPaths.scriptOptsDir); @@ -113,13 +118,17 @@ test('installFirstRunPluginToDefaultLocation installs plugin to Windows mpv defa dirname: path.join(root, 'dist', 'main', 'runtime'), appPath: path.join(root, 'app'), resourcesPath, + binaryPath: 'C:\\Program Files\\SubMiner\\SubMiner.exe', }); assert.equal(result.ok, true); assert.equal(result.pluginInstallStatus, 'installed'); assert.equal(detectInstalledFirstRunPlugin(installPaths), true); 
assert.equal(fs.readFileSync(installPaths.pluginEntrypointPath, 'utf8'), '-- packaged plugin'); - assert.equal(fs.readFileSync(installPaths.pluginConfigPath, 'utf8'), 'configured=true\n'); + assert.equal( + fs.readFileSync(installPaths.pluginConfigPath, 'utf8'), + 'configured=true\nbinary_path=C:\\Program Files\\SubMiner\\SubMiner.exe\n', + ); }); }); @@ -146,12 +155,70 @@ test('installFirstRunPluginToDefaultLocation rewrites Windows plugin socket_path dirname: path.join(root, 'dist', 'main', 'runtime'), appPath: path.join(root, 'app'), resourcesPath, + binaryPath: 'C:\\Program Files\\SubMiner\\SubMiner.exe', }); assert.equal(result.ok, true); assert.equal( fs.readFileSync(installPaths.pluginConfigPath, 'utf8'), - 'binary_path=\nsocket_path=\\\\.\\pipe\\subminer-socket\n', + 'binary_path=C:\\Program Files\\SubMiner\\SubMiner.exe\nsocket_path=\\\\.\\pipe\\subminer-socket\n', + ); + }); +}); + +test('syncInstalledFirstRunPluginBinaryPath fills blank binary_path for existing installs', () => { + withTempDir((root) => { + const homeDir = path.join(root, 'home'); + const xdgConfigHome = path.join(root, 'xdg'); + const installPaths = resolveDefaultMpvInstallPaths('linux', homeDir, xdgConfigHome); + + fs.mkdirSync(path.dirname(installPaths.pluginConfigPath), { recursive: true }); + fs.writeFileSync(installPaths.pluginConfigPath, 'binary_path=\nsocket_path=/tmp/subminer-socket\n'); + + const result = syncInstalledFirstRunPluginBinaryPath({ + platform: 'linux', + homeDir, + xdgConfigHome, + binaryPath: '/Applications/SubMiner.app/Contents/MacOS/SubMiner', + }); + + assert.deepEqual(result, { + updated: true, + configPath: installPaths.pluginConfigPath, + }); + assert.equal( + fs.readFileSync(installPaths.pluginConfigPath, 'utf8'), + 'binary_path=/Applications/SubMiner.app/Contents/MacOS/SubMiner\nsocket_path=/tmp/subminer-socket\n', + ); + }); +}); + +test('syncInstalledFirstRunPluginBinaryPath preserves explicit binary_path overrides', () => { + withTempDir((root) => { + 
const homeDir = path.join(root, 'home'); + const xdgConfigHome = path.join(root, 'xdg'); + const installPaths = resolveDefaultMpvInstallPaths('linux', homeDir, xdgConfigHome); + + fs.mkdirSync(path.dirname(installPaths.pluginConfigPath), { recursive: true }); + fs.writeFileSync( + installPaths.pluginConfigPath, + 'binary_path=/tmp/SubMiner/scripts/subminer-dev.sh\nsocket_path=/tmp/subminer-socket\n', + ); + + const result = syncInstalledFirstRunPluginBinaryPath({ + platform: 'linux', + homeDir, + xdgConfigHome, + binaryPath: '/Applications/SubMiner.app/Contents/MacOS/SubMiner', + }); + + assert.deepEqual(result, { + updated: false, + configPath: installPaths.pluginConfigPath, + }); + assert.equal( + fs.readFileSync(installPaths.pluginConfigPath, 'utf8'), + 'binary_path=/tmp/SubMiner/scripts/subminer-dev.sh\nsocket_path=/tmp/subminer-socket\n', ); }); }); diff --git a/src/main/runtime/first-run-setup-plugin.ts b/src/main/runtime/first-run-setup-plugin.ts index c00184b..8a24ca2 100644 --- a/src/main/runtime/first-run-setup-plugin.ts +++ b/src/main/runtime/first-run-setup-plugin.ts @@ -28,6 +28,43 @@ function rewriteInstalledWindowsPluginConfig(configPath: string): void { } } +function sanitizePluginConfigValue(value: string): string { + return value.replace(/[\r\n]/g, '').trim(); +} + +function upsertPluginConfigLine(content: string, key: string, value: string): string { + const normalizedValue = sanitizePluginConfigValue(value); + const line = `${key}=${normalizedValue}`; + const pattern = new RegExp(`^${key}=.*$`, 'm'); + if (pattern.test(content)) { + return content.replace(pattern, line); + } + + const suffix = content.endsWith('\n') || content.length === 0 ? 
'' : '\n'; + return `${content}${suffix}${line}\n`; +} + +function rewriteInstalledPluginBinaryPath(configPath: string, binaryPath: string): boolean { + const content = fs.readFileSync(configPath, 'utf8'); + const updated = upsertPluginConfigLine(content, 'binary_path', binaryPath); + if (updated === content) { + return false; + } + fs.writeFileSync(configPath, updated, 'utf8'); + return true; +} + +function readInstalledPluginBinaryPath(configPath: string): string | null { + const content = fs.readFileSync(configPath, 'utf8'); + const match = content.match(/^binary_path=(.*)$/m); + if (!match) { + return null; + } + const rawValue = match[1] ?? ''; + const value = sanitizePluginConfigValue(rawValue); + return value.length > 0 ? value : null; +} + export function resolvePackagedFirstRunPluginAssets(deps: { dirname: string; appPath: string; @@ -79,6 +116,7 @@ export function installFirstRunPluginToDefaultLocation(options: { dirname: string; appPath: string; resourcesPath: string; + binaryPath: string; }): PluginInstallResult { const installPaths = resolveDefaultMpvInstallPaths( options.platform, @@ -116,6 +154,7 @@ export function installFirstRunPluginToDefaultLocation(options: { backupExistingPath(installPaths.pluginConfigPath); fs.cpSync(assets.pluginDirSource, installPaths.pluginDir, { recursive: true }); fs.copyFileSync(assets.pluginConfigSource, installPaths.pluginConfigPath); + rewriteInstalledPluginBinaryPath(installPaths.pluginConfigPath, options.binaryPath); if (options.platform === 'win32') { rewriteInstalledWindowsPluginConfig(installPaths.pluginConfigPath); } @@ -127,3 +166,33 @@ export function installFirstRunPluginToDefaultLocation(options: { message: `Installed mpv plugin to ${installPaths.mpvConfigDir}.`, }; } + +export function syncInstalledFirstRunPluginBinaryPath(options: { + platform: NodeJS.Platform; + homeDir: string; + xdgConfigHome?: string; + binaryPath: string; +}): { updated: boolean; configPath: string | null } { + const installPaths = 
resolveDefaultMpvInstallPaths( + options.platform, + options.homeDir, + options.xdgConfigHome, + ); + if (!installPaths.supported || !fs.existsSync(installPaths.pluginConfigPath)) { + return { updated: false, configPath: null }; + } + + const configuredBinaryPath = readInstalledPluginBinaryPath(installPaths.pluginConfigPath); + if (configuredBinaryPath) { + return { updated: false, configPath: installPaths.pluginConfigPath }; + } + + const updated = rewriteInstalledPluginBinaryPath(installPaths.pluginConfigPath, options.binaryPath); + if (options.platform === 'win32') { + rewriteInstalledWindowsPluginConfig(installPaths.pluginConfigPath); + } + return { + updated, + configPath: installPaths.pluginConfigPath, + }; +} diff --git a/src/main/runtime/mpv-main-event-bindings.ts b/src/main/runtime/mpv-main-event-bindings.ts index a3de05e..941ef21 100644 --- a/src/main/runtime/mpv-main-event-bindings.ts +++ b/src/main/runtime/mpv-main-event-bindings.ts @@ -78,8 +78,7 @@ export function createBindMpvMainEventHandlersHandler(deps: { reportJellyfinRemoteStopped: () => deps.reportJellyfinRemoteStopped(), refreshDiscordPresence: () => deps.refreshDiscordPresence(), syncOverlayMpvSubtitleSuppression: () => deps.syncOverlayMpvSubtitleSuppression(), - hasInitialPlaybackQuitOnDisconnectArg: () => - deps.hasInitialPlaybackQuitOnDisconnectArg(), + hasInitialPlaybackQuitOnDisconnectArg: () => deps.hasInitialPlaybackQuitOnDisconnectArg(), isOverlayRuntimeInitialized: () => deps.isOverlayRuntimeInitialized(), shouldQuitOnDisconnectWhenOverlayRuntimeInitialized: () => deps.shouldQuitOnDisconnectWhenOverlayRuntimeInitialized(), @@ -88,7 +87,11 @@ export function createBindMpvMainEventHandlersHandler(deps: { isMpvConnected: () => deps.isMpvConnected(), quitApp: () => deps.quitApp(), }); - const handleMpvConnectionChangeWithSidebarReset = ({ connected }: { connected: boolean }): void => { + const handleMpvConnectionChangeWithSidebarReset = ({ + connected, + }: { + connected: boolean; + }): 
void => { if (connected) { deps.resetSubtitleSidebarEmbeddedLayout(); } diff --git a/src/main/runtime/mpv-main-event-main-deps.ts b/src/main/runtime/mpv-main-event-main-deps.ts index 9a74abe..f9acd77 100644 --- a/src/main/runtime/mpv-main-event-main-deps.ts +++ b/src/main/runtime/mpv-main-event-main-deps.ts @@ -4,14 +4,12 @@ export function createBuildBindMpvMainEventHandlersMainDepsHandler(deps: { appState: { initialArgs?: { jellyfinPlay?: unknown; youtubePlay?: unknown } | null; overlayRuntimeInitialized: boolean; - mpvClient: - | { - connected?: boolean; - currentSecondarySubText?: string; - currentTimePos?: number; - requestProperty?: (name: string) => Promise; - } - | null; + mpvClient: { + connected?: boolean; + currentSecondarySubText?: string; + currentTimePos?: number; + requestProperty?: (name: string) => Promise; + } | null; immersionTracker: { recordSubtitleLine?: ( text: string, diff --git a/src/main/runtime/overlay-modal-input-state.test.ts b/src/main/runtime/overlay-modal-input-state.test.ts new file mode 100644 index 0000000..cda2d19 --- /dev/null +++ b/src/main/runtime/overlay-modal-input-state.test.ts @@ -0,0 +1,87 @@ +import assert from 'node:assert/strict'; +import test from 'node:test'; +import { createOverlayModalInputState } from './overlay-modal-input-state'; + +function createModalWindow() { + const calls: string[] = []; + let destroyed = false; + let focused = false; + let webContentsFocused = false; + + return { + calls, + setDestroyed(next: boolean) { + destroyed = next; + }, + setFocused(next: boolean) { + focused = next; + }, + setWebContentsFocused(next: boolean) { + webContentsFocused = next; + }, + isDestroyed: () => destroyed, + setIgnoreMouseEvents: (ignore: boolean) => { + calls.push(`ignore:${ignore}`); + }, + setAlwaysOnTop: (flag: boolean, level?: string, relativeLevel?: number) => { + calls.push(`top:${flag}:${level ?? ''}:${relativeLevel ?? 
''}`); + }, + focus: () => { + focused = true; + calls.push('focus'); + }, + isFocused: () => focused, + webContents: { + isFocused: () => webContentsFocused, + focus: () => { + webContentsFocused = true; + calls.push('web-focus'); + }, + }, + }; +} + +test('overlay modal input state activates modal window interactivity and syncs dependents', () => { + const modalWindow = createModalWindow(); + const calls: string[] = []; + const state = createOverlayModalInputState({ + getModalWindow: () => modalWindow as never, + syncOverlayShortcutsForModal: (isActive) => { + calls.push(`shortcuts:${isActive}`); + }, + syncOverlayVisibilityForModal: () => { + calls.push('visibility'); + }, + }); + + state.handleModalInputStateChange(true); + + assert.equal(state.getModalInputExclusive(), true); + assert.deepEqual(modalWindow.calls, [ + 'ignore:false', + 'top:true:screen-saver:1', + 'focus', + 'web-focus', + ]); + assert.deepEqual(calls, ['shortcuts:true', 'visibility']); +}); + +test('overlay modal input state is idempotent for unchanged state', () => { + const calls: string[] = []; + const state = createOverlayModalInputState({ + getModalWindow: () => null, + syncOverlayShortcutsForModal: (isActive) => { + calls.push(`shortcuts:${isActive}`); + }, + syncOverlayVisibilityForModal: () => { + calls.push('visibility'); + }, + }); + + state.handleModalInputStateChange(false); + state.handleModalInputStateChange(true); + state.handleModalInputStateChange(true); + + assert.equal(state.getModalInputExclusive(), true); + assert.deepEqual(calls, ['shortcuts:true', 'visibility']); +}); diff --git a/src/main/runtime/overlay-modal-input-state.ts b/src/main/runtime/overlay-modal-input-state.ts new file mode 100644 index 0000000..b095ca1 --- /dev/null +++ b/src/main/runtime/overlay-modal-input-state.ts @@ -0,0 +1,38 @@ +import type { BrowserWindow } from 'electron'; + +export type OverlayModalInputStateDeps = { + getModalWindow: () => BrowserWindow | null; + syncOverlayShortcutsForModal: 
(isActive: boolean) => void; + syncOverlayVisibilityForModal: () => void; +}; + +export function createOverlayModalInputState(deps: OverlayModalInputStateDeps) { + let modalInputExclusive = false; + + const handleModalInputStateChange = (isActive: boolean): void => { + if (modalInputExclusive === isActive) { + return; + } + + modalInputExclusive = isActive; + if (isActive) { + const modalWindow = deps.getModalWindow(); + if (modalWindow && !modalWindow.isDestroyed()) { + modalWindow.setIgnoreMouseEvents(false); + modalWindow.setAlwaysOnTop(true, 'screen-saver', 1); + modalWindow.focus(); + if (!modalWindow.webContents.isFocused()) { + modalWindow.webContents.focus(); + } + } + } + + deps.syncOverlayShortcutsForModal(isActive); + deps.syncOverlayVisibilityForModal(); + }; + + return { + getModalInputExclusive: (): boolean => modalInputExclusive, + handleModalInputStateChange, + }; +} diff --git a/src/main/runtime/overlay-runtime-options.ts b/src/main/runtime/overlay-runtime-options.ts index 7a2cea9..ce51c3f 100644 --- a/src/main/runtime/overlay-runtime-options.ts +++ b/src/main/runtime/overlay-runtime-options.ts @@ -2,9 +2,9 @@ import type { AnkiConnectConfig, KikuFieldGroupingChoice, KikuFieldGroupingRequestData, - WindowGeometry, -} from '../../types'; +} from '../../types/anki'; import type { BrowserWindow } from 'electron'; +import type { WindowGeometry } from '../../types/runtime'; import type { BaseWindowTracker } from '../../window-trackers'; type OverlayRuntimeOptions = { diff --git a/src/main/runtime/setup-window-factory.test.ts b/src/main/runtime/setup-window-factory.test.ts new file mode 100644 index 0000000..74bdc6a --- /dev/null +++ b/src/main/runtime/setup-window-factory.test.ts @@ -0,0 +1,79 @@ +import assert from 'node:assert/strict'; +import test from 'node:test'; +import { + createCreateAnilistSetupWindowHandler, + createCreateFirstRunSetupWindowHandler, + createCreateJellyfinSetupWindowHandler, +} from './setup-window-factory'; + 
+test('createCreateFirstRunSetupWindowHandler builds first-run setup window', () => { + let options: Electron.BrowserWindowConstructorOptions | null = null; + const createSetupWindow = createCreateFirstRunSetupWindowHandler({ + createBrowserWindow: (nextOptions) => { + options = nextOptions; + return { id: 'first-run' } as never; + }, + }); + + assert.deepEqual(createSetupWindow(), { id: 'first-run' }); + assert.deepEqual(options, { + width: 480, + height: 460, + title: 'SubMiner Setup', + show: true, + autoHideMenuBar: true, + resizable: false, + minimizable: false, + maximizable: false, + webPreferences: { + nodeIntegration: false, + contextIsolation: true, + }, + }); +}); + +test('createCreateJellyfinSetupWindowHandler builds jellyfin setup window', () => { + let options: Electron.BrowserWindowConstructorOptions | null = null; + const createSetupWindow = createCreateJellyfinSetupWindowHandler({ + createBrowserWindow: (nextOptions) => { + options = nextOptions; + return { id: 'jellyfin' } as never; + }, + }); + + assert.deepEqual(createSetupWindow(), { id: 'jellyfin' }); + assert.deepEqual(options, { + width: 520, + height: 560, + title: 'Jellyfin Setup', + show: true, + autoHideMenuBar: true, + webPreferences: { + nodeIntegration: false, + contextIsolation: true, + }, + }); +}); + +test('createCreateAnilistSetupWindowHandler builds anilist setup window', () => { + let options: Electron.BrowserWindowConstructorOptions | null = null; + const createSetupWindow = createCreateAnilistSetupWindowHandler({ + createBrowserWindow: (nextOptions) => { + options = nextOptions; + return { id: 'anilist' } as never; + }, + }); + + assert.deepEqual(createSetupWindow(), { id: 'anilist' }); + assert.deepEqual(options, { + width: 1000, + height: 760, + title: 'Anilist Setup', + show: true, + autoHideMenuBar: true, + webPreferences: { + nodeIntegration: false, + contextIsolation: true, + }, + }); +}); diff --git a/src/main/runtime/setup-window-factory.ts 
b/src/main/runtime/setup-window-factory.ts new file mode 100644 index 0000000..18d4002 --- /dev/null +++ b/src/main/runtime/setup-window-factory.ts @@ -0,0 +1,62 @@ +interface SetupWindowConfig { + width: number; + height: number; + title: string; + resizable?: boolean; + minimizable?: boolean; + maximizable?: boolean; +} + +function createSetupWindowHandler( + deps: { createBrowserWindow: (options: Electron.BrowserWindowConstructorOptions) => TWindow }, + config: SetupWindowConfig, +) { + return (): TWindow => + deps.createBrowserWindow({ + width: config.width, + height: config.height, + title: config.title, + show: true, + autoHideMenuBar: true, + ...(config.resizable === undefined ? {} : { resizable: config.resizable }), + ...(config.minimizable === undefined ? {} : { minimizable: config.minimizable }), + ...(config.maximizable === undefined ? {} : { maximizable: config.maximizable }), + webPreferences: { + nodeIntegration: false, + contextIsolation: true, + }, + }); +} + +export function createCreateFirstRunSetupWindowHandler(deps: { + createBrowserWindow: (options: Electron.BrowserWindowConstructorOptions) => TWindow; +}) { + return createSetupWindowHandler(deps, { + width: 480, + height: 460, + title: 'SubMiner Setup', + resizable: false, + minimizable: false, + maximizable: false, + }); +} + +export function createCreateJellyfinSetupWindowHandler(deps: { + createBrowserWindow: (options: Electron.BrowserWindowConstructorOptions) => TWindow; +}) { + return createSetupWindowHandler(deps, { + width: 520, + height: 560, + title: 'Jellyfin Setup', + }); +} + +export function createCreateAnilistSetupWindowHandler(deps: { + createBrowserWindow: (options: Electron.BrowserWindowConstructorOptions) => TWindow; +}) { + return createSetupWindowHandler(deps, { + width: 1000, + height: 760, + title: 'Anilist Setup', + }); +} diff --git a/src/main/runtime/startup-autoplay-release-policy.test.ts b/src/main/runtime/startup-autoplay-release-policy.test.ts index 
d9da23b..f3bb708 100644 --- a/src/main/runtime/startup-autoplay-release-policy.test.ts +++ b/src/main/runtime/startup-autoplay-release-policy.test.ts @@ -14,9 +14,7 @@ test('autoplay release keeps the short retry budget for normal playback signals' test('autoplay release uses the full startup timeout window while paused', () => { assert.equal( resolveAutoplayReadyMaxReleaseAttempts({ forceWhilePaused: true }), - Math.ceil( - STARTUP_AUTOPLAY_RELEASE_TIMEOUT_MS / DEFAULT_AUTOPLAY_RELEASE_RETRY_DELAY_MS, - ), + Math.ceil(STARTUP_AUTOPLAY_RELEASE_TIMEOUT_MS / DEFAULT_AUTOPLAY_RELEASE_RETRY_DELAY_MS), ); }); diff --git a/src/main/runtime/startup-autoplay-release-policy.ts b/src/main/runtime/startup-autoplay-release-policy.ts index e46b17f..dabe846 100644 --- a/src/main/runtime/startup-autoplay-release-policy.ts +++ b/src/main/runtime/startup-autoplay-release-policy.ts @@ -22,7 +22,4 @@ export function resolveAutoplayReadyMaxReleaseAttempts(options?: { return Math.max(3, Math.ceil(startupTimeoutMs / retryDelayMs)); } -export { - DEFAULT_AUTOPLAY_RELEASE_RETRY_DELAY_MS, - STARTUP_AUTOPLAY_RELEASE_TIMEOUT_MS, -}; +export { DEFAULT_AUTOPLAY_RELEASE_RETRY_DELAY_MS, STARTUP_AUTOPLAY_RELEASE_TIMEOUT_MS }; diff --git a/src/main/runtime/subtitle-prefetch-runtime.test.ts b/src/main/runtime/subtitle-prefetch-runtime.test.ts new file mode 100644 index 0000000..0708089 --- /dev/null +++ b/src/main/runtime/subtitle-prefetch-runtime.test.ts @@ -0,0 +1,59 @@ +import assert from 'node:assert/strict'; +import test from 'node:test'; +import { createResolveActiveSubtitleSidebarSourceHandler } from './subtitle-prefetch-runtime'; + +test('subtitle prefetch runtime resolves direct external subtitle sources first', async () => { + const resolveSource = createResolveActiveSubtitleSidebarSourceHandler({ + getFfmpegPath: () => 'ffmpeg', + extractInternalSubtitleTrack: async () => { + throw new Error('should not extract external tracks'); + }, + }); + + const resolved = await resolveSource({ + 
currentExternalFilenameRaw: ' /tmp/current.ass ', + currentTrackRaw: null, + trackListRaw: null, + sidRaw: null, + videoPath: '/media/video.mkv', + }); + + assert.deepEqual(resolved, { + path: '/tmp/current.ass', + sourceKey: '/tmp/current.ass', + }); +}); + +test('subtitle prefetch runtime extracts internal subtitle tracks into a stable source key', async () => { + const resolveSource = createResolveActiveSubtitleSidebarSourceHandler({ + getFfmpegPath: () => 'ffmpeg-custom', + extractInternalSubtitleTrack: async (ffmpegPath, videoPath, track) => { + assert.equal(ffmpegPath, 'ffmpeg-custom'); + assert.equal(videoPath, '/media/video.mkv'); + assert.equal((track as Record)['ff-index'], 7); + return { + path: '/tmp/subminer-sidebar-123/track_7.ass', + cleanup: async () => {}, + }; + }, + }); + + const resolved = await resolveSource({ + currentExternalFilenameRaw: null, + currentTrackRaw: { + type: 'sub', + id: 3, + 'ff-index': 7, + codec: 'ass', + }, + trackListRaw: [], + sidRaw: 3, + videoPath: '/media/video.mkv', + }); + + assert.deepEqual(resolved, { + path: '/tmp/subminer-sidebar-123/track_7.ass', + sourceKey: 'internal:/media/video.mkv:track:3:ff:7', + cleanup: resolved?.cleanup, + }); +}); diff --git a/src/main/runtime/subtitle-prefetch-runtime.ts b/src/main/runtime/subtitle-prefetch-runtime.ts new file mode 100644 index 0000000..27da41d --- /dev/null +++ b/src/main/runtime/subtitle-prefetch-runtime.ts @@ -0,0 +1,180 @@ +import type { SubtitlePrefetchInitController } from './subtitle-prefetch-init'; +import { buildSubtitleSidebarSourceKey } from './subtitle-prefetch-source'; + +type MpvSubtitleTrackLike = { + type?: unknown; + id?: unknown; + selected?: unknown; + external?: unknown; + codec?: unknown; + 'ff-index'?: unknown; + 'external-filename'?: unknown; +}; + +type ActiveSubtitleSidebarSource = { + path: string; + sourceKey: string; + cleanup?: () => Promise; +}; + +function parseTrackId(value: unknown): number | null { + if (typeof value === 'number' && 
Number.isInteger(value)) { + return value; + } + if (typeof value === 'string') { + const parsed = Number(value.trim()); + return Number.isInteger(parsed) ? parsed : null; + } + return null; +} + +function getActiveSubtitleTrack( + currentTrackRaw: unknown, + trackListRaw: unknown, + sidRaw: unknown, +): MpvSubtitleTrackLike | null { + if (currentTrackRaw && typeof currentTrackRaw === 'object') { + const track = currentTrackRaw as MpvSubtitleTrackLike; + if (track.type === undefined || track.type === 'sub') { + return track; + } + } + + const sid = parseTrackId(sidRaw); + if (!Array.isArray(trackListRaw)) { + return null; + } + + const bySid = + sid === null + ? null + : ((trackListRaw.find((entry: unknown) => { + if (!entry || typeof entry !== 'object') { + return false; + } + const track = entry as MpvSubtitleTrackLike; + return track.type === 'sub' && parseTrackId(track.id) === sid; + }) as MpvSubtitleTrackLike | undefined) ?? null); + if (bySid) { + return bySid; + } + + return ( + (trackListRaw.find((entry: unknown) => { + if (!entry || typeof entry !== 'object') { + return false; + } + const track = entry as MpvSubtitleTrackLike; + return track.type === 'sub' && track.selected === true; + }) as MpvSubtitleTrackLike | undefined) ?? null + ); +} + +export function createResolveActiveSubtitleSidebarSourceHandler(deps: { + getFfmpegPath: () => string; + extractInternalSubtitleTrack: ( + ffmpegPath: string, + videoPath: string, + track: MpvSubtitleTrackLike, + ) => Promise<{ path: string; cleanup: () => Promise } | null>; +}) { + return async (input: { + currentExternalFilenameRaw: unknown; + currentTrackRaw: unknown; + trackListRaw: unknown; + sidRaw: unknown; + videoPath: string; + }): Promise => { + const currentExternalFilename = + typeof input.currentExternalFilenameRaw === 'string' + ? 
input.currentExternalFilenameRaw.trim() + : ''; + if (currentExternalFilename) { + return { path: currentExternalFilename, sourceKey: currentExternalFilename }; + } + + const track = getActiveSubtitleTrack(input.currentTrackRaw, input.trackListRaw, input.sidRaw); + if (!track) { + return null; + } + + const externalFilename = + typeof track['external-filename'] === 'string' ? track['external-filename'].trim() : ''; + if (externalFilename) { + return { path: externalFilename, sourceKey: externalFilename }; + } + + const extracted = await deps.extractInternalSubtitleTrack( + deps.getFfmpegPath(), + input.videoPath, + track, + ); + if (!extracted) { + return null; + } + + return { + ...extracted, + sourceKey: buildSubtitleSidebarSourceKey(input.videoPath, track, extracted.path), + }; + }; +} + +export function createRefreshSubtitlePrefetchFromActiveTrackHandler(deps: { + getMpvClient: () => { + connected?: boolean; + requestProperty: (name: string) => Promise; + } | null; + getLastObservedTimePos: () => number; + subtitlePrefetchInitController: SubtitlePrefetchInitController; + resolveActiveSubtitleSidebarSource: ( + input: Parameters>[0], + ) => Promise; +}) { + return async (): Promise => { + const client = deps.getMpvClient(); + if (!client?.connected) { + return; + } + + try { + const [currentExternalFilenameRaw, currentTrackRaw, trackListRaw, sidRaw, videoPathRaw] = + await Promise.all([ + client.requestProperty('current-tracks/sub/external-filename').catch(() => null), + client.requestProperty('current-tracks/sub').catch(() => null), + client.requestProperty('track-list'), + client.requestProperty('sid'), + client.requestProperty('path'), + ]); + const videoPath = typeof videoPathRaw === 'string' ? 
videoPathRaw : ''; + if (!videoPath) { + deps.subtitlePrefetchInitController.cancelPendingInit(); + return; + } + + const resolvedSource = await deps.resolveActiveSubtitleSidebarSource({ + currentExternalFilenameRaw, + currentTrackRaw, + trackListRaw, + sidRaw, + videoPath, + }); + if (!resolvedSource) { + deps.subtitlePrefetchInitController.cancelPendingInit(); + return; + } + + try { + await deps.subtitlePrefetchInitController.initSubtitlePrefetch( + resolvedSource.path, + deps.getLastObservedTimePos(), + resolvedSource.sourceKey, + ); + } finally { + await resolvedSource.cleanup?.(); + } + } catch { + // Skip refresh when the track query fails. + } + }; +} diff --git a/src/main/runtime/windows-mpv-launch.ts b/src/main/runtime/windows-mpv-launch.ts index 7501565..4424f2f 100644 --- a/src/main/runtime/windows-mpv-launch.ts +++ b/src/main/runtime/windows-mpv-launch.ts @@ -33,10 +33,7 @@ export function resolveWindowsMpvPath(deps: WindowsMpvLaunchDeps): string { return ''; } -export function buildWindowsMpvLaunchArgs( - targets: string[], - extraArgs: string[] = [], -): string[] { +export function buildWindowsMpvLaunchArgs(targets: string[], extraArgs: string[] = []): string[] { return ['--player-operation-mode=pseudo-gui', '--profile=subminer', ...extraArgs, ...targets]; } diff --git a/src/main/runtime/windows-mpv-shortcuts.ts b/src/main/runtime/windows-mpv-shortcuts.ts index 09dcec8..013885c 100644 --- a/src/main/runtime/windows-mpv-shortcuts.ts +++ b/src/main/runtime/windows-mpv-shortcuts.ts @@ -24,7 +24,7 @@ export interface WindowsMpvShortcutInstallResult { } export function resolveWindowsStartMenuProgramsDir(appDataDir: string): string { - return path.join(appDataDir, 'Microsoft', 'Windows', 'Start Menu', 'Programs'); + return path.win32.join(appDataDir, 'Microsoft', 'Windows', 'Start Menu', 'Programs'); } export function resolveWindowsMpvShortcutPaths(options: { @@ -32,11 +32,11 @@ export function resolveWindowsMpvShortcutPaths(options: { desktopDir: string; 
}): WindowsMpvShortcutPaths { return { - startMenuPath: path.join( + startMenuPath: path.win32.join( resolveWindowsStartMenuProgramsDir(options.appDataDir), WINDOWS_MPV_SHORTCUT_NAME, ), - desktopPath: path.join(options.desktopDir, WINDOWS_MPV_SHORTCUT_NAME), + desktopPath: path.win32.join(options.desktopDir, WINDOWS_MPV_SHORTCUT_NAME), }; } @@ -54,7 +54,7 @@ export function buildWindowsMpvShortcutDetails(exePath: string): WindowsShortcut return { target: exePath, args: '--launch-mpv', - cwd: path.dirname(exePath), + cwd: path.win32.dirname(exePath), description: 'Launch mpv with the SubMiner profile', icon: exePath, iconIndex: 0, @@ -79,7 +79,7 @@ export function applyWindowsMpvShortcuts(options: { const failures: string[] = []; const ensureShortcut = (shortcutPath: string): void => { - mkdirSync(path.dirname(shortcutPath), { recursive: true }); + mkdirSync(path.win32.dirname(shortcutPath), { recursive: true }); const ok = options.writeShortcutLink(shortcutPath, 'replace', details); if (!ok) { failures.push(shortcutPath); diff --git a/src/main/runtime/youtube-flow.test.ts b/src/main/runtime/youtube-flow.test.ts index b0125d3..6e48b85 100644 --- a/src/main/runtime/youtube-flow.test.ts +++ b/src/main/runtime/youtube-flow.test.ts @@ -141,9 +141,7 @@ test('youtube flow can open a manual picker session and load the selected subtit assert.ok( commands.some( (command) => - command[0] === 'set_property' && - command[1] === 'sub-visibility' && - command[2] === 'yes', + command[0] === 'set_property' && command[1] === 'sub-visibility' && command[2] === 'yes', ), ); assert.ok( @@ -263,9 +261,7 @@ test('youtube flow retries secondary after partial batch subtitle failure', asyn assert.ok( commands.some( (command) => - command[0] === 'sub-add' && - command[1] === '/tmp/manual:en.vtt' && - command[2] === 'cached', + command[0] === 'sub-add' && command[1] === '/tmp/manual:en.vtt' && command[2] === 'cached', ), ); }); @@ -708,12 +704,54 @@ test('youtube flow leaves 
non-authoritative youtube subtitle tracks untouched af return selectedSecondarySid; } return [ - { type: 'sub', id: 1, lang: 'en', title: 'English', external: true, 'external-filename': null }, - { type: 'sub', id: 2, lang: 'ja', title: 'Japanese', external: true, 'external-filename': null }, - { type: 'sub', id: 3, lang: 'ja-en', title: 'Japanese from English', external: true, 'external-filename': null }, - { type: 'sub', id: 4, lang: 'ja-ja', title: 'Japanese from Japanese', external: true, 'external-filename': null }, - { type: 'sub', id: 5, lang: 'ja-orig', title: 'auto-ja-orig.vtt', external: true, 'external-filename': '/tmp/auto-ja-orig.vtt' }, - { type: 'sub', id: 6, lang: 'en', title: 'manual-en.en.srt', external: true, 'external-filename': '/tmp/manual-en.en.srt' }, + { + type: 'sub', + id: 1, + lang: 'en', + title: 'English', + external: true, + 'external-filename': null, + }, + { + type: 'sub', + id: 2, + lang: 'ja', + title: 'Japanese', + external: true, + 'external-filename': null, + }, + { + type: 'sub', + id: 3, + lang: 'ja-en', + title: 'Japanese from English', + external: true, + 'external-filename': null, + }, + { + type: 'sub', + id: 4, + lang: 'ja-ja', + title: 'Japanese from Japanese', + external: true, + 'external-filename': null, + }, + { + type: 'sub', + id: 5, + lang: 'ja-orig', + title: 'auto-ja-orig.vtt', + external: true, + 'external-filename': '/tmp/auto-ja-orig.vtt', + }, + { + type: 'sub', + id: 6, + lang: 'en', + title: 'manual-en.en.srt', + external: true, + 'external-filename': '/tmp/manual-en.en.srt', + }, ]; }, refreshCurrentSubtitle: () => {}, @@ -737,7 +775,10 @@ test('youtube flow leaves non-authoritative youtube subtitle tracks untouched af await runtime.openManualPicker({ url: 'https://example.com' }); - assert.equal(commands.some((command) => command[0] === 'sub-remove'), false); + assert.equal( + commands.some((command) => command[0] === 'sub-remove'), + false, + ); }); test('youtube flow reuses existing manual youtube 
subtitle tracks when both requested languages already exist', async () => { @@ -751,8 +792,20 @@ test('youtube flow reuses existing manual youtube subtitle tracks when both requ videoId: 'video123', title: 'Video 123', tracks: [ - { ...primaryTrack, id: 'manual:ja', sourceLanguage: 'ja', kind: 'manual', title: 'Japanese' }, - { ...secondaryTrack, id: 'manual:en', sourceLanguage: 'en', kind: 'manual', title: 'English' }, + { + ...primaryTrack, + id: 'manual:ja', + sourceLanguage: 'ja', + kind: 'manual', + title: 'Japanese', + }, + { + ...secondaryTrack, + id: 'manual:en', + sourceLanguage: 'en', + kind: 'manual', + title: 'English', + }, ], }), acquireYoutubeSubtitleTracks: async () => { @@ -801,10 +854,38 @@ test('youtube flow reuses existing manual youtube subtitle tracks when both requ return selectedSecondarySid; } return [ - { type: 'sub', id: 1, lang: 'en', title: 'English', external: true, 'external-filename': null }, - { type: 'sub', id: 2, lang: 'ja', title: 'Japanese', external: true, 'external-filename': null }, - { type: 'sub', id: 3, lang: 'ja-en', title: 'Japanese from English', external: true, 'external-filename': null }, - { type: 'sub', id: 4, lang: 'ja-ja', title: 'Japanese from Japanese', external: true, 'external-filename': null }, + { + type: 'sub', + id: 1, + lang: 'en', + title: 'English', + external: true, + 'external-filename': null, + }, + { + type: 'sub', + id: 2, + lang: 'ja', + title: 'Japanese', + external: true, + 'external-filename': null, + }, + { + type: 'sub', + id: 3, + lang: 'ja-en', + title: 'Japanese from English', + external: true, + 'external-filename': null, + }, + { + type: 'sub', + id: 4, + lang: 'ja-ja', + title: 'Japanese from Japanese', + external: true, + 'external-filename': null, + }, ]; }, refreshCurrentSubtitle: () => {}, @@ -833,9 +914,15 @@ test('youtube flow reuses existing manual youtube subtitle tracks when both requ assert.equal(selectedPrimarySid, 2); assert.equal(selectedSecondarySid, 1); - 
assert.equal(commands.some((command) => command[0] === 'sub-add'), false); + assert.equal( + commands.some((command) => command[0] === 'sub-add'), + false, + ); assert.deepEqual(refreshedSidebarSources, ['/tmp/manual-ja.ja.srt']); - assert.equal(commands.some((command) => command[0] === 'sub-remove'), false); + assert.equal( + commands.some((command) => command[0] === 'sub-remove'), + false, + ); }); test('youtube flow waits for manual youtube tracks to appear before falling back to injected copies', async () => { @@ -849,8 +936,20 @@ test('youtube flow waits for manual youtube tracks to appear before falling back videoId: 'video123', title: 'Video 123', tracks: [ - { ...primaryTrack, id: 'manual:ja', sourceLanguage: 'ja', kind: 'manual', title: 'Japanese' }, - { ...secondaryTrack, id: 'manual:en', sourceLanguage: 'en', kind: 'manual', title: 'English' }, + { + ...primaryTrack, + id: 'manual:ja', + sourceLanguage: 'ja', + kind: 'manual', + title: 'Japanese', + }, + { + ...secondaryTrack, + id: 'manual:en', + sourceLanguage: 'en', + kind: 'manual', + title: 'English', + }, ], }), acquireYoutubeSubtitleTracks: async () => { @@ -903,10 +1002,38 @@ test('youtube flow waits for manual youtube tracks to appear before falling back return []; } return [ - { type: 'sub', id: 1, lang: 'en', title: 'English', external: true, 'external-filename': null }, - { type: 'sub', id: 2, lang: 'ja', title: 'Japanese', external: true, 'external-filename': null }, - { type: 'sub', id: 3, lang: 'ja-en', title: 'Japanese from English', external: true, 'external-filename': null }, - { type: 'sub', id: 4, lang: 'ja-ja', title: 'Japanese from Japanese', external: true, 'external-filename': null }, + { + type: 'sub', + id: 1, + lang: 'en', + title: 'English', + external: true, + 'external-filename': null, + }, + { + type: 'sub', + id: 2, + lang: 'ja', + title: 'Japanese', + external: true, + 'external-filename': null, + }, + { + type: 'sub', + id: 3, + lang: 'ja-en', + title: 'Japanese from 
English', + external: true, + 'external-filename': null, + }, + { + type: 'sub', + id: 4, + lang: 'ja-ja', + title: 'Japanese from Japanese', + external: true, + 'external-filename': null, + }, ]; }, refreshCurrentSubtitle: () => {}, @@ -932,7 +1059,10 @@ test('youtube flow waits for manual youtube tracks to appear before falling back assert.equal(selectedPrimarySid, 2); assert.equal(selectedSecondarySid, 1); - assert.equal(commands.some((command) => command[0] === 'sub-add'), false); + assert.equal( + commands.some((command) => command[0] === 'sub-add'), + false, + ); }); test('youtube flow reuses manual youtube tracks even when mpv exposes external filenames', async () => { @@ -970,7 +1100,9 @@ test('youtube flow reuses manual youtube tracks even when mpv exposes external f if (track.id === 'manual:ja') { return { path: '/tmp/manual-ja.ja.srt' }; } - throw new Error('should not download secondary track when existing manual english track is reusable'); + throw new Error( + 'should not download secondary track when existing manual english track is reusable', + ); }, openPicker: async () => false, pauseMpv: () => {}, @@ -1051,7 +1183,10 @@ test('youtube flow reuses manual youtube tracks even when mpv exposes external f assert.equal(selectedPrimarySid, 2); assert.equal(selectedSecondarySid, 1); - assert.equal(commands.some((command) => command[0] === 'sub-add'), false); + assert.equal( + commands.some((command) => command[0] === 'sub-add'), + false, + ); }); test('youtube flow falls back to existing auto secondary track when auto secondary download fails', async () => { diff --git a/src/main/runtime/youtube-flow.ts b/src/main/runtime/youtube-flow.ts index db72b76..d4f411e 100644 --- a/src/main/runtime/youtube-flow.ts +++ b/src/main/runtime/youtube-flow.ts @@ -384,7 +384,9 @@ async function injectDownloadedSubtitles( } else { deps.warn( `Unable to bind downloaded primary subtitle track in mpv: ${ - primarySelection.injectedPath ? 
path.basename(primarySelection.injectedPath) : primarySelection.track.label + primarySelection.injectedPath + ? path.basename(primarySelection.injectedPath) + : primarySelection.track.label }`, ); } @@ -415,9 +417,7 @@ async function injectDownloadedSubtitles( deps.refreshCurrentSubtitle(currentSubText); } - deps.showMpvOsd( - secondaryTrack ? 'Primary and secondary subtitles loaded.' : 'Subtitles loaded.', - ); + deps.showMpvOsd(secondaryTrack ? 'Primary and secondary subtitles loaded.' : 'Subtitles loaded.'); return true; } @@ -587,7 +587,8 @@ export function createYoutubeFlowRuntime(deps: YoutubeFlowDeps) { existingPrimaryTrackId, ) : null; - const primaryReady = input.primaryTrack.kind !== 'manual' || existingPrimaryTrackId !== null; + const primaryReady = + input.primaryTrack.kind !== 'manual' || existingPrimaryTrackId !== null; const secondaryReady = !input.secondaryTrack || input.secondaryTrack.kind !== 'manual' || @@ -631,7 +632,11 @@ export function createYoutubeFlowRuntime(deps: YoutubeFlowDeps) { secondaryInjectedPath = acquired.secondaryPath; } - if (input.secondaryTrack && existingSecondaryTrackId === null && secondaryInjectedPath === null) { + if ( + input.secondaryTrack && + existingSecondaryTrackId === null && + secondaryInjectedPath === null + ) { try { secondaryInjectedPath = ( await deps.acquireYoutubeSubtitleTrack({ diff --git a/src/main/runtime/youtube-playback-launch.test.ts b/src/main/runtime/youtube-playback-launch.test.ts index a5ae968..3bf9d9a 100644 --- a/src/main/runtime/youtube-playback-launch.test.ts +++ b/src/main/runtime/youtube-playback-launch.test.ts @@ -183,7 +183,13 @@ test('prepare youtube playback accepts a non-youtube resolved path once playable '/videos/episode01.mkv', 'https://rr16---sn.example.googlevideo.com/videoplayback?id=abc', ]; - const observedTrackLists = [[], [{ type: 'video', id: 1 }, { type: 'audio', id: 2 }]]; + const observedTrackLists = [ + [], + [ + { type: 'video', id: 1 }, + { type: 'audio', id: 2 }, + ], + 
]; let requestCount = 0; const prepare = createPrepareYoutubePlaybackInMpvHandler({ requestPath: async () => { @@ -256,11 +262,14 @@ test('prepare youtube playback does not accept a different youtube video after p test('prepare youtube playback accepts a fresh-start path change when the direct target matches exactly', async () => { const commands: Array> = []; - const observedPaths = [ - '', - 'https://rr16---sn.example.googlevideo.com/videoplayback?id=abc', + const observedPaths = ['', 'https://rr16---sn.example.googlevideo.com/videoplayback?id=abc']; + const observedTrackLists = [ + [], + [ + { type: 'video', id: 1 }, + { type: 'audio', id: 2 }, + ], ]; - const observedTrackLists = [[], [{ type: 'video', id: 1 }, { type: 'audio', id: 2 }]]; let requestCount = 0; const prepare = createPrepareYoutubePlaybackInMpvHandler({ requestPath: async () => { diff --git a/src/main/runtime/youtube-playback-launch.ts b/src/main/runtime/youtube-playback-launch.ts index 0de1e16..166968b 100644 --- a/src/main/runtime/youtube-playback-launch.ts +++ b/src/main/runtime/youtube-playback-launch.ts @@ -74,7 +74,9 @@ function hasPlayableMediaTracks(trackListRaw: unknown): boolean { if (!Array.isArray(trackListRaw)) return false; return trackListRaw.some((track) => { if (!track || typeof track !== 'object') return false; - const type = String((track as Record).type || '').trim().toLowerCase(); + const type = String((track as Record).type || '') + .trim() + .toLowerCase(); return type === 'video' || type === 'audio'; }); } diff --git a/src/main/runtime/youtube-playback-runtime.test.ts b/src/main/runtime/youtube-playback-runtime.test.ts new file mode 100644 index 0000000..5208bfc --- /dev/null +++ b/src/main/runtime/youtube-playback-runtime.test.ts @@ -0,0 +1,148 @@ +import assert from 'node:assert/strict'; +import test from 'node:test'; +import { createYoutubePlaybackRuntime } from './youtube-playback-runtime'; + +test('youtube playback runtime resets flow ownership after a successful 
run', async () => { + const calls: string[] = []; + let appOwnedFlowInFlight = false; + let timeoutCallback: (() => void) | null = null; + let socketPath = '/tmp/mpv.sock'; + + const runtime = createYoutubePlaybackRuntime({ + platform: 'linux', + directPlaybackFormat: 'best', + mpvYtdlFormat: 'bestvideo+bestaudio', + autoLaunchTimeoutMs: 2_000, + connectTimeoutMs: 1_000, + getSocketPath: () => socketPath, + getMpvConnected: () => true, + invalidatePendingAutoplayReadyFallbacks: () => { + calls.push('invalidate-autoplay'); + }, + setAppOwnedFlowInFlight: (next) => { + appOwnedFlowInFlight = next; + calls.push(`app-owned:${next}`); + }, + ensureYoutubePlaybackRuntimeReady: async () => { + calls.push('ensure-runtime-ready'); + }, + resolveYoutubePlaybackUrl: async () => { + throw new Error('linux path should not resolve direct playback url'); + }, + launchWindowsMpv: () => ({ ok: false }), + waitForYoutubeMpvConnected: async (timeoutMs) => { + calls.push(`wait-connected:${timeoutMs}`); + return true; + }, + prepareYoutubePlaybackInMpv: async ({ url }) => { + calls.push(`prepare:${url}`); + return true; + }, + runYoutubePlaybackFlow: async ({ url, mode }) => { + calls.push(`run-flow:${url}:${mode}`); + }, + logInfo: (message) => { + calls.push(`info:${message}`); + }, + logWarn: (message) => { + calls.push(`warn:${message}`); + }, + schedule: (callback) => { + timeoutCallback = callback; + calls.push('schedule-arm'); + return 1 as never; + }, + clearScheduled: () => { + calls.push('clear-scheduled'); + }, + }); + + await runtime.runYoutubePlaybackFlow({ + url: 'https://youtu.be/demo', + mode: 'download', + source: 'initial', + }); + + assert.equal(appOwnedFlowInFlight, false); + assert.equal(runtime.getQuitOnDisconnectArmed(), false); + assert.deepEqual(calls.slice(0, 6), [ + 'invalidate-autoplay', + 'app-owned:true', + 'ensure-runtime-ready', + 'wait-connected:1000', + 'schedule-arm', + 'prepare:https://youtu.be/demo', + ]); + + assert.ok(timeoutCallback); + const 
scheduledCallback = timeoutCallback as () => void; + scheduledCallback(); + assert.equal(runtime.getQuitOnDisconnectArmed(), true); +}); + +test('youtube playback runtime resolves the socket path lazily for windows startup', async () => { + const calls: string[] = []; + let socketPath = '/tmp/initial.sock'; + + const runtime = createYoutubePlaybackRuntime({ + platform: 'win32', + directPlaybackFormat: 'best', + mpvYtdlFormat: 'bestvideo+bestaudio', + autoLaunchTimeoutMs: 2_000, + connectTimeoutMs: 1_000, + getSocketPath: () => socketPath, + getMpvConnected: () => false, + invalidatePendingAutoplayReadyFallbacks: () => { + calls.push('invalidate-autoplay'); + }, + setAppOwnedFlowInFlight: (next) => { + calls.push(`app-owned:${next}`); + }, + ensureYoutubePlaybackRuntimeReady: async () => { + calls.push('ensure-runtime-ready'); + }, + resolveYoutubePlaybackUrl: async (url, format) => { + calls.push(`resolve:${url}:${format}`); + return 'https://example.com/direct'; + }, + launchWindowsMpv: (_playbackUrl, args) => { + calls.push(`launch:${args.join(' ')}`); + return { ok: true, mpvPath: '/usr/bin/mpv' }; + }, + waitForYoutubeMpvConnected: async (timeoutMs) => { + calls.push(`wait-connected:${timeoutMs}`); + return true; + }, + prepareYoutubePlaybackInMpv: async ({ url }) => { + calls.push(`prepare:${url}`); + return true; + }, + runYoutubePlaybackFlow: async ({ url, mode }) => { + calls.push(`run-flow:${url}:${mode}`); + }, + logInfo: (message) => { + calls.push(`info:${message}`); + }, + logWarn: (message) => { + calls.push(`warn:${message}`); + }, + schedule: (callback) => { + calls.push('schedule-arm'); + callback(); + return 1 as never; + }, + clearScheduled: () => { + calls.push('clear-scheduled'); + }, + }); + + socketPath = '/tmp/updated.sock'; + + await runtime.runYoutubePlaybackFlow({ + url: 'https://youtu.be/demo', + mode: 'download', + source: 'initial', + }); + + assert.ok(calls.some((entry) => entry.includes('--input-ipc-server=/tmp/updated.sock'))); +}); 
diff --git a/src/main/runtime/youtube-playback-runtime.ts b/src/main/runtime/youtube-playback-runtime.ts new file mode 100644 index 0000000..0bf0815 --- /dev/null +++ b/src/main/runtime/youtube-playback-runtime.ts @@ -0,0 +1,150 @@ +import type { CliArgs, CliCommandSource } from '../../cli/args'; + +type LaunchResult = { + ok: boolean; + mpvPath?: string; +}; + +export type YoutubePlaybackRuntimeDeps = { + platform: NodeJS.Platform; + directPlaybackFormat: string; + mpvYtdlFormat: string; + autoLaunchTimeoutMs: number; + connectTimeoutMs: number; + getSocketPath: () => string; + getMpvConnected: () => boolean; + invalidatePendingAutoplayReadyFallbacks: () => void; + setAppOwnedFlowInFlight: (next: boolean) => void; + ensureYoutubePlaybackRuntimeReady: () => Promise; + resolveYoutubePlaybackUrl: (url: string, format: string) => Promise; + launchWindowsMpv: (playbackUrl: string, args: string[]) => LaunchResult; + waitForYoutubeMpvConnected: (timeoutMs: number) => Promise; + prepareYoutubePlaybackInMpv: (request: { url: string }) => Promise; + runYoutubePlaybackFlow: (request: { + url: string; + mode: NonNullable; + }) => Promise; + logInfo: (message: string) => void; + logWarn: (message: string) => void; + schedule: (callback: () => void, delayMs: number) => ReturnType; + clearScheduled: (timer: ReturnType) => void; +}; + +export function createYoutubePlaybackRuntime(deps: YoutubePlaybackRuntimeDeps) { + let quitOnDisconnectArmed = false; + let quitOnDisconnectArmTimer: ReturnType | null = null; + let playbackFlowGeneration = 0; + + const clearYoutubePlayQuitOnDisconnectArmTimer = (): void => { + if (quitOnDisconnectArmTimer) { + deps.clearScheduled(quitOnDisconnectArmTimer); + quitOnDisconnectArmTimer = null; + } + }; + + const runYoutubePlaybackFlow = async (request: { + url: string; + mode: NonNullable; + source: CliCommandSource; + }): Promise => { + const flowGeneration = ++playbackFlowGeneration; + deps.invalidatePendingAutoplayReadyFallbacks(); + 
deps.setAppOwnedFlowInFlight(true); + let flowCompleted = false; + + try { + clearYoutubePlayQuitOnDisconnectArmTimer(); + quitOnDisconnectArmed = false; + await deps.ensureYoutubePlaybackRuntimeReady(); + + let playbackUrl = request.url; + let launchedWindowsMpv = false; + if (deps.platform === 'win32') { + try { + playbackUrl = await deps.resolveYoutubePlaybackUrl( + request.url, + deps.directPlaybackFormat, + ); + deps.logInfo('Resolved direct YouTube playback URL for Windows MPV startup.'); + } catch (error) { + deps.logWarn( + `Failed to resolve direct YouTube playback URL; falling back to page URL: ${ + error instanceof Error ? error.message : String(error) + }`, + ); + } + } + + if (deps.platform === 'win32' && !deps.getMpvConnected()) { + const socketPath = deps.getSocketPath(); + const launchResult = deps.launchWindowsMpv(playbackUrl, [ + '--pause=yes', + '--ytdl=yes', + `--ytdl-format=${deps.mpvYtdlFormat}`, + '--sub-auto=no', + '--sub-file-paths=.;subs;subtitles', + '--sid=auto', + '--secondary-sid=auto', + '--secondary-sub-visibility=no', + '--alang=ja,jp,jpn,japanese,en,eng,english,enus,en-us', + '--slang=ja,jp,jpn,japanese,en,eng,english,enus,en-us', + `--input-ipc-server=${socketPath}`, + ]); + launchedWindowsMpv = launchResult.ok; + if (launchResult.ok && launchResult.mpvPath) { + deps.logInfo(`Bootstrapping Windows mpv for YouTube playback via ${launchResult.mpvPath}`); + } + if (!launchResult.ok) { + deps.logWarn('Unable to bootstrap Windows mpv for YouTube playback.'); + } + } + + const connected = await deps.waitForYoutubeMpvConnected( + launchedWindowsMpv ? deps.autoLaunchTimeoutMs : deps.connectTimeoutMs, + ); + if (!connected) { + throw new Error( + launchedWindowsMpv + ? 'MPV not connected after auto-launch. Ensure mpv is installed and can open the requested YouTube URL.' + : 'MPV not connected. 
Start mpv with the SubMiner profile or retry after mpv finishes starting.', + ); + } + + if (request.source === 'initial') { + quitOnDisconnectArmTimer = deps.schedule(() => { + if (playbackFlowGeneration !== flowGeneration) { + return; + } + quitOnDisconnectArmed = true; + quitOnDisconnectArmTimer = null; + }, 3000); + } + + const mediaReady = await deps.prepareYoutubePlaybackInMpv({ url: playbackUrl }); + if (!mediaReady) { + throw new Error('Timed out waiting for mpv to load the requested YouTube URL.'); + } + + await deps.runYoutubePlaybackFlow({ + url: request.url, + mode: request.mode, + }); + flowCompleted = true; + deps.logInfo(`YouTube playback flow completed from ${request.source}.`); + } finally { + if (playbackFlowGeneration === flowGeneration) { + if (!flowCompleted) { + clearYoutubePlayQuitOnDisconnectArmTimer(); + quitOnDisconnectArmed = false; + } + deps.setAppOwnedFlowInFlight(false); + } + } + }; + + return { + clearYoutubePlayQuitOnDisconnectArmTimer, + getQuitOnDisconnectArmed: (): boolean => quitOnDisconnectArmed, + runYoutubePlaybackFlow, + }; +} diff --git a/src/main/runtime/youtube-primary-subtitle-notification.ts b/src/main/runtime/youtube-primary-subtitle-notification.ts index c52804b..df07e59 100644 --- a/src/main/runtime/youtube-primary-subtitle-notification.ts +++ b/src/main/runtime/youtube-primary-subtitle-notification.ts @@ -1,7 +1,9 @@ import { isYoutubeMediaPath } from './youtube-playback'; import { normalizeYoutubeLangCode } from '../../core/services/youtube/labels'; -export type YoutubePrimarySubtitleNotificationTimer = ReturnType | { id: number }; +export type YoutubePrimarySubtitleNotificationTimer = + | ReturnType + | { id: number }; type SubtitleTrackEntry = { id: number | null; @@ -82,7 +84,9 @@ function hasSelectedPrimarySubtitle( const tracks = trackList.map(normalizeTrack); const activeTrack = - (sid === null ? null : tracks.find((track) => track?.type === 'sub' && track.id === sid) ?? null) ?? + (sid === null + ? 
null + : (tracks.find((track) => track?.type === 'sub' && track.id === sid) ?? null)) ?? tracks.find((track) => track?.type === 'sub' && track.selected) ?? null; if (!activeTrack) { @@ -130,7 +134,9 @@ export function createYoutubePrimarySubtitleNotificationRuntime(deps: { return; } lastReportedMediaPath = mediaPath; - deps.notifyFailure('Primary subtitle failed to download or load. Try again from the subtitle modal.'); + deps.notifyFailure( + 'Primary subtitle failed to download or load. Try again from the subtitle modal.', + ); }; const schedulePendingCheck = (): void => { @@ -150,7 +156,8 @@ export function createYoutubePrimarySubtitleNotificationRuntime(deps: { return { handleMediaPathChange: (path: string | null): void => { - const normalizedPath = typeof path === 'string' && path.trim().length > 0 ? path.trim() : null; + const normalizedPath = + typeof path === 'string' && path.trim().length > 0 ? path.trim() : null; if (currentMediaPath !== normalizedPath) { lastReportedMediaPath = null; } diff --git a/src/release-workflow.test.ts b/src/release-workflow.test.ts index a86db6a..cf217f3 100644 --- a/src/release-workflow.test.ts +++ b/src/release-workflow.test.ts @@ -36,6 +36,13 @@ test('release workflow verifies generated config examples before packaging artif assert.match(releaseWorkflow, /bun run verify:config-example/); }); +test('release quality gate runs the maintained source coverage lane and uploads lcov output', () => { + assert.match(releaseWorkflow, /name: Coverage suite \(maintained source lane\)/); + assert.match(releaseWorkflow, /run: bun run test:coverage:src/); + assert.match(releaseWorkflow, /name: Upload coverage artifact/); + assert.match(releaseWorkflow, /path: coverage\/test-src\/lcov\.info/); +}); + test('release build jobs install and cache stats dependencies before packaging', () => { assert.match(releaseWorkflow, /build-linux:[\s\S]*stats\/node_modules/); assert.match(releaseWorkflow, /build-macos:[\s\S]*stats\/node_modules/); diff --git 
a/src/renderer/handlers/controller-binding-capture.ts b/src/renderer/handlers/controller-binding-capture.ts index eb033b3..9e69436 100644 --- a/src/renderer/handlers/controller-binding-capture.ts +++ b/src/renderer/handlers/controller-binding-capture.ts @@ -47,7 +47,10 @@ type ControllerBindingCaptureResult = dpadDirection: ControllerDpadFallback; }; -function isActiveButton(button: ControllerButtonState | undefined, triggerDeadzone: number): boolean { +function isActiveButton( + button: ControllerButtonState | undefined, + triggerDeadzone: number, +): boolean { if (!button) return false; return Boolean(button.pressed) || button.value >= triggerDeadzone; } @@ -90,7 +93,10 @@ export function createControllerBindingCapture(options: { }); } - function arm(nextTarget: ControllerBindingCaptureTarget, snapshot: ControllerBindingCaptureSnapshot): void { + function arm( + nextTarget: ControllerBindingCaptureTarget, + snapshot: ControllerBindingCaptureSnapshot, + ): void { target = nextTarget; resetBlockedState(snapshot); } @@ -139,7 +145,10 @@ export function createControllerBindingCapture(options: { } // After dpad early-return, only 'discrete' | 'axis' remain - const narrowedTarget: Extract = target; + const narrowedTarget: Extract< + ControllerBindingCaptureTarget, + { bindingType: 'discrete' | 'axis' } + > = target; for (let index = 0; index < snapshot.buttons.length; index += 1) { if (!isActiveButton(snapshot.buttons[index], options.triggerDeadzone)) continue; diff --git a/src/renderer/handlers/keyboard.test.ts b/src/renderer/handlers/keyboard.test.ts index c4ba4c8..a197098 100644 --- a/src/renderer/handlers/keyboard.test.ts +++ b/src/renderer/handlers/keyboard.test.ts @@ -518,6 +518,26 @@ test('popup-visible mpv keybindings still fire for bound keys', async () => { } }); +test('visible-layer y-t dispatches mpv plugin toggle while overlay owns focus', async () => { + const { handlers, testGlobals } = createKeyboardHandlerHarness(); + + try { + await 
handlers.setupMpvInputForwarding(); + + testGlobals.dispatchKeydown({ key: 'y', code: 'KeyY' }); + testGlobals.dispatchKeydown({ key: 't', code: 'KeyT' }); + + assert.equal( + testGlobals.mpvCommands.some( + (command) => command[0] === 'script-message' && command[1] === 'subminer-toggle', + ), + true, + ); + } finally { + testGlobals.restore(); + } +}); + test('keyboard mode: controller helpers dispatch popup audio play/cycle and scroll bridge commands', async () => { const { ctx, handlers, testGlobals } = createKeyboardHandlerHarness(); diff --git a/src/renderer/handlers/keyboard.ts b/src/renderer/handlers/keyboard.ts index 01c423e..ac4b294 100644 --- a/src/renderer/handlers/keyboard.ts +++ b/src/renderer/handlers/keyboard.ts @@ -194,13 +194,7 @@ export function createKeyboardHandlers( (isBackslashConfigured && e.key === '\\') || (toggleKey.length === 1 && e.key === toggleKey); - return ( - keyMatches && - !e.ctrlKey && - !e.altKey && - !e.metaKey && - !e.repeat - ); + return keyMatches && !e.ctrlKey && !e.altKey && !e.metaKey && !e.repeat; } function isStatsOverlayToggle(e: KeyboardEvent): boolean { diff --git a/src/renderer/handlers/mouse.test.ts b/src/renderer/handlers/mouse.test.ts index 5077ed2..6018c88 100644 --- a/src/renderer/handlers/mouse.test.ts +++ b/src/renderer/handlers/mouse.test.ts @@ -3,10 +3,7 @@ import test from 'node:test'; import type { SubtitleSidebarConfig } from '../../types'; import { createMouseHandlers } from './mouse.js'; -import { - YOMITAN_POPUP_HIDDEN_EVENT, - YOMITAN_POPUP_SHOWN_EVENT, -} from '../yomitan-popup.js'; +import { YOMITAN_POPUP_HIDDEN_EVENT, YOMITAN_POPUP_SHOWN_EVENT } from '../yomitan-popup.js'; function createClassList() { const classes = new Set(); @@ -118,9 +115,15 @@ test('secondary hover pauses on enter, reveals secondary subtitle, and resumes o }); await handlers.handleSecondaryMouseEnter(); - assert.equal(ctx.dom.secondarySubContainer.classList.contains('secondary-sub-hover-active'), true); + assert.equal( + 
ctx.dom.secondarySubContainer.classList.contains('secondary-sub-hover-active'), + true, + ); await handlers.handleSecondaryMouseLeave(); - assert.equal(ctx.dom.secondarySubContainer.classList.contains('secondary-sub-hover-active'), false); + assert.equal( + ctx.dom.secondarySubContainer.classList.contains('secondary-sub-hover-active'), + false, + ); assert.deepEqual(mpvCommands, [ ['set_property', 'pause', 'yes'], @@ -186,7 +189,10 @@ test('secondary leave toward primary subtitle container clears the secondary hov } as unknown as MouseEvent); assert.equal(ctx.state.isOverSubtitle, false); - assert.equal(ctx.dom.secondarySubContainer.classList.contains('secondary-sub-hover-active'), false); + assert.equal( + ctx.dom.secondarySubContainer.classList.contains('secondary-sub-hover-active'), + false, + ); assert.deepEqual(mpvCommands, [['set_property', 'pause', 'yes']]); }); @@ -237,7 +243,10 @@ test('primary hover pauses on enter without revealing secondary subtitle', async }); await handlers.handlePrimaryMouseEnter(); - assert.equal(ctx.dom.secondarySubContainer.classList.contains('secondary-sub-hover-active'), false); + assert.equal( + ctx.dom.secondarySubContainer.classList.contains('secondary-sub-hover-active'), + false, + ); await handlers.handlePrimaryMouseLeave(); assert.deepEqual(mpvCommands, [ @@ -394,7 +403,10 @@ test('restorePointerInteractionState reapplies the secondary hover class from po mousemove?.({ clientX: 10, clientY: 20 } as MouseEvent); assert.equal(ctx.state.isOverSubtitle, true); - assert.equal(ctx.dom.secondarySubContainer.classList.contains('secondary-sub-hover-active'), true); + assert.equal( + ctx.dom.secondarySubContainer.classList.contains('secondary-sub-hover-active'), + true, + ); } finally { Object.defineProperty(globalThis, 'document', { configurable: true, value: originalDocument }); Object.defineProperty(globalThis, 'window', { configurable: true, value: originalWindow }); diff --git a/src/renderer/handlers/mouse.ts 
b/src/renderer/handlers/mouse.ts index 52ef5ed..5c7e6e6 100644 --- a/src/renderer/handlers/mouse.ts +++ b/src/renderer/handlers/mouse.ts @@ -228,10 +228,7 @@ export function createMouseHandlers( syncOverlayMouseIgnoreState(ctx); } - async function handleMouseEnter( - _event?: MouseEvent, - showSecondaryHover = false, - ): Promise { + async function handleMouseEnter(_event?: MouseEvent, showSecondaryHover = false): Promise { ctx.state.isOverSubtitle = true; if (showSecondaryHover) { ctx.dom.secondarySubContainer.classList.add('secondary-sub-hover-active'); @@ -267,10 +264,7 @@ export function createMouseHandlers( pausedBySubtitleHover = true; } - async function handleMouseLeave( - _event?: MouseEvent, - hideSecondaryHover = false, - ): Promise { + async function handleMouseLeave(_event?: MouseEvent, hideSecondaryHover = false): Promise { const relatedTarget = _event?.relatedTarget ?? null; const otherContainer = hideSecondaryHover ? ctx.dom.subtitleContainer diff --git a/src/renderer/modals/controller-config-form.ts b/src/renderer/modals/controller-config-form.ts index 3131e6b..2b9d997 100644 --- a/src/renderer/modals/controller-config-form.ts +++ b/src/renderer/modals/controller-config-form.ts @@ -118,10 +118,14 @@ export function getDefaultControllerBinding(actionId: ControllerBindingActionId) if (!definition) { return { kind: 'none' } as const; } - return JSON.parse(JSON.stringify(definition.defaultBinding)) as ResolvedControllerConfig['bindings'][ControllerBindingActionId]; + return JSON.parse( + JSON.stringify(definition.defaultBinding), + ) as ResolvedControllerConfig['bindings'][ControllerBindingActionId]; } -export function getDefaultDpadFallback(actionId: ControllerBindingActionId): ControllerDpadFallback { +export function getDefaultDpadFallback( + actionId: ControllerBindingActionId, +): ControllerDpadFallback { const definition = getControllerBindingDefinition(actionId); if (!definition || definition.defaultBinding.kind !== 'axis') return 'none'; const 
binding = definition.defaultBinding; @@ -249,7 +253,11 @@ export function createControllerConfigForm(options: { if (definition.bindingType === 'axis') { renderAxisStickRow(definition, binding as ResolvedControllerAxisBinding, learningActionId); - renderAxisDpadRow(definition, binding as ResolvedControllerAxisBinding, dpadLearningActionId); + renderAxisDpadRow( + definition, + binding as ResolvedControllerAxisBinding, + dpadLearningActionId, + ); } else { renderDiscreteRow(definition, binding, learningActionId); } @@ -265,7 +273,12 @@ export function createControllerConfigForm(options: { const isExpanded = expandedRowKey === rowKey; const isLearning = learningActionId === definition.id; - const row = createRow(definition.label, formatFriendlyBindingLabel(binding), binding.kind === 'none', isExpanded); + const row = createRow( + definition.label, + formatFriendlyBindingLabel(binding), + binding.kind === 'none', + isExpanded, + ); row.addEventListener('click', () => { expandedRowKey = expandedRowKey === rowKey ? null : rowKey; render(); @@ -277,9 +290,18 @@ export function createControllerConfigForm(options: { ? 
'Press a button, trigger, or move a stick\u2026' : `Currently: ${formatControllerBindingSummary(binding)}`; const panel = createEditPanel(hint, isLearning, { - onLearn: (e) => { e.stopPropagation(); options.onLearn(definition.id, definition.bindingType); }, - onClear: (e) => { e.stopPropagation(); options.onClear(definition.id); }, - onReset: (e) => { e.stopPropagation(); options.onReset(definition.id); }, + onLearn: (e) => { + e.stopPropagation(); + options.onLearn(definition.id, definition.bindingType); + }, + onClear: (e) => { + e.stopPropagation(); + options.onClear(definition.id); + }, + onReset: (e) => { + e.stopPropagation(); + options.onReset(definition.id); + }, }); options.container.appendChild(panel); } @@ -294,7 +316,12 @@ export function createControllerConfigForm(options: { const isExpanded = expandedRowKey === rowKey; const isLearning = learningActionId === definition.id; - const row = createRow(`${definition.label} (Stick)`, formatFriendlyStickLabel(binding), binding.kind === 'none', isExpanded); + const row = createRow( + `${definition.label} (Stick)`, + formatFriendlyStickLabel(binding), + binding.kind === 'none', + isExpanded, + ); row.addEventListener('click', () => { expandedRowKey = expandedRowKey === rowKey ? null : rowKey; render(); @@ -305,9 +332,18 @@ export function createControllerConfigForm(options: { const summary = binding.kind === 'none' ? 'Disabled' : `Axis ${binding.axisIndex}`; const hint = isLearning ? 
'Move a stick or trigger\u2026' : `Currently: ${summary}`; const panel = createEditPanel(hint, isLearning, { - onLearn: (e) => { e.stopPropagation(); options.onLearn(definition.id, 'axis'); }, - onClear: (e) => { e.stopPropagation(); options.onClear(definition.id); }, - onReset: (e) => { e.stopPropagation(); options.onReset(definition.id); }, + onLearn: (e) => { + e.stopPropagation(); + options.onLearn(definition.id, 'axis'); + }, + onClear: (e) => { + e.stopPropagation(); + options.onClear(definition.id); + }, + onReset: (e) => { + e.stopPropagation(); + options.onReset(definition.id); + }, }); options.container.appendChild(panel); } @@ -322,9 +358,15 @@ export function createControllerConfigForm(options: { const isExpanded = expandedRowKey === rowKey; const isLearning = dpadLearningActionId === definition.id; - const dpadFallback: ControllerDpadFallback = binding.kind === 'none' ? 'none' : binding.dpadFallback; + const dpadFallback: ControllerDpadFallback = + binding.kind === 'none' ? 'none' : binding.dpadFallback; const badgeText = DPAD_FALLBACK_LABELS[dpadFallback]; - const row = createRow(`${definition.label} (D-pad)`, badgeText, dpadFallback === 'none', isExpanded); + const row = createRow( + `${definition.label} (D-pad)`, + badgeText, + dpadFallback === 'none', + isExpanded, + ); row.addEventListener('click', () => { expandedRowKey = expandedRowKey === rowKey ? null : rowKey; render(); @@ -336,15 +378,29 @@ export function createControllerConfigForm(options: { ? 
'Press a D-pad direction\u2026' : `Currently: ${DPAD_FALLBACK_LABELS[dpadFallback]}`; const panel = createEditPanel(hint, isLearning, { - onLearn: (e) => { e.stopPropagation(); options.onDpadLearn(definition.id); }, - onClear: (e) => { e.stopPropagation(); options.onDpadClear(definition.id); }, - onReset: (e) => { e.stopPropagation(); options.onDpadReset(definition.id); }, + onLearn: (e) => { + e.stopPropagation(); + options.onDpadLearn(definition.id); + }, + onClear: (e) => { + e.stopPropagation(); + options.onDpadClear(definition.id); + }, + onReset: (e) => { + e.stopPropagation(); + options.onDpadReset(definition.id); + }, }); options.container.appendChild(panel); } } - function createRow(labelText: string, badgeText: string, isDisabled: boolean, isExpanded: boolean): HTMLDivElement { + function createRow( + labelText: string, + badgeText: string, + isDisabled: boolean, + isExpanded: boolean, + ): HTMLDivElement { const row = document.createElement('div'); row.className = 'controller-config-row'; if (isExpanded) row.classList.add('expanded'); diff --git a/src/renderer/modals/controller-select.test.ts b/src/renderer/modals/controller-select.test.ts index 93086bb..ef64fce 100644 --- a/src/renderer/modals/controller-select.test.ts +++ b/src/renderer/modals/controller-select.test.ts @@ -66,7 +66,10 @@ function createFakeElement() { if (!match) return null; const testId = match[1]; for (const child of el.children) { - if (typeof child.getAttribute === 'function' && child.getAttribute('data-testid') === testId) { + if ( + typeof child.getAttribute === 'function' && + child.getAttribute('data-testid') === testId + ) { return child; } if (typeof child.querySelector === 'function') { @@ -105,7 +108,10 @@ function installFakeDom() { return { restore: () => { Object.defineProperty(globalThis, 'window', { configurable: true, value: previousWindow }); - Object.defineProperty(globalThis, 'document', { configurable: true, value: previousDocument }); + 
Object.defineProperty(globalThis, 'document', { + configurable: true, + value: previousDocument, + }); }, }; } diff --git a/src/renderer/modals/controller-select.ts b/src/renderer/modals/controller-select.ts index a1f0882..a3bba8c 100644 --- a/src/renderer/modals/controller-select.ts +++ b/src/renderer/modals/controller-select.ts @@ -31,8 +31,9 @@ export function createControllerSelectModal( let lastRenderedActiveGamepadId: string | null = null; let lastRenderedPreferredId = ''; type ControllerBindingKey = keyof NonNullable['bindings']; - type ControllerBindingValue = - NonNullable['bindings']>[ControllerBindingKey]; + type ControllerBindingValue = NonNullable< + NonNullable['bindings'] + >[ControllerBindingKey]; let learningActionId: ControllerBindingKey | null = null; let dpadLearningActionId: ControllerBindingKey | null = null; let bindingCapture: ReturnType | null = null; @@ -198,7 +199,9 @@ export function createControllerSelectModal( lastRenderedPreferredId = preferredId; } - async function saveControllerConfig(update: Parameters[0]) { + async function saveControllerConfig( + update: Parameters[0], + ) { await window.electronAPI.saveControllerConfig(update); if (!ctx.state.controllerConfig) return; if (update.preferredGamepadId !== undefined) { @@ -304,7 +307,10 @@ export function createControllerSelectModal( if (result.bindingType === 'dpad') { void saveDpadFallback(result.actionId as ControllerBindingKey, result.dpadDirection); } else { - void saveBinding(result.actionId as ControllerBindingKey, result.binding as ControllerBindingValue); + void saveBinding( + result.actionId as ControllerBindingKey, + result.binding as ControllerBindingValue, + ); } } } diff --git a/src/renderer/modals/subtitle-sidebar.test.ts b/src/renderer/modals/subtitle-sidebar.test.ts index 80822f6..204ab66 100644 --- a/src/renderer/modals/subtitle-sidebar.test.ts +++ b/src/renderer/modals/subtitle-sidebar.test.ts @@ -90,10 +90,7 @@ test('findActiveSubtitleCueIndex prefers current 
subtitle timing over near-futur { startTime: 233.05, endTime: 236, text: 'next' }, ]; - assert.equal( - findActiveSubtitleCueIndex(cues, { text: 'previous', startTime: 231 }, 233, 0), - 0, - ); + assert.equal(findActiveSubtitleCueIndex(cues, { text: 'previous', startTime: 231 }, 233, 0), 0); }); test('subtitle sidebar modal opens from snapshot and clicking cue seeks playback', async () => { @@ -1217,10 +1214,22 @@ test('subtitle sidebar polling schedules serialized timeouts instead of interval assert.equal(timeoutCount > 0, true); assert.equal(intervalCount, 0); } finally { - Object.defineProperty(globalThis, 'setTimeout', { configurable: true, value: previousSetTimeout }); - Object.defineProperty(globalThis, 'clearTimeout', { configurable: true, value: previousClearTimeout }); - Object.defineProperty(globalThis, 'setInterval', { configurable: true, value: previousSetInterval }); - Object.defineProperty(globalThis, 'clearInterval', { configurable: true, value: previousClearInterval }); + Object.defineProperty(globalThis, 'setTimeout', { + configurable: true, + value: previousSetTimeout, + }); + Object.defineProperty(globalThis, 'clearTimeout', { + configurable: true, + value: previousClearTimeout, + }); + Object.defineProperty(globalThis, 'setInterval', { + configurable: true, + value: previousSetInterval, + }); + Object.defineProperty(globalThis, 'clearInterval', { + configurable: true, + value: previousClearInterval, + }); Object.defineProperty(globalThis, 'window', { configurable: true, value: previousWindow }); Object.defineProperty(globalThis, 'document', { configurable: true, value: previousDocument }); } @@ -1232,6 +1241,7 @@ test('subtitle sidebar closes and resumes a hover pause', async () => { const previousDocument = globals.document; const mpvCommands: Array> = []; const modalListeners = new Map void>>(); + const contentListeners = new Map void>>(); const snapshot: SubtitleSidebarSnapshot = { cues: [{ startTime: 1, endTime: 2, text: 'first' }], @@ 
-1308,6 +1318,11 @@ test('subtitle sidebar closes and resumes a hover pause', async () => { subtitleSidebarContent: { classList: createClassList(), getBoundingClientRect: () => ({ width: 420 }), + addEventListener: (type: string, listener: () => void) => { + const bucket = contentListeners.get(type) ?? []; + bucket.push(listener); + contentListeners.set(type, bucket); + }, }, subtitleSidebarClose: { addEventListener: () => {} }, subtitleSidebarStatus: { textContent: '' }, @@ -1324,7 +1339,7 @@ test('subtitle sidebar closes and resumes a hover pause', async () => { await modal.openSubtitleSidebarModal(); await modal.refreshSubtitleSidebarSnapshot(); mpvCommands.length = 0; - await modalListeners.get('mouseenter')?.[0]?.(); + await contentListeners.get('mouseenter')?.[0]?.(); assert.deepEqual(mpvCommands.at(-1), ['set_property', 'pause', 'yes']); @@ -1344,6 +1359,7 @@ test('subtitle sidebar hover pause ignores playback-state IPC failures', async ( const previousDocument = globals.document; const mpvCommands: Array> = []; const modalListeners = new Map Promise | void>>(); + const contentListeners = new Map Promise | void>>(); const snapshot: SubtitleSidebarSnapshot = { cues: [{ startTime: 1, endTime: 2, text: 'first' }], @@ -1422,6 +1438,11 @@ test('subtitle sidebar hover pause ignores playback-state IPC failures', async ( subtitleSidebarContent: { classList: createClassList(), getBoundingClientRect: () => ({ width: 420 }), + addEventListener: (type: string, listener: () => Promise | void) => { + const bucket = contentListeners.get(type) ?? 
[]; + bucket.push(listener); + contentListeners.set(type, bucket); + }, }, subtitleSidebarClose: { addEventListener: () => {} }, subtitleSidebarStatus: { textContent: '' }, @@ -1437,7 +1458,7 @@ test('subtitle sidebar hover pause ignores playback-state IPC failures', async ( await modal.openSubtitleSidebarModal(); await assert.doesNotReject(async () => { - await modalListeners.get('mouseenter')?.[0]?.(); + await contentListeners.get('mouseenter')?.[0]?.(); }); assert.equal(state.subtitleSidebarPausedByHover, false); @@ -1564,17 +1585,13 @@ test('subtitle sidebar embedded layout reserves and releases mpv right margin', assert.ok( mpvCommands.some( (command) => - command[0] === 'set_property' && - command[1] === 'osd-align-x' && - command[2] === 'left', + command[0] === 'set_property' && command[1] === 'osd-align-x' && command[2] === 'left', ), ); assert.ok( mpvCommands.some( (command) => - command[0] === 'set_property' && - command[1] === 'osd-align-y' && - command[2] === 'top', + command[0] === 'set_property' && command[1] === 'osd-align-y' && command[2] === 'top', ), ); assert.ok( @@ -1597,7 +1614,11 @@ test('subtitle sidebar embedded layout reserves and releases mpv right margin', assert.deepEqual(mpvCommands.at(-5), ['set_property', 'video-margin-ratio-right', 0]); assert.deepEqual(mpvCommands.at(-4), ['set_property', 'osd-align-x', 'left']); assert.deepEqual(mpvCommands.at(-3), ['set_property', 'osd-align-y', 'top']); - assert.deepEqual(mpvCommands.at(-2), ['set_property', 'user-data/osc/margins', '{"l":0,"r":0,"t":0,"b":0}']); + assert.deepEqual(mpvCommands.at(-2), [ + 'set_property', + 'user-data/osc/margins', + '{"l":0,"r":0,"t":0,"b":0}', + ]); assert.deepEqual(mpvCommands.at(-1), ['set_property', 'video-pan-x', 0]); assert.equal(bodyClassList.contains('subtitle-sidebar-embedded-open'), false); assert.deepEqual(rootStyleCalls.at(-1), ['--subtitle-sidebar-reserved-width', '0px']); @@ -1735,6 +1756,7 @@ test('subtitle sidebar embedded layout restores macOS 
and Windows passthrough ou const mpvCommands: Array> = []; const ignoreMouseCalls: Array<[boolean, { forward?: boolean } | undefined]> = []; const modalListeners = new Map void>>(); + const contentListeners = new Map void>>(); const snapshot: SubtitleSidebarSnapshot = { cues: [{ startTime: 1, endTime: 2, text: 'first' }], @@ -1814,6 +1836,11 @@ test('subtitle sidebar embedded layout restores macOS and Windows passthrough ou subtitleSidebarContent: { classList: createClassList(), getBoundingClientRect: () => ({ width: 360 }), + addEventListener: (type: string, listener: () => void) => { + const bucket = contentListeners.get(type) ?? []; + bucket.push(listener); + contentListeners.set(type, bucket); + }, }, subtitleSidebarClose: { addEventListener: () => {} }, subtitleSidebarStatus: { textContent: '' }, @@ -1833,15 +1860,15 @@ test('subtitle sidebar embedded layout restores macOS and Windows passthrough ou await modal.openSubtitleSidebarModal(); assert.deepEqual(ignoreMouseCalls.at(-1), [true, { forward: true }]); - modalListeners.get('mouseenter')?.[0]?.(); + contentListeners.get('mouseenter')?.[0]?.(); assert.deepEqual(ignoreMouseCalls.at(-1), [false, undefined]); - modalListeners.get('mouseleave')?.[0]?.(); + contentListeners.get('mouseleave')?.[0]?.(); assert.deepEqual(ignoreMouseCalls.at(-1), [true, { forward: true }]); state.isOverSubtitle = true; - modalListeners.get('mouseenter')?.[0]?.(); - modalListeners.get('mouseleave')?.[0]?.(); + contentListeners.get('mouseenter')?.[0]?.(); + contentListeners.get('mouseleave')?.[0]?.(); assert.deepEqual(ignoreMouseCalls.at(-1), [false, undefined]); void mpvCommands; @@ -1851,6 +1878,251 @@ test('subtitle sidebar embedded layout restores macOS and Windows passthrough ou } }); +test('subtitle sidebar overlay layout restores macOS and Windows passthrough outside sidebar hover', async () => { + const globals = globalThis as typeof globalThis & { window?: unknown; document?: unknown }; + const previousWindow = 
globals.window; + const previousDocument = globals.document; + const mpvCommands: Array> = []; + const ignoreMouseCalls: Array<[boolean, { forward?: boolean } | undefined]> = []; + const modalListeners = new Map void>>(); + const contentListeners = new Map void>>(); + + const snapshot: SubtitleSidebarSnapshot = { + cues: [{ startTime: 1, endTime: 2, text: 'first' }], + currentSubtitle: { + text: 'first', + startTime: 1, + endTime: 2, + }, + currentTimeSec: 1.1, + config: { + enabled: true, + autoOpen: false, + layout: 'overlay', + toggleKey: 'Backslash', + pauseVideoOnHover: false, + autoScroll: true, + maxWidth: 360, + opacity: 0.92, + backgroundColor: 'rgba(54, 58, 79, 0.88)', + textColor: '#cad3f5', + fontFamily: '"Iosevka Aile", sans-serif', + fontSize: 17, + timestampColor: '#a5adcb', + activeLineColor: '#f5bde6', + activeLineBackgroundColor: 'rgba(138, 173, 244, 0.22)', + hoverLineBackgroundColor: 'rgba(54, 58, 79, 0.84)', + }, + }; + + Object.defineProperty(globalThis, 'window', { + configurable: true, + value: { + innerWidth: 1200, + electronAPI: { + getSubtitleSidebarSnapshot: async () => snapshot, + sendMpvCommand: (command: Array) => { + mpvCommands.push(command); + }, + setIgnoreMouseEvents: (ignore: boolean, options?: { forward?: boolean }) => { + ignoreMouseCalls.push([ignore, options]); + }, + } as unknown as ElectronAPI, + addEventListener: () => {}, + removeEventListener: () => {}, + }, + }); + Object.defineProperty(globalThis, 'document', { + configurable: true, + value: { + createElement: () => createCueRow(), + body: { + classList: createClassList(), + }, + documentElement: { + style: { + setProperty: () => {}, + }, + }, + }, + }); + + try { + const state = createRendererState(); + const ctx = { + dom: { + overlay: { classList: createClassList() }, + subtitleSidebarModal: { + classList: createClassList(['hidden']), + setAttribute: () => {}, + style: { setProperty: () => {} }, + addEventListener: (type: string, listener: () => void) => { + const 
bucket = modalListeners.get(type) ?? []; + bucket.push(listener); + modalListeners.set(type, bucket); + }, + }, + subtitleSidebarContent: { + classList: createClassList(), + getBoundingClientRect: () => ({ width: 360 }), + addEventListener: (type: string, listener: () => void) => { + const bucket = contentListeners.get(type) ?? []; + bucket.push(listener); + contentListeners.set(type, bucket); + }, + }, + subtitleSidebarClose: { addEventListener: () => {} }, + subtitleSidebarStatus: { textContent: '' }, + subtitleSidebarList: createListStub(), + }, + platform: { + shouldToggleMouseIgnore: true, + }, + state, + }; + + const modal = createSubtitleSidebarModal(ctx as never, { + modalStateReader: { isAnyModalOpen: () => false }, + }); + modal.wireDomEvents(); + + assert.equal(modalListeners.get('mouseenter')?.length ?? 0, 0); + assert.equal(modalListeners.get('mouseleave')?.length ?? 0, 0); + assert.equal(contentListeners.get('mouseenter')?.length ?? 0, 1); + assert.equal(contentListeners.get('mouseleave')?.length ?? 
0, 1); + + await modal.openSubtitleSidebarModal(); + assert.deepEqual(ignoreMouseCalls.at(-1), [true, { forward: true }]); + + contentListeners.get('mouseenter')?.[0]?.(); + assert.deepEqual(ignoreMouseCalls.at(-1), [false, undefined]); + + contentListeners.get('mouseleave')?.[0]?.(); + assert.deepEqual(ignoreMouseCalls.at(-1), [true, { forward: true }]); + + void mpvCommands; + } finally { + Object.defineProperty(globalThis, 'window', { configurable: true, value: previousWindow }); + Object.defineProperty(globalThis, 'document', { configurable: true, value: previousDocument }); + } +}); + +test('subtitle sidebar overlay layout only stays interactive while focus remains inside the sidebar panel', async () => { + const globals = globalThis as typeof globalThis & { window?: unknown; document?: unknown }; + const previousWindow = globals.window; + const previousDocument = globals.document; + const ignoreMouseCalls: Array<[boolean, { forward?: boolean } | undefined]> = []; + const contentListeners = new Map void>>(); + + const snapshot: SubtitleSidebarSnapshot = { + cues: [{ startTime: 1, endTime: 2, text: 'first' }], + currentSubtitle: { + text: 'first', + startTime: 1, + endTime: 2, + }, + currentTimeSec: 1.1, + config: { + enabled: true, + autoOpen: false, + layout: 'overlay', + toggleKey: 'Backslash', + pauseVideoOnHover: false, + autoScroll: true, + maxWidth: 360, + opacity: 0.92, + backgroundColor: 'rgba(54, 58, 79, 0.88)', + textColor: '#cad3f5', + fontFamily: '"Iosevka Aile", sans-serif', + fontSize: 17, + timestampColor: '#a5adcb', + activeLineColor: '#f5bde6', + activeLineBackgroundColor: 'rgba(138, 173, 244, 0.22)', + hoverLineBackgroundColor: 'rgba(54, 58, 79, 0.84)', + }, + }; + + Object.defineProperty(globalThis, 'window', { + configurable: true, + value: { + innerWidth: 1200, + electronAPI: { + getSubtitleSidebarSnapshot: async () => snapshot, + sendMpvCommand: () => {}, + setIgnoreMouseEvents: (ignore: boolean, options?: { forward?: boolean }) => { + 
ignoreMouseCalls.push([ignore, options]); + }, + } as unknown as ElectronAPI, + addEventListener: () => {}, + removeEventListener: () => {}, + }, + }); + Object.defineProperty(globalThis, 'document', { + configurable: true, + value: { + createElement: () => createCueRow(), + body: { + classList: createClassList(), + }, + documentElement: { + style: { + setProperty: () => {}, + }, + }, + }, + }); + + try { + const state = createRendererState(); + const sidebarContent = { + classList: createClassList(), + getBoundingClientRect: () => ({ width: 360 }), + addEventListener: (type: string, listener: (event?: FocusEvent) => void) => { + const bucket = contentListeners.get(type) ?? []; + bucket.push(listener); + contentListeners.set(type, bucket); + }, + contains: () => false, + }; + const ctx = { + dom: { + overlay: { classList: createClassList() }, + subtitleSidebarModal: { + classList: createClassList(['hidden']), + setAttribute: () => {}, + style: { setProperty: () => {} }, + addEventListener: () => {}, + }, + subtitleSidebarContent: sidebarContent, + subtitleSidebarClose: { addEventListener: () => {} }, + subtitleSidebarStatus: { textContent: '' }, + subtitleSidebarList: createListStub(), + }, + platform: { + shouldToggleMouseIgnore: true, + }, + state, + }; + + const modal = createSubtitleSidebarModal(ctx as never, { + modalStateReader: { isAnyModalOpen: () => false }, + }); + modal.wireDomEvents(); + + await modal.openSubtitleSidebarModal(); + assert.deepEqual(ignoreMouseCalls.at(-1), [true, { forward: true }]); + + contentListeners.get('focusin')?.[0]?.(); + assert.deepEqual(ignoreMouseCalls.at(-1), [false, undefined]); + + contentListeners.get('focusout')?.[0]?.({ relatedTarget: null } as FocusEvent); + assert.deepEqual(ignoreMouseCalls.at(-1), [true, { forward: true }]); + } finally { + Object.defineProperty(globalThis, 'window', { configurable: true, value: previousWindow }); + Object.defineProperty(globalThis, 'document', { configurable: true, value: 
previousDocument }); + } +}); + test('closing embedded subtitle sidebar recomputes passthrough from remaining subtitle hover state', async () => { const globals = globalThis as typeof globalThis & { window?: unknown; document?: unknown }; const previousWindow = globals.window; diff --git a/src/renderer/modals/subtitle-sidebar.ts b/src/renderer/modals/subtitle-sidebar.ts index f12618b..d381606 100644 --- a/src/renderer/modals/subtitle-sidebar.ts +++ b/src/renderer/modals/subtitle-sidebar.ts @@ -1,8 +1,4 @@ -import type { - SubtitleCue, - SubtitleData, - SubtitleSidebarSnapshot, -} from '../../types'; +import type { SubtitleCue, SubtitleData, SubtitleSidebarSnapshot } from '../../types'; import type { ModalStateReader, RendererContext } from '../context'; import { syncOverlayMouseIgnoreState } from '../overlay-mouse-ignore.js'; @@ -76,8 +72,7 @@ export function findActiveSubtitleCueIndex( if (typeof currentTimeSec === 'number' && Number.isFinite(currentTimeSec)) { const activeOrUpcomingCue = cues.findIndex( (cue) => - cue.endTime > currentTimeSec && - cue.startTime <= currentTimeSec + ACTIVE_CUE_LOOKAHEAD_SEC, + cue.endTime > currentTimeSec && cue.startTime <= currentTimeSec + ACTIVE_CUE_LOOKAHEAD_SEC, ); if (activeOrUpcomingCue >= 0) { return activeOrUpcomingCue; @@ -109,8 +104,7 @@ export function findActiveSubtitleCueIndex( return -1; } - const hasTiming = - typeof current.startTime === 'number' && Number.isFinite(current.startTime); + const hasTiming = typeof current.startTime === 'number' && Number.isFinite(current.startTime); if (preferredCueIndex >= 0) { if (!hasTiming && currentTimeSec === null) { @@ -149,11 +143,23 @@ export function createSubtitleSidebarModal( let lastAppliedVideoMarginRatio: number | null = null; let subtitleSidebarHoverRequestId = 0; let disposeDomEvents: (() => void) | null = null; + let subtitleSidebarHovered = false; + let subtitleSidebarFocusedWithin = false; function restoreEmbeddedSidebarPassthrough(): void { 
syncOverlayMouseIgnoreState(ctx); } + function syncSidebarInteractionState(): void { + ctx.state.isOverSubtitleSidebar = subtitleSidebarHovered || subtitleSidebarFocusedWithin; + } + + function clearSidebarInteractionState(): void { + subtitleSidebarHovered = false; + subtitleSidebarFocusedWithin = false; + syncSidebarInteractionState(); + } + function setStatus(message: string): void { ctx.dom.subtitleSidebarStatus.textContent = message; } @@ -213,16 +219,8 @@ export function createSubtitleSidebarModal( 'video-margin-ratio-right', Number(ratio.toFixed(4)), ]); - window.electronAPI.sendMpvCommand([ - 'set_property', - 'osd-align-x', - 'left', - ]); - window.electronAPI.sendMpvCommand([ - 'set_property', - 'osd-align-y', - 'top', - ]); + window.electronAPI.sendMpvCommand(['set_property', 'osd-align-x', 'left']); + window.electronAPI.sendMpvCommand(['set_property', 'osd-align-y', 'top']); window.electronAPI.sendMpvCommand([ 'set_property', 'user-data/osc/margins', @@ -302,13 +300,14 @@ export function createSubtitleSidebarModal( } const list = ctx.dom.subtitleSidebarList; - const active = list.children[ctx.state.subtitleSidebarActiveCueIndex] as HTMLElement | undefined; + const active = list.children[ctx.state.subtitleSidebarActiveCueIndex] as + | HTMLElement + | undefined; if (!active) { return; } - const targetScrollTop = - active.offsetTop - (list.clientHeight - active.clientHeight) / 2; + const targetScrollTop = active.offsetTop - (list.clientHeight - active.clientHeight) / 2; const nextScrollTop = Math.max(0, targetScrollTop); if (previousActiveCueIndex < 0) { list.scrollTop = nextScrollTop; @@ -363,9 +362,9 @@ export function createSubtitleSidebarModal( } if (ctx.state.subtitleSidebarActiveCueIndex >= 0) { - const current = ctx.dom.subtitleSidebarList.children[ctx.state.subtitleSidebarActiveCueIndex] as - | HTMLElement - | undefined; + const current = ctx.dom.subtitleSidebarList.children[ + ctx.state.subtitleSidebarActiveCueIndex + ] as HTMLElement | undefined; 
current?.classList.add('active'); } } @@ -392,6 +391,7 @@ export function createSubtitleSidebarModal( applyConfig(snapshot); if (!snapshot.config.enabled) { resumeSubtitleSidebarHoverPause(); + clearSidebarInteractionState(); ctx.state.subtitleSidebarCues = []; ctx.state.subtitleSidebarModalOpen = false; ctx.dom.subtitleSidebarModal.classList.add('hidden'); @@ -463,7 +463,7 @@ export function createSubtitleSidebarModal( } ctx.state.subtitleSidebarModalOpen = true; - ctx.state.isOverSubtitleSidebar = false; + clearSidebarInteractionState(); ctx.dom.subtitleSidebarModal.classList.remove('hidden'); ctx.dom.subtitleSidebarModal.setAttribute('aria-hidden', 'false'); renderCueList(); @@ -476,7 +476,11 @@ export function createSubtitleSidebarModal( async function autoOpenSubtitleSidebarOnStartup(): Promise { const snapshot = await refreshSnapshot(); - if (!snapshot.config.enabled || !snapshot.config.autoOpen || ctx.state.subtitleSidebarModalOpen) { + if ( + !snapshot.config.enabled || + !snapshot.config.autoOpen || + ctx.state.subtitleSidebarModalOpen + ) { return; } await openSubtitleSidebarModal(); @@ -487,7 +491,7 @@ export function createSubtitleSidebarModal( return; } resumeSubtitleSidebarHoverPause(); - ctx.state.isOverSubtitleSidebar = false; + clearSidebarInteractionState(); ctx.state.subtitleSidebarModalOpen = false; ctx.dom.subtitleSidebarModal.classList.add('hidden'); ctx.dom.subtitleSidebarModal.setAttribute('aria-hidden', 'true'); @@ -512,10 +516,7 @@ export function createSubtitleSidebarModal( return; } - updateActiveCue( - { text: data.text, startTime: data.startTime }, - data.startTime ?? null, - ); + updateActiveCue({ text: data.text, startTime: data.startTime }, data.startTime ?? 
null); } function wireDomEvents(): void { @@ -548,8 +549,9 @@ export function createSubtitleSidebarModal( ctx.dom.subtitleSidebarList.addEventListener('wheel', () => { ctx.state.subtitleSidebarManualScrollUntilMs = Date.now() + MANUAL_SCROLL_HOLD_MS; }); - ctx.dom.subtitleSidebarModal.addEventListener('mouseenter', async () => { - ctx.state.isOverSubtitleSidebar = true; + ctx.dom.subtitleSidebarContent.addEventListener('mouseenter', async () => { + subtitleSidebarHovered = true; + syncSidebarInteractionState(); restoreEmbeddedSidebarPassthrough(); if (!ctx.state.subtitleSidebarPauseVideoOnHover || ctx.state.subtitleSidebarPausedByHover) { return; @@ -569,8 +571,36 @@ export function createSubtitleSidebarModal( ctx.state.subtitleSidebarPausedByHover = true; } }); - ctx.dom.subtitleSidebarModal.addEventListener('mouseleave', () => { - ctx.state.isOverSubtitleSidebar = false; + ctx.dom.subtitleSidebarContent.addEventListener('mouseleave', () => { + subtitleSidebarHovered = false; + syncSidebarInteractionState(); + if (ctx.state.isOverSubtitleSidebar) { + restoreEmbeddedSidebarPassthrough(); + return; + } + resumeSubtitleSidebarHoverPause(); + }); + ctx.dom.subtitleSidebarContent.addEventListener('focusin', () => { + subtitleSidebarFocusedWithin = true; + syncSidebarInteractionState(); + restoreEmbeddedSidebarPassthrough(); + }); + ctx.dom.subtitleSidebarContent.addEventListener('focusout', (event: FocusEvent) => { + const relatedTarget = event.relatedTarget; + if ( + typeof Node !== 'undefined' && + relatedTarget instanceof Node && + ctx.dom.subtitleSidebarContent.contains(relatedTarget) + ) { + return; + } + + subtitleSidebarFocusedWithin = false; + syncSidebarInteractionState(); + if (ctx.state.isOverSubtitleSidebar) { + restoreEmbeddedSidebarPassthrough(); + return; + } resumeSubtitleSidebarHoverPause(); }); const resizeHandler = () => { diff --git a/src/renderer/modals/youtube-track-picker.ts b/src/renderer/modals/youtube-track-picker.ts index ddb3b1d..ecbcbfd 
100644 --- a/src/renderer/modals/youtube-track-picker.ts +++ b/src/renderer/modals/youtube-track-picker.ts @@ -28,13 +28,13 @@ export function createYoutubeTrackPickerModal( function setStatus(message: string, isError = false): void { ctx.state.youtubePickerStatus = message; ctx.dom.youtubePickerStatus.textContent = message; - ctx.dom.youtubePickerStatus.style.color = isError - ? '#ed8796' - : '#a5adcb'; + ctx.dom.youtubePickerStatus.style.color = isError ? '#ed8796' : '#a5adcb'; } function getTrackLabel(trackId: string): string { - return ctx.state.youtubePickerPayload?.tracks.find((track) => track.id === trackId)?.label ?? ''; + return ( + ctx.state.youtubePickerPayload?.tracks.find((track) => track.id === trackId)?.label ?? '' + ); } function renderTrackList(): void { @@ -82,10 +82,7 @@ export function createYoutubeTrackPickerModal( if (track.id === primaryTrackId) continue; ctx.dom.youtubePickerSecondarySelect.appendChild(createOption(track.id, track.label)); } - if ( - primaryTrackId && - ctx.dom.youtubePickerSecondarySelect.value === primaryTrackId - ) { + if (primaryTrackId && ctx.dom.youtubePickerSecondarySelect.value === primaryTrackId) { ctx.dom.youtubePickerSecondarySelect.value = ''; } } @@ -126,7 +123,9 @@ export function createYoutubeTrackPickerModal( setStatus('Select the subtitle tracks to download.'); } - async function resolveSelection(action: 'use-selected' | 'continue-without-subtitles'): Promise { + async function resolveSelection( + action: 'use-selected' | 'continue-without-subtitles', + ): Promise { if (resolveSelectionInFlight) { return; } @@ -238,7 +237,9 @@ export function createYoutubeTrackPickerModal( return true; } void resolveSelection( - payloadHasTracks(ctx.state.youtubePickerPayload) ? 'use-selected' : 'continue-without-subtitles', + payloadHasTracks(ctx.state.youtubePickerPayload) + ? 
'use-selected' + : 'continue-without-subtitles', ); return true; } @@ -269,7 +270,9 @@ export function createYoutubeTrackPickerModal( ctx.dom.youtubePickerContinueButton.addEventListener('click', () => { void resolveSelection( - payloadHasTracks(ctx.state.youtubePickerPayload) ? 'use-selected' : 'continue-without-subtitles', + payloadHasTracks(ctx.state.youtubePickerPayload) + ? 'use-selected' + : 'continue-without-subtitles', ); }); diff --git a/src/renderer/overlay-mouse-ignore.ts b/src/renderer/overlay-mouse-ignore.ts index 683392a..401277a 100644 --- a/src/renderer/overlay-mouse-ignore.ts +++ b/src/renderer/overlay-mouse-ignore.ts @@ -2,19 +2,15 @@ import type { RendererContext } from './context'; import type { RendererState } from './state'; function isBlockingOverlayModalOpen(state: RendererState): boolean { - const embeddedSidebarOpen = - state.subtitleSidebarModalOpen && state.subtitleSidebarConfig?.layout === 'embedded'; - return Boolean( state.controllerSelectModalOpen || - state.controllerDebugModalOpen || - state.jimakuModalOpen || - state.youtubePickerModalOpen || - state.kikuModalOpen || - state.runtimeOptionsModalOpen || - state.subsyncModalOpen || - state.sessionHelpModalOpen || - (state.subtitleSidebarModalOpen && !embeddedSidebarOpen), + state.controllerDebugModalOpen || + state.jimakuModalOpen || + state.youtubePickerModalOpen || + state.kikuModalOpen || + state.runtimeOptionsModalOpen || + state.subsyncModalOpen || + state.sessionHelpModalOpen, ); } diff --git a/src/renderer/renderer.ts b/src/renderer/renderer.ts index 20633f4..48a445a 100644 --- a/src/renderer/renderer.ts +++ b/src/renderer/renderer.ts @@ -552,8 +552,14 @@ async function init(): Promise { ctx.dom.subtitleContainer.addEventListener('mouseenter', mouseHandlers.handlePrimaryMouseEnter); ctx.dom.subtitleContainer.addEventListener('mouseleave', mouseHandlers.handlePrimaryMouseLeave); - ctx.dom.secondarySubContainer.addEventListener('mouseenter', 
mouseHandlers.handleSecondaryMouseEnter); - ctx.dom.secondarySubContainer.addEventListener('mouseleave', mouseHandlers.handleSecondaryMouseLeave); + ctx.dom.secondarySubContainer.addEventListener( + 'mouseenter', + mouseHandlers.handleSecondaryMouseEnter, + ); + ctx.dom.secondarySubContainer.addEventListener( + 'mouseleave', + mouseHandlers.handleSecondaryMouseLeave, + ); mouseHandlers.setupResizeHandler(); mouseHandlers.setupPointerTracking(); diff --git a/src/renderer/style.css b/src/renderer/style.css index ff93ca7..79d4d77 100644 --- a/src/renderer/style.css +++ b/src/renderer/style.css @@ -296,7 +296,7 @@ body { .youtube-picker-content { width: min(820px, 92%); background: - radial-gradient(circle at top right, rgba(198, 160, 246, 0.10), transparent 34%), + radial-gradient(circle at top right, rgba(198, 160, 246, 0.1), transparent 34%), linear-gradient(180deg, rgba(36, 39, 58, 0.98), rgba(30, 32, 48, 0.98)); border-color: rgba(138, 173, 244, 0.25); } @@ -1342,8 +1342,14 @@ iframe[id^='yomitan-popup'] { } @keyframes configEditSlideIn { - from { max-height: 0; opacity: 0; } - to { max-height: 120px; opacity: 1; } + from { + max-height: 0; + opacity: 0; + } + to { + max-height: 120px; + opacity: 1; + } } .controller-config-edit-inner { @@ -1365,8 +1371,13 @@ iframe[id^='yomitan-popup'] { } @keyframes configLearnPulse { - 0%, 100% { opacity: 1; } - 50% { opacity: 0.6; } + 0%, + 100% { + opacity: 1; + } + 50% { + opacity: 0.6; + } } .controller-config-edit-actions { @@ -1404,7 +1415,9 @@ iframe[id^='yomitan-popup'] { color: #6e738d; font-size: 12px; cursor: pointer; - transition: background 120ms ease, color 120ms ease; + transition: + background 120ms ease, + color 120ms ease; } .btn-secondary:hover { @@ -1497,14 +1510,13 @@ body.subtitle-sidebar-embedded-open .subtitle-sidebar-modal { max-height: calc(100vh - 28px); height: auto; margin-left: auto; - font-family: - var( - --subtitle-sidebar-font-family, - 'M PLUS 1', - 'Noto Sans CJK JP', - 'Hiragino Sans', - 
sans-serif - ); + font-family: var( + --subtitle-sidebar-font-family, + 'M PLUS 1', + 'Noto Sans CJK JP', + 'Hiragino Sans', + sans-serif + ); font-size: var(--subtitle-sidebar-font-size, 16px); background: var(--subtitle-sidebar-background-color, rgba(73, 77, 100, 0.9)); color: var(--subtitle-sidebar-text-color, #cad3f5); diff --git a/src/renderer/subtitle-render.test.ts b/src/renderer/subtitle-render.test.ts index f78c985..ef80f02 100644 --- a/src/renderer/subtitle-render.test.ts +++ b/src/renderer/subtitle-render.test.ts @@ -981,18 +981,9 @@ test('JLPT CSS rules use underline-only styling in renderer stylesheet', () => { cssText, 'body.subtitle-sidebar-embedded-open #secondarySubContainer.secondary-sub-hover', ); - assert.match( - secondaryEmbeddedHoverBlock, - /right:\s*var\(--subtitle-sidebar-reserved-width\);/, - ); - assert.match( - secondaryEmbeddedHoverBlock, - /max-width:\s*none;/, - ); - assert.match( - secondaryEmbeddedHoverBlock, - /transform:\s*none;/, - ); + assert.match(secondaryEmbeddedHoverBlock, /right:\s*var\(--subtitle-sidebar-reserved-width\);/); + assert.match(secondaryEmbeddedHoverBlock, /max-width:\s*none;/); + assert.match(secondaryEmbeddedHoverBlock, /transform:\s*none;/); assert.doesNotMatch( secondaryEmbeddedHoverBlock, /transform:\s*translateX\(calc\(var\(--subtitle-sidebar-reserved-width\)\s*\*\s*-0\.5\)\);/, diff --git a/src/runtime-options.test.ts b/src/runtime-options.test.ts new file mode 100644 index 0000000..6c1e848 --- /dev/null +++ b/src/runtime-options.test.ts @@ -0,0 +1,64 @@ +import assert from 'node:assert/strict'; +import fs from 'node:fs'; +import path from 'node:path'; +import test from 'node:test'; + +import { RuntimeOptionsManager } from './runtime-options'; + +test('SM-012 runtime options path does not use JSON serialize-clone helpers', () => { + const source = fs.readFileSync(path.join(process.cwd(), 'src/runtime-options.ts'), 'utf-8'); + assert.equal(source.includes('JSON.parse(JSON.stringify('), false); +}); + 
+test('RuntimeOptionsManager returns detached effective Anki config copies', () => { + const baseConfig = { + deck: 'Mining', + note: 'Sentence', + tags: ['SubMiner'], + behavior: { + autoUpdateNewCards: true, + updateIntervalMs: 5000, + }, + fieldMapping: { + sentence: 'Sentence', + meaning: 'Meaning', + audio: 'Audio', + image: 'Image', + context: 'Context', + source: 'Source', + definition: 'Definition', + sequence: 'Sequence', + contextSecondary: 'ContextSecondary', + contextTertiary: 'ContextTertiary', + primarySpelling: 'PrimarySpelling', + primaryReading: 'PrimaryReading', + wordSpelling: 'WordSpelling', + wordReading: 'WordReading', + }, + duplicates: { + mode: 'note' as const, + scope: 'deck' as const, + allowedFields: [], + }, + ai: { + enabled: false, + model: '', + systemPrompt: '', + }, + }; + + const manager = new RuntimeOptionsManager( + () => structuredClone(baseConfig), + { + applyAnkiPatch: () => undefined, + onOptionsChanged: () => undefined, + }, + ); + + const effective = manager.getEffectiveAnkiConnectConfig(); + effective.tags!.push('mutated'); + effective.behavior!.autoUpdateNewCards = false; + + assert.deepEqual(baseConfig.tags, ['SubMiner']); + assert.equal(baseConfig.behavior.autoUpdateNewCards, true); +}); diff --git a/src/runtime-options.ts b/src/runtime-options.ts index abff64e..0056746 100644 --- a/src/runtime-options.ts +++ b/src/runtime-options.ts @@ -16,20 +16,20 @@ * along with this program. If not, see . 
*/ +import { AnkiConnectConfig } from './types/anki'; import { - AnkiConnectConfig, RuntimeOptionApplyResult, RuntimeOptionId, RuntimeOptionState, RuntimeOptionValue, - SubtitleStyleConfig, -} from './types'; +} from './types/runtime-options'; +import { SubtitleStyleConfig } from './types/subtitle'; import { RUNTIME_OPTION_REGISTRY, RuntimeOptionRegistryEntry } from './config'; type RuntimeOverrides = Record; function deepClone(value: T): T { - return JSON.parse(JSON.stringify(value)) as T; + return structuredClone(value); } function getPathValue(source: Record, path: string): unknown { diff --git a/src/shared/fs-utils.ts b/src/shared/fs-utils.ts new file mode 100644 index 0000000..21c1889 --- /dev/null +++ b/src/shared/fs-utils.ts @@ -0,0 +1,14 @@ +import fs from 'node:fs'; +import path from 'node:path'; + +export function ensureDir(dirPath: string): void { + if (fs.existsSync(dirPath)) return; + fs.mkdirSync(dirPath, { recursive: true }); +} + +export function ensureDirForFile(filePath: string): void { + const dir = path.dirname(filePath); + if (!fs.existsSync(dir)) { + fs.mkdirSync(dir, { recursive: true }); + } +} diff --git a/src/shared/ipc/contracts.ts b/src/shared/ipc/contracts.ts index 426ce00..468b7d5 100644 --- a/src/shared/ipc/contracts.ts +++ b/src/shared/ipc/contracts.ts @@ -1,4 +1,5 @@ -import type { OverlayContentMeasurement, RuntimeOptionId, RuntimeOptionValue } from '../../types'; +import type { OverlayContentMeasurement } from '../../types/runtime'; +import type { RuntimeOptionId, RuntimeOptionValue } from '../../types/runtime-options'; export const OVERLAY_HOSTED_MODALS = [ 'runtime-options', diff --git a/src/shared/ipc/validators.ts b/src/shared/ipc/validators.ts index 8ce1001..1de1112 100644 --- a/src/shared/ipc/validators.ts +++ b/src/shared/ipc/validators.ts @@ -1,17 +1,17 @@ +import type { KikuFieldGroupingChoice, KikuMergePreviewRequest } from '../../types/anki'; import type { - ControllerConfigUpdate, - ControllerPreferenceUpdate, 
JimakuDownloadQuery, JimakuFilesQuery, JimakuSearchQuery, - KikuFieldGroupingChoice, - KikuMergePreviewRequest, - RuntimeOptionId, - RuntimeOptionValue, - SubtitlePosition, - SubsyncManualRunRequest, YoutubePickerResolveRequest, -} from '../../types'; +} from '../../types/integrations'; +import type { + ControllerConfigUpdate, + ControllerPreferenceUpdate, + SubsyncManualRunRequest, +} from '../../types/runtime'; +import type { RuntimeOptionId, RuntimeOptionValue } from '../../types/runtime-options'; +import type { SubtitlePosition } from '../../types/subtitle'; import { OVERLAY_HOSTED_MODALS, type OverlayHostedModal } from './contracts'; const RUNTIME_OPTION_IDS: RuntimeOptionId[] = [ @@ -255,7 +255,9 @@ export function parseJimakuDownloadQuery(value: unknown): JimakuDownloadQuery | }; } -export function parseYoutubePickerResolveRequest(value: unknown): YoutubePickerResolveRequest | null { +export function parseYoutubePickerResolveRequest( + value: unknown, +): YoutubePickerResolveRequest | null { if (!isObject(value)) return null; if (typeof value.sessionId !== 'string' || !value.sessionId.trim()) return null; if (value.action !== 'use-selected' && value.action !== 'continue-without-subtitles') return null; @@ -270,7 +272,11 @@ export function parseYoutubePickerResolveRequest(value: unknown): YoutubePickerR secondaryTrackId: null, }; } - if (value.primaryTrackId !== null && value.primaryTrackId !== undefined && typeof value.primaryTrackId !== 'string') { + if ( + value.primaryTrackId !== null && + value.primaryTrackId !== undefined && + typeof value.primaryTrackId !== 'string' + ) { return null; } if ( diff --git a/src/shared/log-files.test.ts b/src/shared/log-files.test.ts index 5c4d6c3..9093ea7 100644 --- a/src/shared/log-files.test.ts +++ b/src/shared/log-files.test.ts @@ -3,11 +3,7 @@ import assert from 'node:assert/strict'; import fs from 'node:fs'; import os from 'node:os'; import path from 'node:path'; -import { - appendLogLine, - pruneLogFiles, - 
resolveDefaultLogFilePath, -} from './log-files'; +import { appendLogLine, pruneLogFiles, resolveDefaultLogFilePath } from './log-files'; test('resolveDefaultLogFilePath uses app prefix by default', () => { const now = new Date('2026-03-22T12:00:00.000Z'); @@ -36,8 +32,16 @@ test('pruneLogFiles removes logs older than retention window', () => { fs.writeFileSync(stalePath, 'stale\n', 'utf8'); fs.writeFileSync(freshPath, 'fresh\n', 'utf8'); const now = new Date('2026-03-22T12:00:00.000Z'); - fs.utimesSync(stalePath, new Date('2026-03-01T12:00:00.000Z'), new Date('2026-03-01T12:00:00.000Z')); - fs.utimesSync(freshPath, new Date('2026-03-21T12:00:00.000Z'), new Date('2026-03-21T12:00:00.000Z')); + fs.utimesSync( + stalePath, + new Date('2026-03-01T12:00:00.000Z'), + new Date('2026-03-01T12:00:00.000Z'), + ); + fs.utimesSync( + freshPath, + new Date('2026-03-21T12:00:00.000Z'), + new Date('2026-03-21T12:00:00.000Z'), + ); try { pruneLogFiles(logsDir, { retentionDays: 7, now }); diff --git a/src/stats-daemon-control.test.ts b/src/stats-daemon-control.test.ts index acacc29..acf5f53 100644 --- a/src/stats-daemon-control.test.ts +++ b/src/stats-daemon-control.test.ts @@ -69,7 +69,9 @@ test('stats daemon control clears stale state, starts daemon, and waits for resp }, resolveUrl: (state) => `http://127.0.0.1:${state.port}`, spawnDaemon: async (options) => { - calls.push(`spawnDaemon:${options.scriptPath}:${options.responsePath}:${options.userDataPath}`); + calls.push( + `spawnDaemon:${options.scriptPath}:${options.responsePath}:${options.userDataPath}`, + ); return 999; }, waitForDaemonResponse: async (responsePath) => { diff --git a/src/stats-daemon-runner.ts b/src/stats-daemon-runner.ts index 2210b01..857d02b 100644 --- a/src/stats-daemon-runner.ts +++ b/src/stats-daemon-runner.ts @@ -13,7 +13,10 @@ import { writeBackgroundStatsServerState, } from './main/runtime/stats-daemon'; import { writeStatsCliCommandResponse } from './main/runtime/stats-cli-command'; -import { 
createInvokeStatsWordHelperHandler, type StatsWordHelperResponse } from './stats-word-helper-client'; +import { + createInvokeStatsWordHelperHandler, + type StatsWordHelperResponse, +} from './stats-word-helper-client'; const logger = createLogger('stats-daemon'); const STATS_WORD_HELPER_RESPONSE_TIMEOUT_MS = 20_000; diff --git a/src/stats-word-helper-client.ts b/src/stats-word-helper-client.ts index ab71425..4ba068b 100644 --- a/src/stats-word-helper-client.ts +++ b/src/stats-word-helper-client.ts @@ -33,7 +33,9 @@ export function createInvokeStatsWordHelperHandler(deps: { }); const startupResult = await Promise.race([ - deps.waitForResponse(responsePath).then((response) => ({ kind: 'response' as const, response })), + deps + .waitForResponse(responsePath) + .then((response) => ({ kind: 'response' as const, response })), helperExitPromise.then((status) => ({ kind: 'exit' as const, status })), ]); @@ -42,7 +44,9 @@ export function createInvokeStatsWordHelperHandler(deps: { response = startupResult.response; } else { if (startupResult.status !== 0) { - throw new Error(`Stats word helper exited before response (status ${startupResult.status}).`); + throw new Error( + `Stats word helper exited before response (status ${startupResult.status}).`, + ); } response = await deps.waitForResponse(responsePath); } diff --git a/src/types-domain-entrypoints.type-test.ts b/src/types-domain-entrypoints.type-test.ts new file mode 100644 index 0000000..60398bd --- /dev/null +++ b/src/types-domain-entrypoints.type-test.ts @@ -0,0 +1,39 @@ +import { PartOfSpeech as LegacyPartOfSpeech } from './types'; +import type { AnkiConnectConfig } from './types/anki'; +import type { ConfigValidationWarning, RawConfig, ResolvedConfig } from './types/config'; +import type { JimakuConfig, YoutubePickerOpenPayload } from './types/integrations'; +import type { ElectronAPI, MpvClient, OverlayContentMeasurement } from './types/runtime'; +import type { RuntimeOptionId, RuntimeOptionValue } from 
'./types/runtime-options'; +import { PartOfSpeech, type SubtitleSidebarSnapshot } from './types/subtitle'; + +type Assert = T; +type IsAssignable = [From] extends [To] ? true : false; + +const runtimeEntryPointMatchesLegacyBarrel = LegacyPartOfSpeech === PartOfSpeech; +void runtimeEntryPointMatchesLegacyBarrel; + +type SubtitleEnumStillCompatible = Assert< + IsAssignable +>; + +type ConfigEntryPointContracts = [ + RawConfig, + ResolvedConfig, + ConfigValidationWarning, + RuntimeOptionId, + RuntimeOptionValue, +]; + +type IntegrationEntryPointContracts = [AnkiConnectConfig, JimakuConfig, YoutubePickerOpenPayload]; + +type RuntimeEntryPointContracts = [ + OverlayContentMeasurement, + ElectronAPI, + MpvClient, + SubtitleSidebarSnapshot, +]; + +void (null as unknown as SubtitleEnumStillCompatible); +void (null as unknown as ConfigEntryPointContracts); +void (null as unknown as IntegrationEntryPointContracts); +void (null as unknown as RuntimeEntryPointContracts); diff --git a/src/types.ts b/src/types.ts index 61e30ca..33e5adb 100644 --- a/src/types.ts +++ b/src/types.ts @@ -1,1255 +1,6 @@ -/* - * SubMiner - All-in-one sentence mining overlay - * Copyright (C) 2024 sudacode - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
- */ - -import type { SubtitleCue } from './core/services/subtitle-cue-parser'; -import type { YoutubeTrackKind } from './core/services/youtube/kinds'; - -export enum PartOfSpeech { - noun = 'noun', - verb = 'verb', - i_adjective = 'i_adjective', - na_adjective = 'na_adjective', - particle = 'particle', - bound_auxiliary = 'bound_auxiliary', - symbol = 'symbol', - other = 'other', -} - -export interface Token { - word: string; - partOfSpeech: PartOfSpeech; - pos1: string; - pos2: string; - pos3: string; - pos4: string; - inflectionType: string; - inflectionForm: string; - headword: string; - katakanaReading: string; - pronunciation: string; -} - -export interface MergedToken { - surface: string; - reading: string; - headword: string; - startPos: number; - endPos: number; - partOfSpeech: PartOfSpeech; - pos1?: string; - pos2?: string; - pos3?: string; - isMerged: boolean; - isKnown: boolean; - isNPlusOneTarget: boolean; - isNameMatch?: boolean; - jlptLevel?: JlptLevel; - frequencyRank?: number; -} - -export type FrequencyDictionaryLookup = (term: string) => number | null; - -export type JlptLevel = 'N1' | 'N2' | 'N3' | 'N4' | 'N5'; - -export interface WindowGeometry { - x: number; - y: number; - width: number; - height: number; -} - -export interface SubtitlePosition { - yPercent: number; -} - -export interface SubtitleStyle { - fontSize: number; -} - -export interface Keybinding { - key: string; - command: (string | number)[] | null; -} - -export type SecondarySubMode = 'hidden' | 'visible' | 'hover'; - -export interface SecondarySubConfig { - secondarySubLanguages?: string[]; - autoLoadSecondarySub?: boolean; - defaultMode?: SecondarySubMode; -} - -export type SubsyncMode = 'auto' | 'manual'; - -export interface SubsyncConfig { - defaultMode?: SubsyncMode; - alass_path?: string; - ffsubsync_path?: string; - ffmpeg_path?: string; - replace?: boolean; -} - -export interface StartupWarmupsConfig { - lowPowerMode?: boolean; - mecab?: boolean; - yomitanExtension?: 
boolean; - subtitleDictionaries?: boolean; - jellyfinRemoteSession?: boolean; -} - -export interface WebSocketConfig { - enabled?: boolean | 'auto'; - port?: number; -} - -export interface AnnotationWebSocketConfig { - enabled?: boolean; - port?: number; -} - -export interface TexthookerConfig { - launchAtStartup?: boolean; - openBrowser?: boolean; -} - -export interface NotificationOptions { - body?: string; - icon?: string; -} - -export interface MpvClient { - currentSubText: string; - currentVideoPath: string; - currentMediaTitle?: string | null; - currentTimePos: number; - currentSubStart: number; - currentSubEnd: number; - currentAudioStreamIndex: number | null; - requestProperty?: (name: string) => Promise; - send(command: { command: unknown[]; request_id?: number }): boolean; -} - -export interface KikuDuplicateCardInfo { - noteId: number; - expression: string; - sentencePreview: string; - hasAudio: boolean; - hasImage: boolean; - isOriginal: boolean; -} - -export interface KikuFieldGroupingRequestData { - original: KikuDuplicateCardInfo; - duplicate: KikuDuplicateCardInfo; -} - -export interface KikuFieldGroupingChoice { - keepNoteId: number; - deleteNoteId: number; - deleteDuplicate: boolean; - cancelled: boolean; -} - -export interface KikuMergePreviewRequest { - keepNoteId: number; - deleteNoteId: number; - deleteDuplicate: boolean; -} - -export interface KikuMergePreviewResponse { - ok: boolean; - compact?: Record; - full?: Record; - error?: string; -} - -export type RuntimeOptionId = - | 'anki.autoUpdateNewCards' - | 'subtitle.annotation.nPlusOne' - | 'subtitle.annotation.jlpt' - | 'subtitle.annotation.frequency' - | 'anki.kikuFieldGrouping' - | 'anki.nPlusOneMatchMode'; - -export type RuntimeOptionScope = 'ankiConnect' | 'subtitle'; - -export type RuntimeOptionValueType = 'boolean' | 'enum'; - -export type RuntimeOptionValue = boolean | string; - -export type NPlusOneMatchMode = 'headword' | 'surface'; -export type FrequencyDictionaryMatchMode = 
'headword' | 'surface'; - -export interface RuntimeOptionState { - id: RuntimeOptionId; - label: string; - scope: RuntimeOptionScope; - valueType: RuntimeOptionValueType; - value: RuntimeOptionValue; - allowedValues: RuntimeOptionValue[]; - requiresRestart: boolean; -} - -export interface RuntimeOptionApplyResult { - ok: boolean; - option?: RuntimeOptionState; - osdMessage?: string; - requiresRestart?: boolean; - error?: string; -} - -export interface AnkiConnectConfig { - enabled?: boolean; - url?: string; - pollingRate?: number; - proxy?: { - enabled?: boolean; - host?: string; - port?: number; - upstreamUrl?: string; - }; - tags?: string[]; - fields?: { - word?: string; - audio?: string; - image?: string; - sentence?: string; - miscInfo?: string; - translation?: string; - }; - ai?: boolean | AiFeatureConfig; - media?: { - generateAudio?: boolean; - generateImage?: boolean; - imageType?: 'static' | 'avif'; - imageFormat?: 'jpg' | 'png' | 'webp'; - imageQuality?: number; - imageMaxWidth?: number; - imageMaxHeight?: number; - animatedFps?: number; - animatedMaxWidth?: number; - animatedMaxHeight?: number; - animatedCrf?: number; - syncAnimatedImageToWordAudio?: boolean; - audioPadding?: number; - fallbackDuration?: number; - maxMediaDuration?: number; - }; - knownWords?: { - highlightEnabled?: boolean; - refreshMinutes?: number; - addMinedWordsImmediately?: boolean; - matchMode?: NPlusOneMatchMode; - decks?: Record; - color?: string; - }; - nPlusOne?: { - nPlusOne?: string; - minSentenceWords?: number; - }; - behavior?: { - overwriteAudio?: boolean; - overwriteImage?: boolean; - mediaInsertMode?: 'append' | 'prepend'; - highlightWord?: boolean; - notificationType?: 'osd' | 'system' | 'both' | 'none'; - autoUpdateNewCards?: boolean; - }; - metadata?: { - pattern?: string; - }; - deck?: string; - isLapis?: { - enabled?: boolean; - sentenceCardModel?: string; - }; - isKiku?: { - enabled?: boolean; - fieldGrouping?: 'auto' | 'manual' | 'disabled'; - 
deleteDuplicateInAuto?: boolean; - }; -} - -export interface SubtitleStyleConfig { - enableJlpt?: boolean; - preserveLineBreaks?: boolean; - autoPauseVideoOnHover?: boolean; - autoPauseVideoOnYomitanPopup?: boolean; - hoverTokenColor?: string; - hoverTokenBackgroundColor?: string; - nameMatchEnabled?: boolean; - nameMatchColor?: string; - fontFamily?: string; - fontSize?: number; - fontColor?: string; - fontWeight?: string | number; - fontStyle?: string; - lineHeight?: string | number; - letterSpacing?: string; - wordSpacing?: string | number; - fontKerning?: string; - textRendering?: string; - textShadow?: string; - backdropFilter?: string; - backgroundColor?: string; - nPlusOneColor?: string; - knownWordColor?: string; - jlptColors?: { - N1: string; - N2: string; - N3: string; - N4: string; - N5: string; - }; - frequencyDictionary?: { - enabled?: boolean; - sourcePath?: string; - topX?: number; - mode?: FrequencyDictionaryMode; - matchMode?: FrequencyDictionaryMatchMode; - singleColor?: string; - bandedColors?: [string, string, string, string, string]; - }; - secondary?: { - fontFamily?: string; - fontSize?: number; - fontColor?: string; - fontWeight?: string | number; - fontStyle?: string; - lineHeight?: string | number; - letterSpacing?: string; - wordSpacing?: string | number; - fontKerning?: string; - textRendering?: string; - textShadow?: string; - backdropFilter?: string; - backgroundColor?: string; - }; -} - -export interface TokenPos1ExclusionConfig { - defaults?: string[]; - add?: string[]; - remove?: string[]; -} - -export interface ResolvedTokenPos1ExclusionConfig { - defaults: string[]; - add: string[]; - remove: string[]; -} - -export interface TokenPos2ExclusionConfig { - defaults?: string[]; - add?: string[]; - remove?: string[]; -} - -export interface ResolvedTokenPos2ExclusionConfig { - defaults: string[]; - add: string[]; - remove: string[]; -} - -export type FrequencyDictionaryMode = 'single' | 'banded'; - -export type { SubtitleCue } from 
'./core/services/subtitle-cue-parser'; - -export type SubtitleSidebarLayout = 'overlay' | 'embedded'; - -export interface SubtitleSidebarConfig { - enabled?: boolean; - autoOpen?: boolean; - layout?: SubtitleSidebarLayout; - toggleKey?: string; - pauseVideoOnHover?: boolean; - autoScroll?: boolean; - maxWidth?: number; - opacity?: number; - backgroundColor?: string; - textColor?: string; - fontFamily?: string; - fontSize?: number; - timestampColor?: string; - activeLineColor?: string; - activeLineBackgroundColor?: string; - hoverLineBackgroundColor?: string; -} - -export interface ShortcutsConfig { - toggleVisibleOverlayGlobal?: string | null; - copySubtitle?: string | null; - copySubtitleMultiple?: string | null; - updateLastCardFromClipboard?: string | null; - triggerFieldGrouping?: string | null; - triggerSubsync?: string | null; - mineSentence?: string | null; - mineSentenceMultiple?: string | null; - multiCopyTimeoutMs?: number; - toggleSecondarySub?: string | null; - markAudioCard?: string | null; - openRuntimeOptions?: string | null; - openJimaku?: string | null; -} - -export type ControllerButtonBinding = - | 'none' - | 'select' - | 'buttonSouth' - | 'buttonEast' - | 'buttonNorth' - | 'buttonWest' - | 'leftShoulder' - | 'rightShoulder' - | 'leftStickPress' - | 'rightStickPress' - | 'leftTrigger' - | 'rightTrigger'; - -export type ControllerAxisBinding = 'leftStickX' | 'leftStickY' | 'rightStickX' | 'rightStickY'; -export type ControllerTriggerInputMode = 'auto' | 'digital' | 'analog'; -export type ControllerAxisDirection = 'negative' | 'positive'; -export type ControllerDpadFallback = 'none' | 'horizontal' | 'vertical'; - -export interface ControllerNoneBinding { - kind: 'none'; -} - -export interface ControllerButtonInputBinding { - kind: 'button'; - buttonIndex: number; -} - -export interface ControllerAxisDirectionInputBinding { - kind: 'axis'; - axisIndex: number; - direction: ControllerAxisDirection; -} - -export interface ControllerAxisInputBinding { 
- kind: 'axis'; - axisIndex: number; - dpadFallback?: ControllerDpadFallback; -} - -export type ControllerDiscreteBindingConfig = - | ControllerButtonBinding - | ControllerNoneBinding - | ControllerButtonInputBinding - | ControllerAxisDirectionInputBinding; - -export type ResolvedControllerDiscreteBinding = - | ControllerNoneBinding - | ControllerButtonInputBinding - | ControllerAxisDirectionInputBinding; - -export type ControllerAxisBindingConfig = - | ControllerAxisBinding - | ControllerNoneBinding - | ControllerAxisInputBinding; - -export type ResolvedControllerAxisBinding = - | ControllerNoneBinding - | { - kind: 'axis'; - axisIndex: number; - dpadFallback: ControllerDpadFallback; - }; - -export interface ControllerBindingsConfig { - toggleLookup?: ControllerDiscreteBindingConfig; - closeLookup?: ControllerDiscreteBindingConfig; - toggleKeyboardOnlyMode?: ControllerDiscreteBindingConfig; - mineCard?: ControllerDiscreteBindingConfig; - quitMpv?: ControllerDiscreteBindingConfig; - previousAudio?: ControllerDiscreteBindingConfig; - nextAudio?: ControllerDiscreteBindingConfig; - playCurrentAudio?: ControllerDiscreteBindingConfig; - toggleMpvPause?: ControllerDiscreteBindingConfig; - leftStickHorizontal?: ControllerAxisBindingConfig; - leftStickVertical?: ControllerAxisBindingConfig; - rightStickHorizontal?: ControllerAxisBindingConfig; - rightStickVertical?: ControllerAxisBindingConfig; -} - -export interface ResolvedControllerBindingsConfig { - toggleLookup?: ResolvedControllerDiscreteBinding; - closeLookup?: ResolvedControllerDiscreteBinding; - toggleKeyboardOnlyMode?: ResolvedControllerDiscreteBinding; - mineCard?: ResolvedControllerDiscreteBinding; - quitMpv?: ResolvedControllerDiscreteBinding; - previousAudio?: ResolvedControllerDiscreteBinding; - nextAudio?: ResolvedControllerDiscreteBinding; - playCurrentAudio?: ResolvedControllerDiscreteBinding; - toggleMpvPause?: ResolvedControllerDiscreteBinding; - leftStickHorizontal?: ResolvedControllerAxisBinding; - 
leftStickVertical?: ResolvedControllerAxisBinding; - rightStickHorizontal?: ResolvedControllerAxisBinding; - rightStickVertical?: ResolvedControllerAxisBinding; -} - -export interface ControllerButtonIndicesConfig { - select?: number; - buttonSouth?: number; - buttonEast?: number; - buttonNorth?: number; - buttonWest?: number; - leftShoulder?: number; - rightShoulder?: number; - leftStickPress?: number; - rightStickPress?: number; - leftTrigger?: number; - rightTrigger?: number; -} - -export interface ControllerConfig { - enabled?: boolean; - preferredGamepadId?: string; - preferredGamepadLabel?: string; - smoothScroll?: boolean; - scrollPixelsPerSecond?: number; - horizontalJumpPixels?: number; - stickDeadzone?: number; - triggerInputMode?: ControllerTriggerInputMode; - triggerDeadzone?: number; - repeatDelayMs?: number; - repeatIntervalMs?: number; - buttonIndices?: ControllerButtonIndicesConfig; - bindings?: ControllerBindingsConfig; -} - -export interface ControllerPreferenceUpdate { - preferredGamepadId: string; - preferredGamepadLabel: string; -} - -export type ControllerConfigUpdate = ControllerConfig; - -export interface ControllerDeviceInfo { - id: string; - index: number; - mapping: string; - connected: boolean; -} - -export interface ControllerButtonSnapshot { - value: number; - pressed: boolean; - touched?: boolean; -} - -export interface ControllerRuntimeSnapshot { - connectedGamepads: ControllerDeviceInfo[]; - activeGamepadId: string | null; - rawAxes: number[]; - rawButtons: ControllerButtonSnapshot[]; -} - -export type JimakuLanguagePreference = 'ja' | 'en' | 'none'; -export type { YoutubeTrackKind }; - -export interface YoutubeTrackOption { - id: string; - language: string; - sourceLanguage: string; - kind: YoutubeTrackKind; - label: string; - title?: string; - downloadUrl?: string; - fileExtension?: string; -} - -export interface YoutubePickerOpenPayload { - sessionId: string; - url: string; - tracks: YoutubeTrackOption[]; - defaultPrimaryTrackId: 
string | null; - defaultSecondaryTrackId: string | null; - hasTracks: boolean; -} - -export type YoutubePickerResolveRequest = - | { - sessionId: string; - action: 'continue-without-subtitles'; - primaryTrackId: null; - secondaryTrackId: null; - } - | { - sessionId: string; - action: 'use-selected'; - primaryTrackId: string | null; - secondaryTrackId: string | null; - }; - -export interface YoutubePickerResolveResult { - ok: boolean; - message: string; -} - -export interface JimakuConfig { - apiKey?: string; - apiKeyCommand?: string; - apiBaseUrl?: string; - languagePreference?: JimakuLanguagePreference; - maxEntryResults?: number; -} - -export type AnilistCharacterDictionaryEvictionPolicy = 'disable' | 'delete'; -export type AnilistCharacterDictionaryProfileScope = 'all' | 'active'; -export type AnilistCharacterDictionaryCollapsibleSectionKey = - | 'description' - | 'characterInformation' - | 'voicedBy'; - -export interface AnilistCharacterDictionaryCollapsibleSectionsConfig { - description?: boolean; - characterInformation?: boolean; - voicedBy?: boolean; -} - -export interface AnilistCharacterDictionaryConfig { - enabled?: boolean; - refreshTtlHours?: number; - maxLoaded?: number; - evictionPolicy?: AnilistCharacterDictionaryEvictionPolicy; - profileScope?: AnilistCharacterDictionaryProfileScope; - collapsibleSections?: AnilistCharacterDictionaryCollapsibleSectionsConfig; -} - -export interface AnilistConfig { - enabled?: boolean; - accessToken?: string; - characterDictionary?: AnilistCharacterDictionaryConfig; -} - -export interface YomitanConfig { - externalProfilePath?: string; -} - -export interface JellyfinConfig { - enabled?: boolean; - serverUrl?: string; - username?: string; - deviceId?: string; - clientName?: string; - clientVersion?: string; - defaultLibraryId?: string; - remoteControlEnabled?: boolean; - remoteControlAutoConnect?: boolean; - autoAnnounce?: boolean; - remoteControlDeviceName?: string; - pullPictures?: boolean; - iconCacheDir?: string; 
- directPlayPreferred?: boolean; - directPlayContainers?: string[]; - transcodeVideoCodec?: string; -} - -export interface DiscordPresenceConfig { - enabled?: boolean; - updateIntervalMs?: number; - debounceMs?: number; -} - -export interface AiFeatureConfig { - enabled?: boolean; - model?: string; - systemPrompt?: string; -} - -export interface AiConfig { - enabled?: boolean; - apiKey?: string; - apiKeyCommand?: string; - baseUrl?: string; - model?: string; - systemPrompt?: string; - requestTimeoutMs?: number; -} - -export interface YoutubeConfig { - primarySubLanguages?: string[]; -} - -export interface YoutubeSubgenConfig { - whisperBin?: string; - whisperModel?: string; - whisperVadModel?: string; - whisperThreads?: number; - fixWithAi?: boolean; - ai?: AiFeatureConfig; -} - -export interface StatsConfig { - toggleKey?: string; - markWatchedKey?: string; - serverPort?: number; - autoStartServer?: boolean; - autoOpenBrowser?: boolean; -} - -export type ImmersionTrackingRetentionMode = 'preset' | 'advanced'; -export type ImmersionTrackingRetentionPreset = 'minimal' | 'balanced' | 'deep-history'; - -export interface ImmersionTrackingConfig { - enabled?: boolean; - dbPath?: string; - batchSize?: number; - flushIntervalMs?: number; - queueCap?: number; - payloadCapBytes?: number; - maintenanceIntervalMs?: number; - retentionMode?: ImmersionTrackingRetentionMode; - retentionPreset?: ImmersionTrackingRetentionPreset; - retention?: { - eventsDays?: number; - telemetryDays?: number; - sessionsDays?: number; - dailyRollupsDays?: number; - monthlyRollupsDays?: number; - vacuumIntervalDays?: number; - }; - lifetimeSummaries?: { - global?: boolean; - anime?: boolean; - media?: boolean; - }; -} - -export interface Config { - subtitlePosition?: SubtitlePosition; - keybindings?: Keybinding[]; - websocket?: WebSocketConfig; - annotationWebsocket?: AnnotationWebSocketConfig; - texthooker?: TexthookerConfig; - controller?: ControllerConfig; - ankiConnect?: AnkiConnectConfig; - 
shortcuts?: ShortcutsConfig; - secondarySub?: SecondarySubConfig; - subsync?: SubsyncConfig; - startupWarmups?: StartupWarmupsConfig; - subtitleStyle?: SubtitleStyleConfig; - subtitleSidebar?: SubtitleSidebarConfig; - auto_start_overlay?: boolean; - jimaku?: JimakuConfig; - anilist?: AnilistConfig; - yomitan?: YomitanConfig; - jellyfin?: JellyfinConfig; - discordPresence?: DiscordPresenceConfig; - ai?: AiConfig; - youtube?: YoutubeConfig; - youtubeSubgen?: YoutubeSubgenConfig; - immersionTracking?: ImmersionTrackingConfig; - stats?: StatsConfig; - logging?: { - level?: 'debug' | 'info' | 'warn' | 'error'; - }; -} - -export type RawConfig = Config; - -export interface ResolvedConfig { - subtitlePosition: SubtitlePosition; - keybindings: Keybinding[]; - websocket: Required; - annotationWebsocket: Required; - texthooker: Required; - controller: { - enabled: boolean; - preferredGamepadId: string; - preferredGamepadLabel: string; - smoothScroll: boolean; - scrollPixelsPerSecond: number; - horizontalJumpPixels: number; - stickDeadzone: number; - triggerInputMode: ControllerTriggerInputMode; - triggerDeadzone: number; - repeatDelayMs: number; - repeatIntervalMs: number; - buttonIndices: Required; - bindings: Required; - }; - ankiConnect: AnkiConnectConfig & { - enabled: boolean; - url: string; - pollingRate: number; - proxy: { - enabled: boolean; - host: string; - port: number; - upstreamUrl: string; - }; - tags: string[]; - fields: { - word: string; - audio: string; - image: string; - sentence: string; - miscInfo: string; - translation: string; - }; - ai: AiFeatureConfig & { - enabled: boolean; - }; - media: { - generateAudio: boolean; - generateImage: boolean; - imageType: 'static' | 'avif'; - imageFormat: 'jpg' | 'png' | 'webp'; - imageQuality: number; - imageMaxWidth?: number; - imageMaxHeight?: number; - animatedFps: number; - animatedMaxWidth: number; - animatedMaxHeight?: number; - animatedCrf: number; - syncAnimatedImageToWordAudio: boolean; - audioPadding: 
number; - fallbackDuration: number; - maxMediaDuration: number; - }; - knownWords: { - highlightEnabled: boolean; - refreshMinutes: number; - addMinedWordsImmediately: boolean; - matchMode: NPlusOneMatchMode; - decks: Record; - color: string; - }; - nPlusOne: { - nPlusOne: string; - minSentenceWords: number; - }; - behavior: { - overwriteAudio: boolean; - overwriteImage: boolean; - mediaInsertMode: 'append' | 'prepend'; - highlightWord: boolean; - notificationType: 'osd' | 'system' | 'both' | 'none'; - autoUpdateNewCards: boolean; - }; - metadata: { - pattern: string; - }; - isLapis: { - enabled: boolean; - sentenceCardModel: string; - }; - isKiku: { - enabled: boolean; - fieldGrouping: 'auto' | 'manual' | 'disabled'; - deleteDuplicateInAuto: boolean; - }; - }; - shortcuts: Required; - secondarySub: Required; - subsync: Required; - startupWarmups: { - lowPowerMode: boolean; - mecab: boolean; - yomitanExtension: boolean; - subtitleDictionaries: boolean; - jellyfinRemoteSession: boolean; - }; - subtitleStyle: Required> & { - secondary: Required>; - frequencyDictionary: { - enabled: boolean; - sourcePath: string; - topX: number; - mode: FrequencyDictionaryMode; - matchMode: FrequencyDictionaryMatchMode; - singleColor: string; - bandedColors: [string, string, string, string, string]; - }; - }; - subtitleSidebar: Required; - auto_start_overlay: boolean; - jimaku: JimakuConfig & { - apiBaseUrl: string; - languagePreference: JimakuLanguagePreference; - maxEntryResults: number; - }; - anilist: { - enabled: boolean; - accessToken: string; - characterDictionary: { - enabled: boolean; - refreshTtlHours: number; - maxLoaded: number; - evictionPolicy: AnilistCharacterDictionaryEvictionPolicy; - profileScope: AnilistCharacterDictionaryProfileScope; - collapsibleSections: Required; - }; - }; - yomitan: { - externalProfilePath: string; - }; - jellyfin: { - enabled: boolean; - serverUrl: string; - username: string; - deviceId: string; - clientName: string; - clientVersion: string; 
- defaultLibraryId: string; - remoteControlEnabled: boolean; - remoteControlAutoConnect: boolean; - autoAnnounce: boolean; - remoteControlDeviceName: string; - pullPictures: boolean; - iconCacheDir: string; - directPlayPreferred: boolean; - directPlayContainers: string[]; - transcodeVideoCodec: string; - }; - discordPresence: { - enabled: boolean; - updateIntervalMs: number; - debounceMs: number; - }; - ai: AiConfig & { - enabled: boolean; - apiKey: string; - apiKeyCommand: string; - baseUrl: string; - model: string; - systemPrompt: string; - requestTimeoutMs: number; - }; - youtube: YoutubeConfig & { - primarySubLanguages: string[]; - }; - youtubeSubgen: YoutubeSubgenConfig & { - whisperBin: string; - whisperModel: string; - whisperVadModel: string; - whisperThreads: number; - fixWithAi: boolean; - ai: AiFeatureConfig; - }; - immersionTracking: { - enabled: boolean; - dbPath?: string; - batchSize: number; - flushIntervalMs: number; - queueCap: number; - payloadCapBytes: number; - maintenanceIntervalMs: number; - retentionMode: ImmersionTrackingRetentionMode; - retentionPreset: ImmersionTrackingRetentionPreset; - retention: { - eventsDays: number; - telemetryDays: number; - sessionsDays: number; - dailyRollupsDays: number; - monthlyRollupsDays: number; - vacuumIntervalDays: number; - }; - lifetimeSummaries: { - global: boolean; - anime: boolean; - media: boolean; - }; - }; - stats: { - toggleKey: string; - markWatchedKey: string; - serverPort: number; - autoStartServer: boolean; - autoOpenBrowser: boolean; - }; - logging: { - level: 'debug' | 'info' | 'warn' | 'error'; - }; -} - -export interface ConfigValidationWarning { - path: string; - value: unknown; - fallback: unknown; - message: string; -} - -export interface SubsyncSourceTrack { - id: number; - label: string; -} - -export interface SubsyncManualPayload { - sourceTracks: SubsyncSourceTrack[]; -} - -export interface SubsyncManualRunRequest { - engine: 'alass' | 'ffsubsync'; - sourceTrackId?: number | null; 
-} - -export interface SubsyncResult { - ok: boolean; - message: string; -} - -export interface ClipboardAppendResult { - ok: boolean; - message: string; -} - -export interface SubtitleData { - text: string; - tokens: MergedToken[] | null; - startTime?: number | null; - endTime?: number | null; -} - -export interface SubtitleSidebarSnapshot { - cues: SubtitleCue[]; - currentTimeSec?: number | null; - currentSubtitle: { - text: string; - startTime: number | null; - endTime: number | null; - }; - config: Required; -} - -export interface MpvSubtitleRenderMetrics { - subPos: number; - subFontSize: number; - subScale: number; - subMarginY: number; - subMarginX: number; - subFont: string; - subSpacing: number; - subBold: boolean; - subItalic: boolean; - subBorderSize: number; - subShadowOffset: number; - subAssOverride: string; - subScaleByWindow: boolean; - subUseMargins: boolean; - osdHeight: number; - osdDimensions: { - w: number; - h: number; - ml: number; - mr: number; - mt: number; - mb: number; - } | null; -} - -export type OverlayLayer = 'visible'; - -export interface OverlayContentRect { - x: number; - y: number; - width: number; - height: number; -} - -export interface OverlayContentMeasurement { - layer: OverlayLayer; - measuredAtMs: number; - viewport: { - width: number; - height: number; - }; - contentRect: OverlayContentRect | null; -} - -export interface MecabStatus { - available: boolean; - enabled: boolean; - path: string | null; -} - -export type JimakuConfidence = 'high' | 'medium' | 'low'; - -export interface JimakuMediaInfo { - title: string; - season: number | null; - episode: number | null; - confidence: JimakuConfidence; - filename: string; - rawTitle: string; -} - -export interface JimakuSearchQuery { - query: string; -} - -export interface JimakuEntryFlags { - anime?: boolean; - movie?: boolean; - adult?: boolean; - external?: boolean; - unverified?: boolean; -} - -export interface JimakuEntry { - id: number; - name: string; - english_name?: 
string | null; - japanese_name?: string | null; - flags?: JimakuEntryFlags; - last_modified?: string; -} - -export interface JimakuFilesQuery { - entryId: number; - episode?: number | null; -} - -export interface JimakuFileEntry { - name: string; - url: string; - size: number; - last_modified: string; -} - -export interface JimakuDownloadQuery { - entryId: number; - url: string; - name: string; -} - -export interface JimakuApiError { - error: string; - code?: number; - retryAfter?: number; -} - -export type JimakuApiResponse = { ok: true; data: T } | { ok: false; error: JimakuApiError }; - -export type JimakuDownloadResult = - | { ok: true; path: string } - | { ok: false; error: JimakuApiError }; - -export interface ConfigHotReloadPayload { - keybindings: Keybinding[]; - subtitleStyle: SubtitleStyleConfig | null; - subtitleSidebar: Required; - secondarySubMode: SecondarySubMode; -} - -export type ResolvedControllerConfig = ResolvedConfig['controller']; - -export interface SubtitleHoverTokenPayload { - tokenIndex: number | null; -} - -export interface ElectronAPI { - getOverlayLayer: () => 'visible' | 'modal' | null; - onSubtitle: (callback: (data: SubtitleData) => void) => void; - onVisibility: (callback: (visible: boolean) => void) => void; - onSubtitlePosition: (callback: (position: SubtitlePosition | null) => void) => void; - getOverlayVisibility: () => Promise; - getCurrentSubtitle: () => Promise; - getCurrentSubtitleRaw: () => Promise; - getCurrentSubtitleAss: () => Promise; - getSubtitleSidebarSnapshot: () => Promise; - getPlaybackPaused: () => Promise; - onSubtitleAss: (callback: (assText: string) => void) => void; - setIgnoreMouseEvents: (ignore: boolean, options?: { forward?: boolean }) => void; - openYomitanSettings: () => void; - recordYomitanLookup: () => void; - getSubtitlePosition: () => Promise; - saveSubtitlePosition: (position: SubtitlePosition) => void; - getMecabStatus: () => Promise; - setMecabEnabled: (enabled: boolean) => void; - 
sendMpvCommand: (command: (string | number)[]) => void; - getKeybindings: () => Promise; - getConfiguredShortcuts: () => Promise>; - getStatsToggleKey: () => Promise; - getMarkWatchedKey: () => Promise; - markActiveVideoWatched: () => Promise; - getControllerConfig: () => Promise; - saveControllerConfig: (update: ControllerConfigUpdate) => Promise; - saveControllerPreference: (update: ControllerPreferenceUpdate) => Promise; - getJimakuMediaInfo: () => Promise; - jimakuSearchEntries: (query: JimakuSearchQuery) => Promise>; - jimakuListFiles: (query: JimakuFilesQuery) => Promise>; - jimakuDownloadFile: (query: JimakuDownloadQuery) => Promise; - quitApp: () => void; - toggleDevTools: () => void; - toggleOverlay: () => void; - toggleStatsOverlay: () => void; - getAnkiConnectStatus: () => Promise; - setAnkiConnectEnabled: (enabled: boolean) => void; - clearAnkiConnectHistory: () => void; - onSecondarySub: (callback: (text: string) => void) => void; - onSecondarySubMode: (callback: (mode: SecondarySubMode) => void) => void; - getSecondarySubMode: () => Promise; - getCurrentSecondarySub: () => Promise; - focusMainWindow: () => Promise; - getSubtitleStyle: () => Promise; - onSubsyncManualOpen: (callback: (payload: SubsyncManualPayload) => void) => void; - runSubsyncManual: (request: SubsyncManualRunRequest) => Promise; - onKikuFieldGroupingRequest: (callback: (data: KikuFieldGroupingRequestData) => void) => void; - kikuBuildMergePreview: (request: KikuMergePreviewRequest) => Promise; - kikuFieldGroupingRespond: (choice: KikuFieldGroupingChoice) => void; - getRuntimeOptions: () => Promise; - setRuntimeOptionValue: ( - id: RuntimeOptionId, - value: RuntimeOptionValue, - ) => Promise; - cycleRuntimeOption: (id: RuntimeOptionId, direction: 1 | -1) => Promise; - onRuntimeOptionsChanged: (callback: (options: RuntimeOptionState[]) => void) => void; - onOpenRuntimeOptions: (callback: () => void) => void; - onOpenJimaku: (callback: () => void) => void; - onOpenYoutubeTrackPicker: 
(callback: (payload: YoutubePickerOpenPayload) => void) => void; - onCancelYoutubeTrackPicker: (callback: () => void) => void; - onKeyboardModeToggleRequested: (callback: () => void) => void; - onLookupWindowToggleRequested: (callback: () => void) => void; - appendClipboardVideoToQueue: () => Promise; - youtubePickerResolve: ( - request: YoutubePickerResolveRequest, - ) => Promise; - notifyOverlayModalClosed: ( - modal: - | 'runtime-options' - | 'subsync' - | 'jimaku' - | 'youtube-track-picker' - | 'kiku' - | 'controller-select' - | 'controller-debug' - | 'subtitle-sidebar', - ) => void; - notifyOverlayModalOpened: ( - modal: - | 'runtime-options' - | 'subsync' - | 'jimaku' - | 'youtube-track-picker' - | 'kiku' - | 'controller-select' - | 'controller-debug' - | 'subtitle-sidebar', - ) => void; - reportOverlayContentBounds: (measurement: OverlayContentMeasurement) => void; - onConfigHotReload: (callback: (payload: ConfigHotReloadPayload) => void) => void; -} - -declare global { - interface Window { - electronAPI: ElectronAPI; - } -} +export * from './types/anki'; +export * from './types/config'; +export * from './types/integrations'; +export * from './types/runtime'; +export * from './types/runtime-options'; +export * from './types/subtitle'; diff --git a/src/types/anki.ts b/src/types/anki.ts new file mode 100644 index 0000000..e6b15b5 --- /dev/null +++ b/src/types/anki.ts @@ -0,0 +1,113 @@ +import type { AiFeatureConfig } from './integrations'; +import type { NPlusOneMatchMode } from './subtitle'; + +export interface NotificationOptions { + body?: string; + icon?: string; +} + +export interface KikuDuplicateCardInfo { + noteId: number; + expression: string; + sentencePreview: string; + hasAudio: boolean; + hasImage: boolean; + isOriginal: boolean; +} + +export interface KikuFieldGroupingRequestData { + original: KikuDuplicateCardInfo; + duplicate: KikuDuplicateCardInfo; +} + +export interface KikuFieldGroupingChoice { + keepNoteId: number; + deleteNoteId: number; + 
deleteDuplicate: boolean; + cancelled: boolean; +} + +export interface KikuMergePreviewRequest { + keepNoteId: number; + deleteNoteId: number; + deleteDuplicate: boolean; +} + +export interface KikuMergePreviewResponse { + ok: boolean; + compact?: Record; + full?: Record; + error?: string; +} + +export interface AnkiConnectConfig { + enabled?: boolean; + url?: string; + pollingRate?: number; + proxy?: { + enabled?: boolean; + host?: string; + port?: number; + upstreamUrl?: string; + }; + tags?: string[]; + fields?: { + word?: string; + audio?: string; + image?: string; + sentence?: string; + miscInfo?: string; + translation?: string; + }; + ai?: boolean | AiFeatureConfig; + media?: { + generateAudio?: boolean; + generateImage?: boolean; + imageType?: 'static' | 'avif'; + imageFormat?: 'jpg' | 'png' | 'webp'; + imageQuality?: number; + imageMaxWidth?: number; + imageMaxHeight?: number; + animatedFps?: number; + animatedMaxWidth?: number; + animatedMaxHeight?: number; + animatedCrf?: number; + syncAnimatedImageToWordAudio?: boolean; + audioPadding?: number; + fallbackDuration?: number; + maxMediaDuration?: number; + }; + knownWords?: { + highlightEnabled?: boolean; + refreshMinutes?: number; + addMinedWordsImmediately?: boolean; + matchMode?: NPlusOneMatchMode; + decks?: Record; + color?: string; + }; + nPlusOne?: { + nPlusOne?: string; + minSentenceWords?: number; + }; + behavior?: { + overwriteAudio?: boolean; + overwriteImage?: boolean; + mediaInsertMode?: 'append' | 'prepend'; + highlightWord?: boolean; + notificationType?: 'osd' | 'system' | 'both' | 'none'; + autoUpdateNewCards?: boolean; + }; + metadata?: { + pattern?: string; + }; + deck?: string; + isLapis?: { + enabled?: boolean; + sentenceCardModel?: string; + }; + isKiku?: { + enabled?: boolean; + fieldGrouping?: 'auto' | 'manual' | 'disabled'; + deleteDuplicateInAuto?: boolean; + }; +} diff --git a/src/types/config.ts b/src/types/config.ts new file mode 100644 index 0000000..fd44f4e --- /dev/null +++ 
b/src/types/config.ts @@ -0,0 +1,341 @@ +import type { AnkiConnectConfig } from './anki'; +import type { + AiConfig, + AiFeatureConfig, + AnilistCharacterDictionaryCollapsibleSectionsConfig, + AnilistCharacterDictionaryEvictionPolicy, + AnilistCharacterDictionaryProfileScope, + AnilistConfig, + DiscordPresenceConfig, + ImmersionTrackingConfig, + ImmersionTrackingRetentionMode, + ImmersionTrackingRetentionPreset, + JellyfinConfig, + JimakuConfig, + JimakuLanguagePreference, + StatsConfig, + YomitanConfig, + YoutubeConfig, + YoutubeSubgenConfig, +} from './integrations'; +import type { + ControllerButtonIndicesConfig, + ControllerConfig, + ControllerTriggerInputMode, + Keybinding, + ResolvedControllerBindingsConfig, +} from './runtime'; +import type { + FrequencyDictionaryMatchMode, + FrequencyDictionaryMode, + NPlusOneMatchMode, + SecondarySubConfig, + SubtitlePosition, + SubtitleSidebarConfig, + SubtitleStyleConfig, +} from './subtitle'; + +export interface WebSocketConfig { + enabled?: boolean | 'auto'; + port?: number; +} + +export interface AnnotationWebSocketConfig { + enabled?: boolean; + port?: number; +} + +export interface TexthookerConfig { + launchAtStartup?: boolean; + openBrowser?: boolean; +} + +export type SubsyncMode = 'auto' | 'manual'; + +export interface SubsyncConfig { + defaultMode?: SubsyncMode; + alass_path?: string; + ffsubsync_path?: string; + ffmpeg_path?: string; + replace?: boolean; +} + +export interface StartupWarmupsConfig { + lowPowerMode?: boolean; + mecab?: boolean; + yomitanExtension?: boolean; + subtitleDictionaries?: boolean; + jellyfinRemoteSession?: boolean; +} + +export interface ShortcutsConfig { + toggleVisibleOverlayGlobal?: string | null; + copySubtitle?: string | null; + copySubtitleMultiple?: string | null; + updateLastCardFromClipboard?: string | null; + triggerFieldGrouping?: string | null; + triggerSubsync?: string | null; + mineSentence?: string | null; + mineSentenceMultiple?: string | null; + multiCopyTimeoutMs?: 
number; + toggleSecondarySub?: string | null; + markAudioCard?: string | null; + openRuntimeOptions?: string | null; + openJimaku?: string | null; +} + +export interface Config { + subtitlePosition?: SubtitlePosition; + keybindings?: Keybinding[]; + websocket?: WebSocketConfig; + annotationWebsocket?: AnnotationWebSocketConfig; + texthooker?: TexthookerConfig; + controller?: ControllerConfig; + ankiConnect?: AnkiConnectConfig; + shortcuts?: ShortcutsConfig; + secondarySub?: SecondarySubConfig; + subsync?: SubsyncConfig; + startupWarmups?: StartupWarmupsConfig; + subtitleStyle?: SubtitleStyleConfig; + subtitleSidebar?: SubtitleSidebarConfig; + auto_start_overlay?: boolean; + jimaku?: JimakuConfig; + anilist?: AnilistConfig; + yomitan?: YomitanConfig; + jellyfin?: JellyfinConfig; + discordPresence?: DiscordPresenceConfig; + ai?: AiConfig; + youtube?: YoutubeConfig; + youtubeSubgen?: YoutubeSubgenConfig; + immersionTracking?: ImmersionTrackingConfig; + stats?: StatsConfig; + logging?: { + level?: 'debug' | 'info' | 'warn' | 'error'; + }; +} + +export type RawConfig = Config; + +export interface ResolvedConfig { + subtitlePosition: SubtitlePosition; + keybindings: Keybinding[]; + websocket: Required; + annotationWebsocket: Required; + texthooker: Required; + controller: { + enabled: boolean; + preferredGamepadId: string; + preferredGamepadLabel: string; + smoothScroll: boolean; + scrollPixelsPerSecond: number; + horizontalJumpPixels: number; + stickDeadzone: number; + triggerInputMode: ControllerTriggerInputMode; + triggerDeadzone: number; + repeatDelayMs: number; + repeatIntervalMs: number; + buttonIndices: Required; + bindings: Required; + }; + ankiConnect: AnkiConnectConfig & { + enabled: boolean; + url: string; + pollingRate: number; + proxy: { + enabled: boolean; + host: string; + port: number; + upstreamUrl: string; + }; + tags: string[]; + fields: { + word: string; + audio: string; + image: string; + sentence: string; + miscInfo: string; + translation: string; + 
}; + ai: AiFeatureConfig & { + enabled: boolean; + }; + media: { + generateAudio: boolean; + generateImage: boolean; + imageType: 'static' | 'avif'; + imageFormat: 'jpg' | 'png' | 'webp'; + imageQuality: number; + imageMaxWidth?: number; + imageMaxHeight?: number; + animatedFps: number; + animatedMaxWidth: number; + animatedMaxHeight?: number; + animatedCrf: number; + syncAnimatedImageToWordAudio: boolean; + audioPadding: number; + fallbackDuration: number; + maxMediaDuration: number; + }; + knownWords: { + highlightEnabled: boolean; + refreshMinutes: number; + addMinedWordsImmediately: boolean; + matchMode: NPlusOneMatchMode; + decks: Record; + color: string; + }; + nPlusOne: { + nPlusOne: string; + minSentenceWords: number; + }; + behavior: { + overwriteAudio: boolean; + overwriteImage: boolean; + mediaInsertMode: 'append' | 'prepend'; + highlightWord: boolean; + notificationType: 'osd' | 'system' | 'both' | 'none'; + autoUpdateNewCards: boolean; + }; + metadata: { + pattern: string; + }; + isLapis: { + enabled: boolean; + sentenceCardModel: string; + }; + isKiku: { + enabled: boolean; + fieldGrouping: 'auto' | 'manual' | 'disabled'; + deleteDuplicateInAuto: boolean; + }; + }; + shortcuts: Required; + secondarySub: Required; + subsync: Required; + startupWarmups: { + lowPowerMode: boolean; + mecab: boolean; + yomitanExtension: boolean; + subtitleDictionaries: boolean; + jellyfinRemoteSession: boolean; + }; + subtitleStyle: Required> & { + secondary: Required>; + frequencyDictionary: { + enabled: boolean; + sourcePath: string; + topX: number; + mode: FrequencyDictionaryMode; + matchMode: FrequencyDictionaryMatchMode; + singleColor: string; + bandedColors: [string, string, string, string, string]; + }; + }; + subtitleSidebar: Required; + auto_start_overlay: boolean; + jimaku: JimakuConfig & { + apiBaseUrl: string; + languagePreference: JimakuLanguagePreference; + maxEntryResults: number; + }; + anilist: { + enabled: boolean; + accessToken: string; + 
characterDictionary: { + enabled: boolean; + refreshTtlHours: number; + maxLoaded: number; + evictionPolicy: AnilistCharacterDictionaryEvictionPolicy; + profileScope: AnilistCharacterDictionaryProfileScope; + collapsibleSections: Required; + }; + }; + yomitan: { + externalProfilePath: string; + }; + jellyfin: { + enabled: boolean; + serverUrl: string; + username: string; + deviceId: string; + clientName: string; + clientVersion: string; + defaultLibraryId: string; + remoteControlEnabled: boolean; + remoteControlAutoConnect: boolean; + autoAnnounce: boolean; + remoteControlDeviceName: string; + pullPictures: boolean; + iconCacheDir: string; + directPlayPreferred: boolean; + directPlayContainers: string[]; + transcodeVideoCodec: string; + }; + discordPresence: { + enabled: boolean; + presenceStyle: import('./integrations').DiscordPresenceStylePreset; + updateIntervalMs: number; + debounceMs: number; + }; + ai: AiConfig & { + enabled: boolean; + apiKey: string; + apiKeyCommand: string; + baseUrl: string; + model: string; + systemPrompt: string; + requestTimeoutMs: number; + }; + youtube: YoutubeConfig & { + primarySubLanguages: string[]; + }; + youtubeSubgen: YoutubeSubgenConfig & { + whisperBin: string; + whisperModel: string; + whisperVadModel: string; + whisperThreads: number; + fixWithAi: boolean; + ai: AiFeatureConfig; + }; + immersionTracking: { + enabled: boolean; + dbPath?: string; + batchSize: number; + flushIntervalMs: number; + queueCap: number; + payloadCapBytes: number; + maintenanceIntervalMs: number; + retentionMode: ImmersionTrackingRetentionMode; + retentionPreset: ImmersionTrackingRetentionPreset; + retention: { + eventsDays: number; + telemetryDays: number; + sessionsDays: number; + dailyRollupsDays: number; + monthlyRollupsDays: number; + vacuumIntervalDays: number; + }; + lifetimeSummaries: { + global: boolean; + anime: boolean; + media: boolean; + }; + }; + stats: { + toggleKey: string; + markWatchedKey: string; + serverPort: number; + 
autoStartServer: boolean; + autoOpenBrowser: boolean; + }; + logging: { + level: 'debug' | 'info' | 'warn' | 'error'; + }; +} + +export interface ConfigValidationWarning { + path: string; + value: unknown; + fallback: unknown; + message: string; +} diff --git a/src/types/integrations.ts b/src/types/integrations.ts new file mode 100644 index 0000000..37cfe31 --- /dev/null +++ b/src/types/integrations.ts @@ -0,0 +1,238 @@ +import type { YoutubeTrackKind } from '../core/services/youtube/kinds'; + +export type JimakuLanguagePreference = 'ja' | 'en' | 'none'; +export type { YoutubeTrackKind }; + +export interface YoutubeTrackOption { + id: string; + language: string; + sourceLanguage: string; + kind: YoutubeTrackKind; + label: string; + title?: string; + downloadUrl?: string; + fileExtension?: string; +} + +export interface YoutubePickerOpenPayload { + sessionId: string; + url: string; + tracks: YoutubeTrackOption[]; + defaultPrimaryTrackId: string | null; + defaultSecondaryTrackId: string | null; + hasTracks: boolean; +} + +export type YoutubePickerResolveRequest = + | { + sessionId: string; + action: 'continue-without-subtitles'; + primaryTrackId: null; + secondaryTrackId: null; + } + | { + sessionId: string; + action: 'use-selected'; + primaryTrackId: string | null; + secondaryTrackId: string | null; + }; + +export interface YoutubePickerResolveResult { + ok: boolean; + message: string; +} + +export interface JimakuConfig { + apiKey?: string; + apiKeyCommand?: string; + apiBaseUrl?: string; + languagePreference?: JimakuLanguagePreference; + maxEntryResults?: number; +} + +export type AnilistCharacterDictionaryEvictionPolicy = 'disable' | 'delete'; +export type AnilistCharacterDictionaryProfileScope = 'all' | 'active'; +export type AnilistCharacterDictionaryCollapsibleSectionKey = + | 'description' + | 'characterInformation' + | 'voicedBy'; + +export interface AnilistCharacterDictionaryCollapsibleSectionsConfig { + description?: boolean; + characterInformation?: 
boolean; + voicedBy?: boolean; +} + +export interface AnilistCharacterDictionaryConfig { + enabled?: boolean; + refreshTtlHours?: number; + maxLoaded?: number; + evictionPolicy?: AnilistCharacterDictionaryEvictionPolicy; + profileScope?: AnilistCharacterDictionaryProfileScope; + collapsibleSections?: AnilistCharacterDictionaryCollapsibleSectionsConfig; +} + +export interface AnilistConfig { + enabled?: boolean; + accessToken?: string; + characterDictionary?: AnilistCharacterDictionaryConfig; +} + +export interface YomitanConfig { + externalProfilePath?: string; +} + +export interface JellyfinConfig { + enabled?: boolean; + serverUrl?: string; + username?: string; + deviceId?: string; + clientName?: string; + clientVersion?: string; + defaultLibraryId?: string; + remoteControlEnabled?: boolean; + remoteControlAutoConnect?: boolean; + autoAnnounce?: boolean; + remoteControlDeviceName?: string; + pullPictures?: boolean; + iconCacheDir?: string; + directPlayPreferred?: boolean; + directPlayContainers?: string[]; + transcodeVideoCodec?: string; +} + +export type DiscordPresenceStylePreset = 'default' | 'meme' | 'japanese' | 'minimal'; + +export interface DiscordPresenceConfig { + enabled?: boolean; + presenceStyle?: DiscordPresenceStylePreset; + updateIntervalMs?: number; + debounceMs?: number; +} + +export interface AiFeatureConfig { + enabled?: boolean; + model?: string; + systemPrompt?: string; +} + +export interface AiConfig { + enabled?: boolean; + apiKey?: string; + apiKeyCommand?: string; + baseUrl?: string; + model?: string; + systemPrompt?: string; + requestTimeoutMs?: number; +} + +export interface YoutubeConfig { + primarySubLanguages?: string[]; +} + +export interface YoutubeSubgenConfig { + whisperBin?: string; + whisperModel?: string; + whisperVadModel?: string; + whisperThreads?: number; + fixWithAi?: boolean; + ai?: AiFeatureConfig; +} + +export interface StatsConfig { + toggleKey?: string; + markWatchedKey?: string; + serverPort?: number; + 
autoStartServer?: boolean; + autoOpenBrowser?: boolean; +} + +export type ImmersionTrackingRetentionMode = 'preset' | 'advanced'; +export type ImmersionTrackingRetentionPreset = 'minimal' | 'balanced' | 'deep-history'; + +export interface ImmersionTrackingConfig { + enabled?: boolean; + dbPath?: string; + batchSize?: number; + flushIntervalMs?: number; + queueCap?: number; + payloadCapBytes?: number; + maintenanceIntervalMs?: number; + retentionMode?: ImmersionTrackingRetentionMode; + retentionPreset?: ImmersionTrackingRetentionPreset; + retention?: { + eventsDays?: number; + telemetryDays?: number; + sessionsDays?: number; + dailyRollupsDays?: number; + monthlyRollupsDays?: number; + vacuumIntervalDays?: number; + }; + lifetimeSummaries?: { + global?: boolean; + anime?: boolean; + media?: boolean; + }; +} + +export type JimakuConfidence = 'high' | 'medium' | 'low'; + +export interface JimakuMediaInfo { + title: string; + season: number | null; + episode: number | null; + confidence: JimakuConfidence; + filename: string; + rawTitle: string; +} + +export interface JimakuSearchQuery { + query: string; +} + +export interface JimakuEntryFlags { + anime?: boolean; + movie?: boolean; + adult?: boolean; + external?: boolean; + unverified?: boolean; +} + +export interface JimakuEntry { + id: number; + name: string; + english_name?: string | null; + japanese_name?: string | null; + flags?: JimakuEntryFlags; + last_modified?: string; +} + +export interface JimakuFilesQuery { + entryId: number; + episode?: number | null; +} + +export interface JimakuFileEntry { + name: string; + url: string; + size: number; + last_modified: string; +} + +export interface JimakuDownloadQuery { + entryId: number; + url: string; + name: string; +} + +export interface JimakuApiError { + error: string; + code?: number; + retryAfter?: number; +} + +export type JimakuApiResponse = { ok: true; data: T } | { ok: false; error: JimakuApiError }; + +export type JimakuDownloadResult = + | { ok: true; 
path: string } + | { ok: false; error: JimakuApiError }; diff --git a/src/types/runtime-options.ts b/src/types/runtime-options.ts new file mode 100644 index 0000000..18814b3 --- /dev/null +++ b/src/types/runtime-options.ts @@ -0,0 +1,31 @@ +export type RuntimeOptionId = + | 'anki.autoUpdateNewCards' + | 'subtitle.annotation.nPlusOne' + | 'subtitle.annotation.jlpt' + | 'subtitle.annotation.frequency' + | 'anki.kikuFieldGrouping' + | 'anki.nPlusOneMatchMode'; + +export type RuntimeOptionScope = 'ankiConnect' | 'subtitle'; + +export type RuntimeOptionValueType = 'boolean' | 'enum'; + +export type RuntimeOptionValue = boolean | string; + +export interface RuntimeOptionState { + id: RuntimeOptionId; + label: string; + scope: RuntimeOptionScope; + valueType: RuntimeOptionValueType; + value: RuntimeOptionValue; + allowedValues: RuntimeOptionValue[]; + requiresRestart: boolean; +} + +export interface RuntimeOptionApplyResult { + ok: boolean; + option?: RuntimeOptionState; + osdMessage?: string; + requiresRestart?: boolean; + error?: string; +} diff --git a/src/types/runtime.ts b/src/types/runtime.ts new file mode 100644 index 0000000..bf55555 --- /dev/null +++ b/src/types/runtime.ts @@ -0,0 +1,394 @@ +import type { + KikuFieldGroupingChoice, + KikuFieldGroupingRequestData, + KikuMergePreviewRequest, + KikuMergePreviewResponse, +} from './anki'; +import type { ResolvedConfig, ShortcutsConfig } from './config'; +import type { + JimakuApiResponse, + JimakuDownloadQuery, + JimakuDownloadResult, + JimakuEntry, + JimakuFileEntry, + JimakuFilesQuery, + JimakuMediaInfo, + JimakuSearchQuery, + YoutubePickerOpenPayload, + YoutubePickerResolveRequest, + YoutubePickerResolveResult, +} from './integrations'; +import type { + SecondarySubMode, + SubtitleData, + SubtitlePosition, + SubtitleSidebarConfig, + SubtitleSidebarSnapshot, + SubtitleStyleConfig, +} from './subtitle'; +import type { + RuntimeOptionApplyResult, + RuntimeOptionId, + RuntimeOptionState, + RuntimeOptionValue, +} from 
'./runtime-options'; + +export interface WindowGeometry { + x: number; + y: number; + width: number; + height: number; +} + +export interface Keybinding { + key: string; + command: (string | number)[] | null; +} + +export interface MpvClient { + currentSubText: string; + currentVideoPath: string; + currentMediaTitle?: string | null; + currentTimePos: number; + currentSubStart: number; + currentSubEnd: number; + currentAudioStreamIndex: number | null; + requestProperty?: (name: string) => Promise; + send(command: { command: unknown[]; request_id?: number }): boolean; +} + +export interface SubsyncSourceTrack { + id: number; + label: string; +} + +export interface SubsyncManualPayload { + sourceTracks: SubsyncSourceTrack[]; +} + +export interface SubsyncManualRunRequest { + engine: 'alass' | 'ffsubsync'; + sourceTrackId?: number | null; +} + +export interface SubsyncResult { + ok: boolean; + message: string; +} + +export type ControllerButtonBinding = + | 'none' + | 'select' + | 'buttonSouth' + | 'buttonEast' + | 'buttonNorth' + | 'buttonWest' + | 'leftShoulder' + | 'rightShoulder' + | 'leftStickPress' + | 'rightStickPress' + | 'leftTrigger' + | 'rightTrigger'; + +export type ControllerAxisBinding = 'leftStickX' | 'leftStickY' | 'rightStickX' | 'rightStickY'; +export type ControllerTriggerInputMode = 'auto' | 'digital' | 'analog'; +export type ControllerAxisDirection = 'negative' | 'positive'; +export type ControllerDpadFallback = 'none' | 'horizontal' | 'vertical'; + +export interface ControllerNoneBinding { + kind: 'none'; +} + +export interface ControllerButtonInputBinding { + kind: 'button'; + buttonIndex: number; +} + +export interface ControllerAxisDirectionInputBinding { + kind: 'axis'; + axisIndex: number; + direction: ControllerAxisDirection; +} + +export interface ControllerAxisInputBinding { + kind: 'axis'; + axisIndex: number; + dpadFallback?: ControllerDpadFallback; +} + +export type ControllerDiscreteBindingConfig = + | ControllerButtonBinding + | 
ControllerNoneBinding + | ControllerButtonInputBinding + | ControllerAxisDirectionInputBinding; + +export type ResolvedControllerDiscreteBinding = + | ControllerNoneBinding + | ControllerButtonInputBinding + | ControllerAxisDirectionInputBinding; + +export type ControllerAxisBindingConfig = + | ControllerAxisBinding + | ControllerNoneBinding + | ControllerAxisInputBinding; + +export type ResolvedControllerAxisBinding = + | ControllerNoneBinding + | { + kind: 'axis'; + axisIndex: number; + dpadFallback: ControllerDpadFallback; + }; + +export interface ControllerBindingsConfig { + toggleLookup?: ControllerDiscreteBindingConfig; + closeLookup?: ControllerDiscreteBindingConfig; + toggleKeyboardOnlyMode?: ControllerDiscreteBindingConfig; + mineCard?: ControllerDiscreteBindingConfig; + quitMpv?: ControllerDiscreteBindingConfig; + previousAudio?: ControllerDiscreteBindingConfig; + nextAudio?: ControllerDiscreteBindingConfig; + playCurrentAudio?: ControllerDiscreteBindingConfig; + toggleMpvPause?: ControllerDiscreteBindingConfig; + leftStickHorizontal?: ControllerAxisBindingConfig; + leftStickVertical?: ControllerAxisBindingConfig; + rightStickHorizontal?: ControllerAxisBindingConfig; + rightStickVertical?: ControllerAxisBindingConfig; +} + +export interface ResolvedControllerBindingsConfig { + toggleLookup?: ResolvedControllerDiscreteBinding; + closeLookup?: ResolvedControllerDiscreteBinding; + toggleKeyboardOnlyMode?: ResolvedControllerDiscreteBinding; + mineCard?: ResolvedControllerDiscreteBinding; + quitMpv?: ResolvedControllerDiscreteBinding; + previousAudio?: ResolvedControllerDiscreteBinding; + nextAudio?: ResolvedControllerDiscreteBinding; + playCurrentAudio?: ResolvedControllerDiscreteBinding; + toggleMpvPause?: ResolvedControllerDiscreteBinding; + leftStickHorizontal?: ResolvedControllerAxisBinding; + leftStickVertical?: ResolvedControllerAxisBinding; + rightStickHorizontal?: ResolvedControllerAxisBinding; + rightStickVertical?: ResolvedControllerAxisBinding; +} 
+ +export interface ControllerButtonIndicesConfig { + select?: number; + buttonSouth?: number; + buttonEast?: number; + buttonNorth?: number; + buttonWest?: number; + leftShoulder?: number; + rightShoulder?: number; + leftStickPress?: number; + rightStickPress?: number; + leftTrigger?: number; + rightTrigger?: number; +} + +export interface ControllerConfig { + enabled?: boolean; + preferredGamepadId?: string; + preferredGamepadLabel?: string; + smoothScroll?: boolean; + scrollPixelsPerSecond?: number; + horizontalJumpPixels?: number; + stickDeadzone?: number; + triggerInputMode?: ControllerTriggerInputMode; + triggerDeadzone?: number; + repeatDelayMs?: number; + repeatIntervalMs?: number; + buttonIndices?: ControllerButtonIndicesConfig; + bindings?: ControllerBindingsConfig; +} + +export interface ControllerPreferenceUpdate { + preferredGamepadId: string; + preferredGamepadLabel: string; +} + +export type ControllerConfigUpdate = ControllerConfig; + +export interface ControllerDeviceInfo { + id: string; + index: number; + mapping: string; + connected: boolean; +} + +export interface ControllerButtonSnapshot { + value: number; + pressed: boolean; + touched?: boolean; +} + +export interface ControllerRuntimeSnapshot { + connectedGamepads: ControllerDeviceInfo[]; + activeGamepadId: string | null; + rawAxes: number[]; + rawButtons: ControllerButtonSnapshot[]; +} + +export interface MpvSubtitleRenderMetrics { + subPos: number; + subFontSize: number; + subScale: number; + subMarginY: number; + subMarginX: number; + subFont: string; + subSpacing: number; + subBold: boolean; + subItalic: boolean; + subBorderSize: number; + subShadowOffset: number; + subAssOverride: string; + subScaleByWindow: boolean; + subUseMargins: boolean; + osdHeight: number; + osdDimensions: { + w: number; + h: number; + ml: number; + mr: number; + mt: number; + mb: number; + } | null; +} + +export type OverlayLayer = 'visible'; + +export interface OverlayContentRect { + x: number; + y: number; + 
width: number; + height: number; +} + +export interface OverlayContentMeasurement { + layer: OverlayLayer; + measuredAtMs: number; + viewport: { + width: number; + height: number; + }; + contentRect: OverlayContentRect | null; +} + +export interface MecabStatus { + available: boolean; + enabled: boolean; + path: string | null; +} + +export interface ClipboardAppendResult { + ok: boolean; + message: string; +} + +export interface ConfigHotReloadPayload { + keybindings: Keybinding[]; + subtitleStyle: SubtitleStyleConfig | null; + subtitleSidebar: Required; + secondarySubMode: SecondarySubMode; +} + +export type ResolvedControllerConfig = ResolvedConfig['controller']; + +export interface ElectronAPI { + getOverlayLayer: () => 'visible' | 'modal' | null; + onSubtitle: (callback: (data: SubtitleData) => void) => void; + onVisibility: (callback: (visible: boolean) => void) => void; + onSubtitlePosition: (callback: (position: SubtitlePosition | null) => void) => void; + getOverlayVisibility: () => Promise; + getCurrentSubtitle: () => Promise; + getCurrentSubtitleRaw: () => Promise; + getCurrentSubtitleAss: () => Promise; + getSubtitleSidebarSnapshot: () => Promise; + getPlaybackPaused: () => Promise; + onSubtitleAss: (callback: (assText: string) => void) => void; + setIgnoreMouseEvents: (ignore: boolean, options?: { forward?: boolean }) => void; + openYomitanSettings: () => void; + recordYomitanLookup: () => void; + getSubtitlePosition: () => Promise; + saveSubtitlePosition: (position: SubtitlePosition) => void; + getMecabStatus: () => Promise; + setMecabEnabled: (enabled: boolean) => void; + sendMpvCommand: (command: (string | number)[]) => void; + getKeybindings: () => Promise; + getConfiguredShortcuts: () => Promise>; + getStatsToggleKey: () => Promise; + getMarkWatchedKey: () => Promise; + markActiveVideoWatched: () => Promise; + getControllerConfig: () => Promise; + saveControllerConfig: (update: ControllerConfigUpdate) => Promise; + saveControllerPreference: 
(update: ControllerPreferenceUpdate) => Promise; + getJimakuMediaInfo: () => Promise; + jimakuSearchEntries: (query: JimakuSearchQuery) => Promise>; + jimakuListFiles: (query: JimakuFilesQuery) => Promise>; + jimakuDownloadFile: (query: JimakuDownloadQuery) => Promise; + quitApp: () => void; + toggleDevTools: () => void; + toggleOverlay: () => void; + toggleStatsOverlay: () => void; + getAnkiConnectStatus: () => Promise; + setAnkiConnectEnabled: (enabled: boolean) => void; + clearAnkiConnectHistory: () => void; + onSecondarySub: (callback: (text: string) => void) => void; + onSecondarySubMode: (callback: (mode: SecondarySubMode) => void) => void; + getSecondarySubMode: () => Promise; + getCurrentSecondarySub: () => Promise; + focusMainWindow: () => Promise; + getSubtitleStyle: () => Promise; + onSubsyncManualOpen: (callback: (payload: SubsyncManualPayload) => void) => void; + runSubsyncManual: (request: SubsyncManualRunRequest) => Promise; + onKikuFieldGroupingRequest: (callback: (data: KikuFieldGroupingRequestData) => void) => void; + kikuBuildMergePreview: (request: KikuMergePreviewRequest) => Promise; + kikuFieldGroupingRespond: (choice: KikuFieldGroupingChoice) => void; + getRuntimeOptions: () => Promise; + setRuntimeOptionValue: ( + id: RuntimeOptionId, + value: RuntimeOptionValue, + ) => Promise; + cycleRuntimeOption: (id: RuntimeOptionId, direction: 1 | -1) => Promise; + onRuntimeOptionsChanged: (callback: (options: RuntimeOptionState[]) => void) => void; + onOpenRuntimeOptions: (callback: () => void) => void; + onOpenJimaku: (callback: () => void) => void; + onOpenYoutubeTrackPicker: (callback: (payload: YoutubePickerOpenPayload) => void) => void; + onCancelYoutubeTrackPicker: (callback: () => void) => void; + onKeyboardModeToggleRequested: (callback: () => void) => void; + onLookupWindowToggleRequested: (callback: () => void) => void; + appendClipboardVideoToQueue: () => Promise; + youtubePickerResolve: ( + request: YoutubePickerResolveRequest, + ) => 
Promise; + notifyOverlayModalClosed: ( + modal: + | 'runtime-options' + | 'subsync' + | 'jimaku' + | 'youtube-track-picker' + | 'kiku' + | 'controller-select' + | 'controller-debug' + | 'subtitle-sidebar', + ) => void; + notifyOverlayModalOpened: ( + modal: + | 'runtime-options' + | 'subsync' + | 'jimaku' + | 'youtube-track-picker' + | 'kiku' + | 'controller-select' + | 'controller-debug' + | 'subtitle-sidebar', + ) => void; + reportOverlayContentBounds: (measurement: OverlayContentMeasurement) => void; + onConfigHotReload: (callback: (payload: ConfigHotReloadPayload) => void) => void; +} + +declare global { + interface Window { + electronAPI: ElectronAPI; + } +} diff --git a/src/types/subtitle.ts b/src/types/subtitle.ts new file mode 100644 index 0000000..d744b20 --- /dev/null +++ b/src/types/subtitle.ts @@ -0,0 +1,195 @@ +import type { SubtitleCue } from '../core/services/subtitle-cue-parser'; + +export enum PartOfSpeech { + noun = 'noun', + verb = 'verb', + i_adjective = 'i_adjective', + na_adjective = 'na_adjective', + particle = 'particle', + bound_auxiliary = 'bound_auxiliary', + symbol = 'symbol', + other = 'other', +} + +export interface Token { + word: string; + partOfSpeech: PartOfSpeech; + pos1: string; + pos2: string; + pos3: string; + pos4: string; + inflectionType: string; + inflectionForm: string; + headword: string; + katakanaReading: string; + pronunciation: string; +} + +export interface MergedToken { + surface: string; + reading: string; + headword: string; + startPos: number; + endPos: number; + partOfSpeech: PartOfSpeech; + pos1?: string; + pos2?: string; + pos3?: string; + isMerged: boolean; + isKnown: boolean; + isNPlusOneTarget: boolean; + isNameMatch?: boolean; + jlptLevel?: JlptLevel; + frequencyRank?: number; +} + +export type FrequencyDictionaryLookup = (term: string) => number | null; + +export type JlptLevel = 'N1' | 'N2' | 'N3' | 'N4' | 'N5'; + +export interface SubtitlePosition { + yPercent: number; +} + +export interface 
SubtitleStyle { + fontSize: number; +} + +export type SecondarySubMode = 'hidden' | 'visible' | 'hover'; + +export interface SecondarySubConfig { + secondarySubLanguages?: string[]; + autoLoadSecondarySub?: boolean; + defaultMode?: SecondarySubMode; +} + +export type NPlusOneMatchMode = 'headword' | 'surface'; +export type FrequencyDictionaryMatchMode = 'headword' | 'surface'; + +export interface SubtitleStyleConfig { + enableJlpt?: boolean; + preserveLineBreaks?: boolean; + autoPauseVideoOnHover?: boolean; + autoPauseVideoOnYomitanPopup?: boolean; + hoverTokenColor?: string; + hoverTokenBackgroundColor?: string; + nameMatchEnabled?: boolean; + nameMatchColor?: string; + fontFamily?: string; + fontSize?: number; + fontColor?: string; + fontWeight?: string | number; + fontStyle?: string; + lineHeight?: string | number; + letterSpacing?: string; + wordSpacing?: string | number; + fontKerning?: string; + textRendering?: string; + textShadow?: string; + backdropFilter?: string; + backgroundColor?: string; + nPlusOneColor?: string; + knownWordColor?: string; + jlptColors?: { + N1: string; + N2: string; + N3: string; + N4: string; + N5: string; + }; + frequencyDictionary?: { + enabled?: boolean; + sourcePath?: string; + topX?: number; + mode?: FrequencyDictionaryMode; + matchMode?: FrequencyDictionaryMatchMode; + singleColor?: string; + bandedColors?: [string, string, string, string, string]; + }; + secondary?: { + fontFamily?: string; + fontSize?: number; + fontColor?: string; + fontWeight?: string | number; + fontStyle?: string; + lineHeight?: string | number; + letterSpacing?: string; + wordSpacing?: string | number; + fontKerning?: string; + textRendering?: string; + textShadow?: string; + backdropFilter?: string; + backgroundColor?: string; + }; +} + +export interface TokenPos1ExclusionConfig { + defaults?: string[]; + add?: string[]; + remove?: string[]; +} + +export interface ResolvedTokenPos1ExclusionConfig { + defaults: string[]; + add: string[]; + remove: 
string[]; +} + +export interface TokenPos2ExclusionConfig { + defaults?: string[]; + add?: string[]; + remove?: string[]; +} + +export interface ResolvedTokenPos2ExclusionConfig { + defaults: string[]; + add: string[]; + remove: string[]; +} + +export type FrequencyDictionaryMode = 'single' | 'banded'; + +export type { SubtitleCue }; + +export type SubtitleSidebarLayout = 'overlay' | 'embedded'; + +export interface SubtitleSidebarConfig { + enabled?: boolean; + autoOpen?: boolean; + layout?: SubtitleSidebarLayout; + toggleKey?: string; + pauseVideoOnHover?: boolean; + autoScroll?: boolean; + maxWidth?: number; + opacity?: number; + backgroundColor?: string; + textColor?: string; + fontFamily?: string; + fontSize?: number; + timestampColor?: string; + activeLineColor?: string; + activeLineBackgroundColor?: string; + hoverLineBackgroundColor?: string; +} + +export interface SubtitleData { + text: string; + tokens: MergedToken[] | null; + startTime?: number | null; + endTime?: number | null; +} + +export interface SubtitleSidebarSnapshot { + cues: SubtitleCue[]; + currentTimeSec?: number | null; + currentSubtitle: { + text: string; + startTime: number | null; + endTime: number | null; + }; + config: Required; +} + +export interface SubtitleHoverTokenPayload { + tokenIndex: number | null; +}