25 Commits

Author SHA1 Message Date
e2a7597b4f update README 2026-03-08 22:10:09 -07:00
2e59c21078 chore: prep v0.5.3 release 2026-03-08 22:08:46 -07:00
7b5ab3294d chore: cut v0.5.2 release 2026-03-08 20:46:16 -07:00
2bbf38f987 fix: pin signpath artifact configuration 2026-03-08 20:44:00 -07:00
f09c91494d chore: cut v0.5.1 release 2026-03-08 20:28:16 -07:00
58ec9b76e0 fix: harden windows release signing workflow 2026-03-08 20:24:47 -07:00
7a196f69d6 update submodules 2026-03-08 20:03:05 -07:00
c799a8de3c Prepare Windows release and signing process (#16) 2026-03-08 19:51:30 -07:00
34d2dce8dc chore(vendor): bump texthooker-ui 2026-03-08 16:11:33 -07:00
3a22a97761 chore(vendor): bump subminer-yomitan 2026-03-08 16:11:26 -07:00
962243e959 docs(backlog): sync recent task records 2026-03-08 16:11:20 -07:00
021010a338 test(ai): cover shared client helpers 2026-03-08 16:11:13 -07:00
4c0575afe0 refactor(youtube): extract subtitle generation pipeline 2026-03-08 16:10:56 -07:00
9e46176519 feat(ai): split shared provider config from Anki runtime 2026-03-08 16:10:51 -07:00
f10e905dbd build: enforce changelog workflow in CI 2026-03-08 16:10:37 -07:00
e4aa8ff907 fix: gate macOS overlay shortcuts by mpv focus 2026-03-08 16:04:43 -07:00
a6ece5388a fix(launcher): remove youtube subtitle mode 2026-03-08 16:03:24 -07:00
6a44b54b51 fix: reuse background overlay across mpv reconnects 2026-03-08 16:02:57 -07:00
93cd688625 fix: harden AI subtitle fix response parsing 2026-03-08 16:01:40 -07:00
8e319a417d fix: skip aniskip for url playback 2026-03-08 16:01:19 -07:00
38034db1e4 fix: reuse background tokenization warmups 2026-03-08 16:01:11 -07:00
f775f90360 fix: improve secondary subtitle readability 2026-03-07 23:51:23 -08:00
55dff6ced7 clean up assets 2026-03-07 23:36:29 -08:00
d0c11d347b refactor: remove root node and npm workflow deps 2026-03-07 21:19:14 -08:00
f0418c6e56 fix: repair GitHub release publish workflow 2026-03-07 19:43:35 -08:00
237 changed files with 10674 additions and 1431 deletions

3
.github/pull_request_template.md vendored Normal file
View File

@@ -0,0 +1,3 @@
## Checklist
- [ ] Added a changelog fragment in `changes/`, or this PR is labeled `skip-changelog`

View File

@@ -13,6 +13,7 @@ jobs:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
submodules: true
- name: Setup Bun
@@ -20,11 +21,6 @@ jobs:
with:
bun-version: 1.3.5
- name: Setup Node
uses: actions/setup-node@v4
with:
node-version: 22.12.0
- name: Cache dependencies
uses: actions/cache@v4
with:
@@ -39,6 +35,13 @@ jobs:
- name: Install dependencies
run: bun install --frozen-lockfile
- name: Lint changelog fragments
run: bun run changelog:lint
- name: Enforce pull request changelog fragments (`skip-changelog` label bypass)
if: github.event_name == 'pull_request'
run: bun run changelog:pr-check --base-ref "origin/${{ github.base_ref }}" --head-ref "HEAD" --labels "${{ join(github.event.pull_request.labels.*.name, ',') }}"
- name: Build (TypeScript check)
# Keep explicit typecheck for fast fail before full build/bundle.
run: bun run typecheck

View File

@@ -26,11 +26,6 @@ jobs:
with:
bun-version: 1.3.5
- name: Setup Node
uses: actions/setup-node@v4
with:
node-version: 22.12.0
- name: Cache dependencies
uses: actions/cache@v4
with:
@@ -85,11 +80,6 @@ jobs:
with:
bun-version: 1.3.5
- name: Setup Node
uses: actions/setup-node@v4
with:
node-version: 22.12.0
- name: Cache dependencies
uses: actions/cache@v4
with:
@@ -113,8 +103,6 @@ jobs:
- name: Build AppImage
run: bun run build:appimage
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Build unversioned AppImage
run: |
@@ -147,11 +135,6 @@ jobs:
with:
bun-version: 1.3.5
- name: Setup Node
uses: actions/setup-node@v4
with:
node-version: 22.12.0
- name: Cache dependencies
uses: actions/cache@v4
with:
@@ -196,7 +179,6 @@ jobs:
- name: Build signed + notarized macOS artifacts
run: bun run build:mac
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CSC_LINK: ${{ secrets.CSC_LINK }}
CSC_KEY_PASSWORD: ${{ secrets.CSC_KEY_PASSWORD }}
APPLE_ID: ${{ secrets.APPLE_ID }}
@@ -211,8 +193,56 @@ jobs:
release/*.dmg
release/*.zip
build-windows:
needs: [quality-gate]
runs-on: windows-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
submodules: true
- name: Setup Bun
uses: oven-sh/setup-bun@v2
with:
bun-version: 1.3.5
- name: Cache dependencies
uses: actions/cache@v4
with:
path: |
~/.bun/install/cache
node_modules
vendor/texthooker-ui/node_modules
vendor/subminer-yomitan/node_modules
key: ${{ runner.os }}-bun-${{ hashFiles('bun.lock', 'vendor/texthooker-ui/package.json', 'vendor/subminer-yomitan/package-lock.json') }}
restore-keys: |
${{ runner.os }}-bun-
- name: Install dependencies
run: bun install --frozen-lockfile
- name: Build texthooker-ui
shell: powershell
run: |
Set-Location vendor/texthooker-ui
bun install
bun run build
- name: Build unsigned Windows artifacts
run: bun run build:win:unsigned
- name: Upload Windows artifacts
uses: actions/upload-artifact@v4
with:
name: windows
path: |
release/*.exe
release/*.zip
if-no-files-found: error
release:
needs: [build-linux, build-macos]
needs: [build-linux, build-macos, build-windows]
runs-on: ubuntu-latest
steps:
- name: Checkout
@@ -232,6 +262,12 @@ jobs:
name: macos
path: release
- name: Download Windows artifacts
uses: actions/download-artifact@v4
with:
name: windows
path: release
- name: Setup Bun
uses: oven-sh/setup-bun@v2
with:
@@ -270,7 +306,7 @@ jobs:
- name: Generate checksums
run: |
shopt -s nullglob
files=(release/*.AppImage release/*.dmg release/*.zip release/*.tar.gz dist/launcher/subminer)
files=(release/*.AppImage release/*.dmg release/*.exe release/*.zip release/*.tar.gz dist/launcher/subminer)
if [ "${#files[@]}" -eq 0 ]; then
echo "No release artifacts found for checksum generation."
exit 1
@@ -281,23 +317,11 @@ jobs:
id: version
run: echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
- name: Generate changelog
id: changelog
run: |
PREV_TAG=$(git describe --tags --abbrev=0 HEAD^ 2>/dev/null || echo "")
if [ -n "$PREV_TAG" ]; then
CHANGES=$(git log --pretty=format:"- %s" ${PREV_TAG}..HEAD)
else
COMMIT_COUNT=$(git rev-list --count HEAD)
if [ "$COMMIT_COUNT" -gt 10 ]; then
CHANGES=$(git log --pretty=format:"- %s" HEAD~10..HEAD)
else
CHANGES=$(git log --pretty=format:"- %s")
fi
fi
echo "CHANGES<<EOF" >> $GITHUB_OUTPUT
echo "$CHANGES" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
- name: Verify changelog is ready for tagged release
run: bun run changelog:check --version "${{ steps.version.outputs.VERSION }}"
- name: Generate release notes from changelog
run: bun run changelog:release-notes --version "${{ steps.version.outputs.VERSION }}"
- name: Publish Release
env:
@@ -305,53 +329,23 @@ jobs:
run: |
set -euo pipefail
cat > release-body.md <<'EOF'
## Changes
${{ steps.changelog.outputs.CHANGES }}
## Installation
### AppImage (Recommended)
1. Download the AppImage below
2. Make it executable: `chmod +x SubMiner.AppImage`
3. Run: `./SubMiner.AppImage`
### macOS
1. Download `subminer-*.dmg`
2. Open the DMG and drag `SubMiner.app` into `/Applications`
3. If needed, use the ZIP artifact as an alternative
### Manual Installation
See the [README](https://github.com/${{ github.repository }}#installation) for manual installation instructions.
### Optional Assets (config example + mpv plugin + rofi theme)
1. Download `subminer-assets.tar.gz`
2. Extract and copy `config.example.jsonc` to `~/.config/SubMiner/config.jsonc`
3. Copy `plugin/subminer/` directory contents to `~/.config/mpv/scripts/`
4. Copy `plugin/subminer.conf` to `~/.config/mpv/script-opts/`
5. Copy `assets/themes/subminer.rasi` to:
- Linux: `~/.local/share/SubMiner/themes/subminer.rasi`
- macOS: `~/Library/Application Support/SubMiner/themes/subminer.rasi`
Note: the `subminer` wrapper script uses Bun (`#!/usr/bin/env bun`), so `bun` must be installed and on `PATH`.
EOF
if gh release view "${{ steps.version.outputs.VERSION }}" >/dev/null 2>&1; then
# Do not pass the prerelease flag here; gh defaults to a normal release.
gh release edit "${{ steps.version.outputs.VERSION }}" \
--draft=false \
--title "${{ steps.version.outputs.VERSION }}" \
--notes-file release-body.md \
--prerelease false
--notes-file release/release-notes.md
else
gh release create "${{ steps.version.outputs.VERSION }}" \
--title "${{ steps.version.outputs.VERSION }}" \
--notes-file release-body.md \
--prerelease false
--notes-file release/release-notes.md
fi
shopt -s nullglob
artifacts=(
release/*.AppImage
release/*.dmg
release/*.exe
release/*.zip
release/*.tar.gz
release/SHA256SUMS.txt

2
.gitignore vendored
View File

@@ -37,4 +37,4 @@ tests/*
.worktrees/
.codex/*
.agents/*
docs/*
favicon.png

View File

@@ -1,3 +1,60 @@
# AGENTS.MD
## PR Feedback
- Active PR: `gh pr view --json number,title,url --jq '"PR #\(.number): \(.title)\n\(.url)"'`.
- PR comments: `gh pr view …` + `gh api …/comments --paginate`.
- Replies: cite fix + file/line; resolve threads only after fix lands.
- When merging a PR: thank the contributor in `CHANGELOG.md`.
## Changelog
- User-visible PRs: add one fragment in `changes/*.md`.
- Fragment format:
`type: added|changed|fixed|docs|internal`
`area: <short-area>`
blank line
`- bullet`
- `changes/README.md`: instructions only; generator ignores it.
- No release-note entry wanted: use PR label `skip-changelog`.
- CI runs `bun run changelog:lint` + `bun run changelog:pr-check` on PRs.
- Release prep: `bun run changelog:build`, review `CHANGELOG.md` + `release/release-notes.md`, commit generated changelog + fragment deletions, then tag.
- Release CI expects committed changelog entry already present; do not rely on tag job to invent notes.
## Flow & Runtime
- Use the repo's package manager/runtime; no swaps w/o approval.
- Use Codex background for long jobs; tmux only for interactive/persistent (debugger/server).
## Build / Test
- Before handoff: run full gate (lint/typecheck/tests/docs).
- CI red: `gh run list/view`, rerun, fix, push, repeat til green.
- Keep it observable (logs, panes, tails, MCP/browser tools).
- Release: read `docs/RELEASING.md`
## Git
- Safe by default: `git status/diff/log`. Push only when user asks.
- `git checkout` ok for PR review / explicit request.
- Branch changes require user consent.
- Destructive ops forbidden unless explicit (`reset --hard`, `clean`, `restore`, `rm`, …).
- Don't delete/rename unexpected stuff; stop + ask.
- No repo-wide S/R scripts; keep edits small/reviewable.
- Avoid manual `git stash`; if Git auto-stashes during pull/rebase, that's fine (hint, not hard guardrail).
- If the user types a command ("pull and push"), that's consent for that command.
- No amend unless asked.
- Big review: `git --no-pager diff --color=never`.
- Multi-agent: check `git status/diff` before edits; ship small commits.
## Language/Stack Notes
- Swift: use workspace helper/daemon; validate `swift build` + tests; keep concurrency attrs right.
- TypeScript: use repo PM; keep files small; follow existing patterns.
## macOS Permissions / Signing (TCC)
- Never re-sign / ad-hoc sign / change bundle ID as “debug” without explicit ok (can mess TCC).
<!-- BACKLOG.MD MCP GUIDELINES START -->
@@ -17,6 +74,7 @@ This project uses Backlog.md MCP for all task and project management activities.
- **When to read it**: BEFORE creating tasks, or when you're unsure whether to track work
These guides cover:
- Decision framework for when to create tasks
- Search-first workflow to avoid duplicates
- Links to detailed guides for task creation, execution, and finalization

72
CHANGELOG.md Normal file
View File

@@ -0,0 +1,72 @@
# Changelog
## v0.5.3 (2026-03-09)
### Changed
- Release: Publish unsigned Windows `.exe` and `.zip` artifacts directly from release CI instead of routing them through SignPath.
- Release: Added `bun run build:win:unsigned` for explicit local unsigned Windows packaging.
## v0.5.2 (2026-03-09)
### Internal
- Release: Pinned the Windows SignPath submission workflow to an explicit artifact-configuration slug instead of relying on the SignPath project's default configuration.
## v0.5.1 (2026-03-09)
### Changed
- Launcher: Removed the YouTube subtitle generation mode switch so YouTube playback always preloads subtitles before mpv starts.
### Fixed
- Launcher: Hardened YouTube AI subtitle fixing so fenced SRT output and text-only one-cue-per-block responses can still be applied without losing original cue timing.
- Launcher: Skipped AniSkip lookup during URL playback and YouTube subtitle-preload playback, limiting AniSkip to local file targets where it can actually resolve anime metadata.
- Launcher: Keep the background SubMiner process running after a launcher-managed mpv session exits so the next mpv instance can reconnect without restarting the app.
- Launcher: Reuse prior tokenization readiness after the background app is already warm so reopening a video does not pause again waiting for duplicate warmup completion.
- Windows: Acquire the app single-instance lock earlier so Windows overlay/video launches reuse the running background SubMiner process instead of booting a second full app and repeating startup warmups.
## v0.3.0 (2026-03-05)
- Added keyboard-driven Yomitan navigation and popup controls, including optional auto-pause.
- Added subtitle/jump keyboard handling fixes for smoother subtitle playback control.
- Improved Anki/Yomitan reliability with stronger Yomitan proxy syncing and safer extension refresh logic.
- Added Subsync `replace` option and deterministic retime naming for subtitle workflows.
- Moved aniskip resolution to launcher-script options for better control.
- Tuned tokenizer frequency highlighting filters for improved term visibility.
- Added release build quality-of-life for CLI publish (`gh`-based clobber upload).
- Removed docs Plausible integration and cleaned associated tracker settings.
## v0.2.3 (2026-03-02)
- Added performance and tokenization optimizations (faster warmup, persistent MeCab usage, reduced enrichment lookups).
- Added subtitle controls for no-jump delay shifts.
- Improved subtitle highlight logic with priority and reliability fixes.
- Fixed plugin loading behavior to keep OSD visible during startup.
- Fixed Jellyfin remote resume behavior and improved autoplay/tokenization interaction.
- Updated startup flow to load dictionaries asynchronously and unblock first tokenization sooner.
## v0.2.2 (2026-03-01)
- Improved subtitle highlighting reliability for frequency modes.
- Fixed Jellyfin misc info formatting cleanup.
- Version bump maintenance for 0.2.2.
## v0.2.1 (2026-03-01)
- Delivered Jellyfin and Subsync fixes from release patch cycle.
- Version bump maintenance for 0.2.1.
## v0.2.0 (2026-03-01)
- Added task-related release work for the overlay 2.0 cycle.
- Introduced Overlay 2.0.
- Improved release automation reliability.
## v0.1.2 (2026-02-24)
- Added encrypted AniList token handling and default GNOME keyring support.
- Added launcher passthrough for password-store flows (Jellyfin path).
- Updated docs for auth and integration behavior.
- Version bump maintenance for 0.1.2.
## v0.1.1 (2026-02-23)
- Fixed overlay modal focus handling (`grab input`) behavior.
- Version bump maintenance for 0.1.1.
## v0.1.0 (2026-02-23)
- Bootstrapped Electron runtime, services, and composition model.
- Added runtime asset packaging and dependency vendoring.
- Added project docs baseline, setup guides, architecture notes, and submodule/runtime assets.
- Added CI release job dependency ordering fixes before launcher build.

View File

@@ -1,4 +1,4 @@
.PHONY: help deps build build-launcher install build-linux build-macos build-macos-unsigned clean install-linux install-macos install-plugin uninstall uninstall-linux uninstall-macos print-dirs pretty ensure-bun generate-config generate-example-config dev-start dev-start-macos dev-watch dev-watch-macos dev-toggle dev-stop
.PHONY: help deps build build-launcher install build-linux build-macos build-macos-unsigned clean install-linux install-macos install-windows install-plugin uninstall uninstall-linux uninstall-macos uninstall-windows print-dirs pretty ensure-bun generate-config generate-example-config dev-start dev-start-macos dev-watch dev-watch-macos dev-toggle dev-stop
APP_NAME := subminer
THEME_SOURCE := assets/themes/subminer.rasi
@@ -20,11 +20,6 @@ MACOS_DATA_DIR ?= $(HOME)/Library/Application Support/SubMiner
MACOS_APP_DIR ?= $(HOME)/Applications
MACOS_APP_DEST ?= $(MACOS_APP_DIR)/SubMiner.app
# mpv plugin install directories.
MPV_CONFIG_DIR ?= $(HOME)/.config/mpv
MPV_SCRIPTS_DIR ?= $(MPV_CONFIG_DIR)/scripts
MPV_SCRIPT_OPTS_DIR ?= $(MPV_CONFIG_DIR)/script-opts
# If building from source, the AppImage will typically land in release/.
APPIMAGE_SRC := $(firstword $(wildcard release/SubMiner-*.AppImage))
MACOS_APP_SRC := $(firstword $(wildcard release/*.app release/*/*.app))
@@ -41,6 +36,17 @@ else
PLATFORM := unknown
endif
WINDOWS_APPDATA ?= $(if $(APPDATA),$(subst \,/,$(APPDATA)),$(HOME)/AppData/Roaming)
# mpv plugin install directories.
ifeq ($(PLATFORM),windows)
MPV_CONFIG_DIR ?= $(WINDOWS_APPDATA)/mpv
else
MPV_CONFIG_DIR ?= $(HOME)/.config/mpv
endif
MPV_SCRIPTS_DIR ?= $(MPV_CONFIG_DIR)/scripts
MPV_SCRIPT_OPTS_DIR ?= $(MPV_CONFIG_DIR)/script-opts
help:
@printf '%s\n' \
"Targets:" \
@@ -58,6 +64,7 @@ help:
" dev-stop Stop a running local Electron app" \
" install-linux Install Linux wrapper/theme/app artifacts" \
" install-macos Install macOS wrapper/theme/app artifacts" \
" install-windows Install Windows mpv plugin artifacts" \
" install-plugin Install mpv Lua plugin and plugin config" \
" generate-config Generate ~/.config/SubMiner/config.jsonc from centralized defaults" \
"" \
@@ -65,6 +72,7 @@ help:
" deps Install JS dependencies (root + texthooker-ui)" \
" uninstall-linux Remove Linux install artifacts" \
" uninstall-macos Remove macOS install artifacts" \
" uninstall-windows Remove Windows mpv plugin artifacts" \
" print-dirs Show resolved install locations" \
"" \
"Variables:" \
@@ -74,7 +82,7 @@ help:
" LINUX_DATA_DIR=... Override Linux app data dir" \
" MACOS_DATA_DIR=... Override macOS app data dir" \
" MACOS_APP_DIR=... Override macOS app install dir (default: $$HOME/Applications)" \
" MPV_CONFIG_DIR=... Override mpv config dir (default: $$HOME/.config/mpv)"
" MPV_CONFIG_DIR=... Override mpv config dir (default: $$HOME/.config/mpv or %APPDATA%/mpv on Windows)"
print-dirs:
@printf '%s\n' \
@@ -85,6 +93,10 @@ print-dirs:
"MACOS_DATA_DIR=$(MACOS_DATA_DIR)" \
"MACOS_APP_DIR=$(MACOS_APP_DIR)" \
"MACOS_APP_DEST=$(MACOS_APP_DEST)" \
"WINDOWS_APPDATA=$(WINDOWS_APPDATA)" \
"MPV_CONFIG_DIR=$(MPV_CONFIG_DIR)" \
"MPV_SCRIPTS_DIR=$(MPV_SCRIPTS_DIR)" \
"MPV_SCRIPT_OPTS_DIR=$(MPV_SCRIPT_OPTS_DIR)" \
"APPIMAGE_SRC=$(APPIMAGE_SRC)" \
"MACOS_APP_SRC=$(MACOS_APP_SRC)" \
"MACOS_ZIP_SRC=$(MACOS_ZIP_SRC)"
@@ -105,6 +117,7 @@ build:
@case "$(PLATFORM)" in \
linux) $(MAKE) --no-print-directory build-linux ;; \
macos) $(MAKE) --no-print-directory build-macos ;; \
windows) printf '%s\n' "[INFO] Windows builds run via: bun run build:win" ;; \
*) printf '%s\n' "[ERROR] Unsupported OS for this Makefile target: $(PLATFORM)"; exit 1 ;; \
esac
@@ -113,6 +126,7 @@ install:
@case "$(PLATFORM)" in \
linux) $(MAKE) --no-print-directory install-linux ;; \
macos) $(MAKE) --no-print-directory install-macos ;; \
windows) $(MAKE) --no-print-directory install-windows ;; \
*) printf '%s\n' "[ERROR] Unsupported OS for this Makefile target: $(PLATFORM)"; exit 1 ;; \
esac
@@ -210,18 +224,31 @@ install-macos: build-launcher
fi
@printf '%s\n' "Installed to:" " $(BINDIR)/subminer" " $(MACOS_DATA_DIR)/themes/$(THEME_FILE)" " $(MACOS_APP_DEST)"
install-windows:
@printf '%s\n' "[INFO] Installing Windows mpv plugin artifacts"
@$(MAKE) --no-print-directory install-plugin
install-plugin:
@printf '%s\n' "[INFO] Installing mpv plugin artifacts"
@install -d "$(MPV_SCRIPTS_DIR)"
@rm -f "$(MPV_SCRIPTS_DIR)/subminer.lua"
@rm -f "$(MPV_SCRIPTS_DIR)/subminer.lua" "$(MPV_SCRIPTS_DIR)/subminer-loader.lua"
@install -d "$(MPV_SCRIPTS_DIR)/subminer"
@install -d "$(MPV_SCRIPT_OPTS_DIR)"
@cp -R ./plugin/subminer/. "$(MPV_SCRIPTS_DIR)/subminer/"
@install -m 0644 "./$(PLUGIN_CONF)" "$(MPV_SCRIPT_OPTS_DIR)/subminer.conf"
@if [ "$(PLATFORM)" = "windows" ]; then \
bun ./scripts/configure-plugin-binary-path.mjs "$(MPV_SCRIPT_OPTS_DIR)/subminer.conf" "$(CURDIR)" win32; \
fi
@printf '%s\n' "Installed to:" " $(MPV_SCRIPTS_DIR)/subminer/main.lua" " $(MPV_SCRIPTS_DIR)/subminer/" " $(MPV_SCRIPT_OPTS_DIR)/subminer.conf"
# Uninstall behavior kept unchanged by default.
uninstall: uninstall-linux
uninstall:
@printf '%s\n' "[INFO] Detected platform: $(PLATFORM)"
@case "$(PLATFORM)" in \
linux) $(MAKE) --no-print-directory uninstall-linux ;; \
macos) $(MAKE) --no-print-directory uninstall-macos ;; \
windows) $(MAKE) --no-print-directory uninstall-windows ;; \
*) printf '%s\n' "[ERROR] Unsupported OS for this Makefile target: $(PLATFORM)"; exit 1 ;; \
esac
uninstall-linux:
@rm -f "$(BINDIR)/subminer" "$(BINDIR)/SubMiner.AppImage"
@@ -233,3 +260,8 @@ uninstall-macos:
@rm -f "$(MACOS_DATA_DIR)/themes/$(THEME_FILE)"
@rm -rf "$(MACOS_APP_DEST)"
@printf '%s\n' "Removed:" " $(BINDIR)/subminer" " $(MACOS_DATA_DIR)/themes/$(THEME_FILE)" " $(MACOS_APP_DEST)"
uninstall-windows:
@rm -rf "$(MPV_SCRIPTS_DIR)/subminer"
@rm -f "$(MPV_SCRIPTS_DIR)/subminer.lua" "$(MPV_SCRIPTS_DIR)/subminer-loader.lua" "$(MPV_SCRIPT_OPTS_DIR)/subminer.conf"
@printf '%s\n' "Removed:" " $(MPV_SCRIPTS_DIR)/subminer" " $(MPV_SCRIPT_OPTS_DIR)/subminer.conf"

View File

@@ -5,7 +5,7 @@
<br /><br />
[![License: GPL v3](https://img.shields.io/badge/License-GPLv3-blue.svg)](https://www.gnu.org/licenses/gpl-3.0)
[![Linux](https://img.shields.io/badge/platform-Linux%20%7C%20macOS-informational)]()
[![Linux](https://img.shields.io/badge/platform-Linux%20%7C%20macOS%20%7C%20Windows-informational)]()
[![Docs](https://img.shields.io/badge/docs-docs.subminer.moe-blueviolet)](https://docs.subminer.moe)
</div>
@@ -54,15 +54,22 @@ chmod +x ~/.local/bin/subminer
> [!NOTE]
> The `subminer` wrapper uses a [Bun](https://bun.sh) shebang. Make sure `bun` is on your `PATH`.
**From source** or **macOS** — initialize submodules first (`git submodule update --init --recursive`). Source builds now also require Node.js 22 + npm because bundled Yomitan is built from the `vendor/subminer-yomitan` submodule into `build/yomitan` during `bun run build`. Full install guide: [docs.subminer.moe/installation#from-source](https://docs.subminer.moe/installation#from-source).
**macOS (DMG/ZIP):** download the latest packaged build from [GitHub Releases](https://github.com/ksyasuda/SubMiner/releases/latest) and drag `SubMiner.app` into `/Applications`.
**Windows (Installer/ZIP):** download the latest `SubMiner-<version>.exe` installer or portable `.zip` from [GitHub Releases](https://github.com/ksyasuda/SubMiner/releases/latest). Keep `mpv` installed and available on `PATH`.
**From source** — initialize submodules first (`git submodule update --init --recursive`). Bundled Yomitan is built from the `vendor/subminer-yomitan` submodule into `build/yomitan` during `bun run build`, so source builds only need Bun for the JS toolchain. Packaged macOS and Windows installs do not require Bun. Windows installer builds go through `electron-builder`; its bundled `app-builder-lib` NSIS templates already use the third-party `WinShell` plugin for shortcut AppUserModelID assignment, and the `WinShell.dll` binary is supplied by electron-builder's cached `nsis-resources` bundle, so `bun run build:win` does not need a separate repo-local plugin install step. Full install guide: [docs.subminer.moe/installation#from-source](https://docs.subminer.moe/installation#from-source).
### 2. Launch the app once
```bash
# Linux
SubMiner.AppImage
```
On first launch, SubMiner now:
On macOS, launch `SubMiner.app`. On Windows, launch `SubMiner.exe` from the Start menu or install directory.
On first launch, SubMiner:
- starts in the tray/background
- creates the default config directory and `config.jsonc`
@@ -92,28 +99,19 @@ subminer --start video.mkv # optional explicit overlay start when plugin auto_st
| Required | Optional |
| ------------------------------------------ | -------------------------------------------------- |
| `bun`, `node` 22, `npm` | |
| `bun` (source builds, Linux `subminer`) | |
| `mpv` with IPC socket | `yt-dlp` |
| `ffmpeg` | `guessit` (better AniSkip title/episode detection) |
| `mecab` + `mecab-ipadic` | `fzf` / `rofi` |
| Linux: `hyprctl` or `xdotool` + `xwininfo` | `chafa`, `ffmpegthumbnailer` |
| macOS: Accessibility permission | |
Windows builds use native window tracking and do not require the Linux compositor helper tools.
## Documentation
For full guides on configuration, Anki, Jellyfin, and more, see [docs.subminer.moe](https://docs.subminer.moe).
## Testing
- Run `bun run test` or `bun run test:fast` for the default fast lane: config/core coverage plus representative entry/runtime, Anki integration, and main runtime checks.
- Run `bun run test:full` for the maintained test surface: Bun-compatible `src/**` coverage, Bun-compatible launcher unit coverage, and a Node compatibility lane for suites that depend on Electron named exports or `node:sqlite` behavior.
- Run `bun run test:node:compat` directly when you only need the Node-backed compatibility slice: `ipc`, `anki-jimaku-ipc`, `overlay-manager`, `config-validation`, `startup-config`, and runtime registry coverage.
- Run `bun run test:env` for environment-specific verification: launcher smoke/plugin checks plus the SQLite-backed immersion tracker lane.
- Run `bun run test:immersion:sqlite` when you specifically need real SQLite persistence coverage under Node with `--experimental-sqlite`.
- Run `bun run test:subtitle` for the maintained `alass`/`ffsubsync` subtitle surface.
The Bun-managed discovery lanes intentionally exclude a small set of suites that are currently Node-only because of Bun runtime/tooling gaps rather than product behavior: Electron named-export tests in `src/core/services/ipc.test.ts`, `src/core/services/anki-jimaku-ipc.test.ts`, and `src/core/services/overlay-manager.test.ts`, plus runtime/config tests in `src/main/config-validation.test.ts`, `src/main/runtime/startup-config.test.ts`, and `src/main/runtime/registry.test.ts`. `bun run test:node:compat` keeps those suites in the standard workflow instead of leaving them untracked.
## Acknowledgments
Built on the shoulders of [GameSentenceMiner](https://github.com/bpwhelan/GameSentenceMiner), [Renji's Texthooker Page](https://github.com/Renji-XD/texthooker-ui), [mpvacious](https://github.com/Ajatt-Tools/mpvacious), [Anacreon-Script](https://github.com/friedrich-de/Anacreon-Script), and [Bee's Character Dictionary](https://github.com/bee-san/Japanese_Character_Name_Dictionary). Subtitles powered by [Jimaku.cc](https://jimaku.cc). Dictionary lookups via [Yomitan](https://github.com/yomidevs/yomitan).

Binary file not shown.

Before

Width:  |  Height:  |  Size: 141 KiB

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -0,0 +1,59 @@
---
id: TASK-117
title: Prepare initial Windows release docs and version bump
status: Done
assignee:
- codex
created_date: '2026-03-08 15:17'
updated_date: '2026-03-08 15:17'
labels:
- release
- docs
- windows
dependencies: []
references:
- package.json
- README.md
- ../subminer-docs/installation.md
- ../subminer-docs/usage.md
- ../subminer-docs/changelog.md
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Prepare the initial packaged Windows release by bumping the app version and refreshing the release-facing README/backlog/docs surfaces so install and direct-command guidance no longer reads Linux-only.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 App version is bumped for the Windows release cut
- [x] #2 README and sibling docs describe Windows packaged installation alongside Linux/macOS guidance
- [x] #3 Backlog records the release-doc/version update with the modified references
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Bump the package version for the release cut.
2. Update the root README install/start guidance to mention Windows packaged builds.
3. Patch the sibling docs repo installation, usage, and changelog pages for the Windows release.
4. Record the work in Backlog and run targeted verification on the touched surfaces.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
The public README still advertised Linux/macOS only, while the sibling docs had Windows-specific runtime notes but no actual Windows install section and several direct-command examples still assumed `SubMiner.AppImage`.
Bumped `package.json` to `0.5.0`, expanded the README platform/install copy to include Windows, added a Windows install section to `../subminer-docs/installation.md`, clarified in `../subminer-docs/usage.md` that direct packaged-app examples use `SubMiner.exe` on Windows, and added a `v0.5.0` changelog entry covering the initial Windows release plus the latest overlay behavior polish.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Prepared the initial Windows release documentation pass and version bump. `package.json` now reports `0.5.0`. The root `README.md` now advertises Linux, macOS, and Windows support, includes Windows packaged-install guidance, and clarifies first-launch behavior across platforms. In the sibling docs repo, `installation.md` now includes a dedicated Windows install section, `usage.md` explains that direct packaged-app examples use `SubMiner.exe` on Windows, and `changelog.md` now includes the `v0.5.0` release notes for the initial Windows build and recent overlay behavior changes.
Verification: targeted `bun run tsc --noEmit -p tsconfig.typecheck.json` in the app repo and `bun run docs:build` in `../subminer-docs`.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,66 @@
---
id: TASK-117
title: >-
Replace YouTube subtitle generation with pure TypeScript pipeline and shared
AI config
status: Done
assignee:
- codex
created_date: '2026-03-08 03:16'
updated_date: '2026-03-08 03:35'
labels: []
dependencies: []
references:
- /Users/sudacode/projects/japanese/SubMiner/launcher/youtube.ts
- /Users/sudacode/projects/japanese/SubMiner/src/anki-integration/ai.ts
- /Users/sudacode/projects/japanese/SubMiner/src/types.ts
- >-
/Users/sudacode/projects/japanese/SubMiner/src/config/definitions/defaults-integrations.ts
- >-
/Users/sudacode/projects/japanese/SubMiner/src/config/resolve/subtitle-domains.ts
- /Users/sudacode/projects/japanese/SubMiner/config.example.jsonc
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Replace the launcher YouTube subtitle generation flow with a pure TypeScript pipeline that prefers real downloadable YouTube subtitles, never uses YouTube auto-generated subtitles, locally generates missing tracks with whisper.cpp, and can optionally fix generated subtitles via a shared OpenAI-compatible AI provider config. This feature also introduces a breaking config cleanup: move provider settings to a new top-level ai section and reduce ankiConnect.ai to a boolean feature toggle.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Launcher YouTube subtitle generation prefers downloadable manual YouTube subtitles, never uses YouTube auto-generated subtitles, and locally generates only missing tracks with whisper.cpp.
- [x] #2 Generated whisper subtitle tracks can optionally be post-processed with an OpenAI-compatible AI provider using shared top-level ai config, with validation and fallback to raw whisper output on failure.
- [x] #3 Configuration is updated so top-level ai is canonical shared provider config, ankiConnect.ai is boolean-only, and youtubeSubgen includes whisperVadModel, whisperThreads, and fixWithAi.
- [x] #4 Launcher CLI/config parsing, config example, and docs reflect the new breaking config shape with no migration layer.
- [x] #5 Automated tests cover the new YouTube generation behavior, AI-fix fallback/validation behavior, shared AI config usage, and breaking config validation.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Introduce canonical top-level ai config plus youtubeSubgen runtime knobs (whisperVadModel, whisperThreads, fixWithAi) and convert ankiConnect.ai to a boolean-only toggle across types, defaults, validation, option registries, launcher config parsing, and config example/docs.
2. Extract shared OpenAI-compatible AI client helpers from the current Anki translation code, including base URL normalization, API key / apiKeyCommand resolution, timeout handling, and response text extraction.
3. Update Anki translation flow and hot-reload/runtime plumbing to consume global ai config while treating ankiConnect.ai as a feature gate only.
4. Replace launcher/youtube.ts with a modular launcher/youtube pipeline that fetches only manual YouTube subtitles, generates missing tracks locally with ffmpeg + whisper.cpp + optional VAD/thread controls, and preserves preprocess/automatic playback behavior.
5. Add optional AI subtitle-fix processing for whisper-generated tracks using the shared ai client, with strict SRT batching/validation and fallback to raw whisper output on provider or format failure.
6. Expand automated coverage for config validation, shared AI usage, launcher config parsing, and YouTube subtitle generation behavior including removal of yt-dlp auto-subs and AI-fix fallback rules.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Implemented pure TypeScript launcher/youtube pipeline modules for manual subtitle fetch, audio extraction, whisper runs, SRT utilities, and optional AI subtitle fixing. Removed yt-dlp auto-subtitle usage from the generation path.
Added shared top-level ai config plus shared AI client helpers; converted ankiConnect.ai to a boolean feature gate and updated Anki runtime wiring to consume global ai config.
Updated launcher config parsing, config template sections, and config.example.jsonc for the breaking config shape including youtubeSubgen.whisperVadModel, youtubeSubgen.whisperThreads, and youtubeSubgen.fixWithAi.
Verification: bun run test:config:src passed; targeted AI/Anki/runtime tests passed; bun run typecheck passed. bun run test:launcher:unit:src reported one unrelated existing failure in launcher/aniskip-metadata.test.ts (resolveAniSkipMetadataForFile resolves MAL id and intro payload).
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Replaced the launcher YouTube subtitle flow with a modular TypeScript pipeline that prefers manual YouTube subtitles, transcribes only missing tracks with whisper.cpp, and can optionally post-fix whisper output through a shared OpenAI-compatible AI client with strict SRT validation/fallback. Introduced canonical top-level ai config, reduced ankiConnect.ai to a boolean feature gate, updated launcher/config parsing and checked-in config artifacts, and added coverage for YouTube orchestration, whisper args, SRT validation, AI fix behavior, and breaking config validation.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,56 @@
---
id: TASK-117.1
title: Harden AI subtitle fix against non-SRT model responses
status: Done
assignee:
- '@codex'
created_date: '2026-03-08 08:22'
updated_date: '2026-03-08 08:25'
labels: []
dependencies: []
references:
- >-
/Users/sudacode/projects/japanese/SubMiner/launcher/youtube/subtitle-fix-ai.ts
- /Users/sudacode/projects/japanese/SubMiner/launcher/youtube/srt.ts
- >-
/Users/sudacode/projects/japanese/SubMiner/launcher/youtube/subtitle-fix-ai.test.ts
parent_task_id: TASK-117
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Prevent optional YouTube AI subtitle post-processing from bailing out whenever the model returns usable cue text in a non-SRT wrapper or text-only format. The launcher should recover safe cases, preserve original timing, and fall back cleanly when the response cannot be mapped back to the source cues.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 AI subtitle fixing accepts safe AI responses that omit SRT framing but still provide one corrected text payload per original cue while preserving original cue timing.
- [x] #2 AI subtitle fixing still rejects responses that cannot be mapped back to the original cue batch without guessing and falls back to the raw subtitle file with a warning.
- [x] #3 Automated tests cover wrapped-SRT and text-only AI responses plus an unrecoverable invalid response case.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Add failing tests in launcher/youtube/subtitle-fix-ai.test.ts for three cases: wrapped valid SRT, text-only one-block-per-cue output, and unrecoverable invalid output.
2. Extend launcher/youtube/subtitle-fix-ai.ts with a small response-normalization path that first strips markdown/code-fence wrappers, then accepts deterministic text-only cue batches only when they map 1:1 to the original cues without changing timestamps.
3. Keep existing safety rules: preserve cue count and timing, log a warning, and fall back to the raw subtitle file when normalization cannot recover a trustworthy batch.
4. Run focused launcher unit tests for subtitle-fix-ai and SRT parsing; expand only if the change affects adjacent behavior.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Implemented deterministic AI subtitle-response recovery for fenced SRT, embedded SRT payloads, and text-only 1:1 cue batches while preserving original timing and existing fallback behavior.
Verification: bun test launcher/youtube/*.test.ts passed; bun run typecheck passed; repo-wide format check still reports unrelated pre-existing warnings in launcher/youtube/orchestrator.ts and scripts/build-changelog*.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Hardened the launcher AI subtitle-fix path so it can recover deterministic non-SRT model responses instead of immediately falling back. Added `parseAiSubtitleFixResponse` in `launcher/youtube/subtitle-fix-ai.ts` to normalize markdown-fenced or embedded SRT payloads first, then accept text-only responses only when they map 1:1 onto the original cue batch and preserve source timings. Added regression coverage in `launcher/youtube/subtitle-fix-ai.test.ts` for fenced SRT, text-only cue batches, and unrecoverable invalid output, plus a changelog fragment in `changes/task-117.1.md`.
Verification: `bun test launcher/youtube/*.test.ts`, `bun run typecheck`, `bunx prettier --check launcher/youtube/subtitle-fix-ai.ts launcher/youtube/subtitle-fix-ai.test.ts`, and `bun run changelog:lint` passed. Repo-wide `bun run format:check:src` still reports unrelated pre-existing warnings in `launcher/youtube/orchestrator.ts` and `scripts/build-changelog*`.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,64 @@
---
id: TASK-118
title: Add Windows release build and SignPath signing
status: Done
assignee:
- codex
created_date: '2026-03-08 15:17'
updated_date: '2026-03-08 15:17'
labels:
- release
- windows
- signing
dependencies: []
references:
- .github/workflows/release.yml
- build/installer.nsh
- build/signpath-windows-artifact-config.xml
- package.json
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Extend the tag-driven release workflow so Windows artifacts are built on GitHub-hosted runners and submitted to SignPath for free open-source Authenticode signing, while preserving the existing macOS notarization path.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Release workflow builds Windows installer and ZIP artifacts on `windows-latest`
- [x] #2 Workflow submits unsigned Windows artifacts to SignPath and uploads the signed outputs for release publication
- [x] #3 Repository includes a checked-in SignPath artifact-configuration source of truth for the Windows release files
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Inspect the existing release workflow and current Windows packaging configuration.
2. Add a Windows release job that builds unsigned artifacts, uploads them as a workflow artifact, and submits them to SignPath.
3. Update the release aggregation job to publish signed Windows assets and mention Windows install steps in the generated release notes.
4. Check in the Windows SignPath artifact configuration XML used to define what gets signed.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
The repository already had Windows packaging configuration (`build:win`, NSIS include script, Windows helper asset packaging), but the release workflow still built Linux and macOS only.
Added a `build-windows` job to `.github/workflows/release.yml` that runs on `windows-latest`, validates required SignPath secrets, builds unsigned Windows artifacts, uploads them with `actions/upload-artifact@v4`, and then calls the official `signpath/github-action-submit-signing-request@v2` action to retrieve signed outputs.
Checked in `build/signpath-windows-artifact-config.xml` as the source-of-truth artifact configuration for SignPath. It signs the top-level NSIS installer EXE and deep-signs `.exe` and `.dll` files inside the portable ZIP artifact.
Updated the release aggregation job to download the signed Windows artifacts and added a Windows install section to the generated GitHub release body.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Windows release publishing is now wired into the tag-driven workflow. `.github/workflows/release.yml` builds Windows artifacts on `windows-latest`, submits them to SignPath using the official GitHub action, and publishes the signed `.exe` and `.zip` outputs alongside the Linux and macOS artifacts. The workflow now requests the additional `actions: read` permission required by the SignPath GitHub integration, and the generated release notes now include Windows installation steps.
The checked-in `build/signpath-windows-artifact-config.xml` file defines the SignPath artifact structure expected by the workflow artifact ZIP: sign the top-level `SubMiner-*.exe` installer and deep-sign `.exe` and `.dll` files inside `SubMiner-*.zip`.
Verification: workflow/static changes were checked with `git diff --check` on the touched files. Actual signing requires configured SignPath secrets and a matching artifact configuration in your SignPath project.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,64 @@
---
id: TASK-118
title: Fix GitHub release workflow publish step failure
status: Done
assignee:
- Codex
created_date: '2026-03-08 03:34'
updated_date: '2026-03-08 03:38'
labels:
- ci
- release
- github-actions
dependencies: []
references:
- /Users/sudacode/projects/japanese/SubMiner/.github/workflows/release.yml
- 'https://github.com/ksyasuda/SubMiner/actions/runs/22812335927'
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
The GitHub Actions Release workflow fails during the Publish Release step for tag releases because the gh CLI invocation passes invalid arguments when creating or editing the GitHub release. Restore successful release publication for tagged builds without changing unrelated release packaging behavior.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Tagged Release workflow completes the Publish Release step without gh CLI argument errors.
- [x] #2 Release workflow still creates or updates the GitHub release as a non-prerelease for normal version tags.
- [x] #3 A regression check covers the publish command shape or workflow behavior that caused this failure.
- [x] #4 Any release workflow behavior change is documented in repository docs or workflow comments if needed.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Add a targeted regression test for .github/workflows/release.yml that fails if the publish step passes an argument to the gh --prerelease boolean flag or otherwise omits explicit non-prerelease behavior.
2. Run the targeted test to confirm the current workflow fails for the expected reason.
3. Patch the Publish Release step in .github/workflows/release.yml to remove the invalid gh CLI usage while preserving non-prerelease release creation/update behavior.
4. Re-run the targeted regression test and any relevant lightweight verification, then record results in task notes.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Identified root cause from GitHub Actions run 22812335927: Publish Release failed with `accepts 1 arg(s), received 2` because the workflow passed a value to gh's boolean prerelease flag.
Added a workflow comment clarifying that omitting the prerelease flag keeps normal releases as non-prerelease releases.
Added src/release-workflow.test.ts and wired it into `bun run test:fast` so CI catches the invalid workflow shape before the next tag.
Verification: `bun test src/release-workflow.test.ts`, `bun run typecheck`, and `bun run test:fast` all passed locally.
The code-review pass found no issues; the remaining caveat is that prerelease tag semantics are still not modeled for tags like `v1.0.0-beta.1`, which is outside the scope of this fix.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Fixed the GitHub Actions release publish step so tagged releases no longer fail on invalid gh CLI usage. The workflow now omits the prerelease flag when creating or editing normal releases, which preserves existing non-prerelease behavior and avoids the `accepts 1 arg(s), received 2` failure seen in run 22812335927.
Added a small regression test that reads `.github/workflows/release.yml` and asserts the publish step does not set the prerelease flag, then included that test in `bun run test:fast` so the main verification lane catches this class of workflow regression before the next release.
Validation run locally: `bun test src/release-workflow.test.ts`, `bun run typecheck`, and `bun run test:fast`. Residual risk: prerelease-tag semantics remain unchanged for tags such as `v1.0.0-beta.1`; this fix is intentionally scoped to restoring normal tagged release publication.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,37 @@
---
id: TASK-119
title: Add Jellyfin remote-session subtitle streaming to texthooker
status: To Do
assignee: []
created_date: '2026-03-08 03:46'
labels:
- jellyfin
- texthooker
- subtitle
dependencies: []
references:
- >-
/Users/sudacode/projects/japanese/SubMiner/src/main/runtime/jellyfin-remote-commands.ts
- /Users/sudacode/projects/japanese/SubMiner/src/core/services/jellyfin.ts
- >-
/Users/sudacode/projects/japanese/SubMiner/src/core/services/subtitle-processing-controller.ts
- 'https://api.jellyfin.org/'
documentation:
- 'https://api.jellyfin.org/'
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Allow SubMiner to follow subtitles from a separate Jellyfin client session, such as a TV app, without requiring local mpv playback. The feature should fetch the active subtitle stream from Jellyfin, map the remote playback position to subtitle cues, and feed the existing subtitle tokenization plus annotated texthooker websocket pipeline so texthooker-only mode can be used while watching on another device.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 User can target a remote Jellyfin session and stream its current subtitle cue into SubMiner's existing subtitle-processing pipeline without launching local Jellyfin playback in mpv.
- [ ] #2 Texthooker-only mode can display subtitle updates from the tracked remote Jellyfin session through the existing annotation websocket feed.
- [ ] #3 Remote session changes are handled safely: item changes, subtitle-track changes, pause/seek/stop, and session disconnects clear or refresh subtitle state without crashing.
- [ ] #4 The feature degrades clearly when the remote session has no usable text subtitle stream or uses an unsupported subtitle format.
- [ ] #5 Automated tests cover session tracking, subtitle cue selection, and feed integration; user-facing docs/config docs are updated.
<!-- AC:END -->

View File

@@ -0,0 +1,35 @@
---
id: TASK-120
title: 'Replace node:sqlite with libsql and remove Yomitan Node wrapper'
status: Done
assignee: []
created_date: '2026-03-08 04:14'
updated_date: '2026-03-08 04:39'
labels:
- runtime
- bun
- sqlite
- tech-debt
dependencies: []
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Remove the remaining root Node requirement caused by immersion tracking SQLite usage and the old Yomitan build wrapper by migrating the local SQLite layer off node:sqlite, running the SQLite-backed verification lanes under Bun, and switching the vendored Yomitan build flow to Bun-native scripts.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Immersion tracker runtime no longer imports or requires node:sqlite
- [x] #2 SQLite-backed immersion tracker tests run under Bun without Node --experimental-sqlite
- [x] #3 Root build/test scripts no longer require the Yomitan Node wrapper or Node-based SQLite verification lanes
- [x] #4 README requirements/testing docs reflect the Bun-native workflow
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Replaced the immersion tracker SQLite dependency with a local libsql-backed wrapper, updated Bun/runtime compatibility tests to avoid process.exitCode side effects, switched Yomitan builds to run directly inside the vendored Bun-native project, deleted scripts/build-yomitan.mjs, and verified typecheck plus Bun build/test lanes (`build:yomitan`, `test:immersion:sqlite`, `test:runtime:compat`, `test:fast`).
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,52 @@
---
id: TASK-121
title: >-
Fix YouTube manual subtitle selection regression when downloadable tracks
exist
status: Done
assignee:
- '@codex'
created_date: '2026-03-08 05:37'
updated_date: '2026-03-08 05:42'
labels:
- bug
- youtube
- subtitles
dependencies: []
references:
- /Users/sudacode/projects/japanese/SubMiner/launcher/youtube/manual-subs.ts
- /Users/sudacode/projects/japanese/SubMiner/launcher/youtube/orchestrator.ts
- 'https://www.youtube.com/watch?v=MXzQRLmN9hE'
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Ensure launcher YouTube subtitle generation reuses downloadable manual subtitle tracks when the video already has requested languages available, instead of falling back to whisper generation. Reproduce against videos like MXzQRLmN9hE that expose manual en/ja subtitles via yt-dlp.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 When requested primary/secondary manual YouTube subtitle tracks exist, planning selects them and schedules no whisper generation for those tracks.
- [x] #2 Filename normalization handles manual subtitle outputs produced by yt-dlp for language-tagged downloads.
- [x] #3 Automated tests cover the reproduced manual en/ja selection case.
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Reproduced against https://www.youtube.com/watch?v=MXzQRLmN9hE with yt-dlp --list-subs: manual zh/en/ja/ko subtitle tracks are available from YouTube.
Adjusted launcher YouTube orchestration so detected manual subtitle tracks suppress whisper generation but are no longer materialized as external subtitle files. SubMiner now relies on the native YouTube/mpv subtitle tracks for those languages.
Added orchestration tests covering the manual-track reuse plan and ran a direct runtime probe against MXzQRLmN9hE. Probe result: primary/secondary native tracks detected, no external subtitle aliases emitted, output directory remained empty.
Verification: bun test launcher/youtube/orchestrator.test.ts launcher/config-domain-parsers.test.ts launcher/mpv.test.ts passed; bun run typecheck passed.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Fixed the YouTube subtitle regression where videos with real downloadable subtitle tracks still ended up with duplicate external subtitle files. Manual subtitle availability now suppresses whisper generation and external subtitle publication, so videos like MXzQRLmN9hE use the native YouTube/mpv subtitle tracks directly. Launcher preprocess logging was also updated to report native subtitle availability instead of misleading missing statuses.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,71 @@
---
id: TASK-122
title: Harden changelog workflow and CI enforcement
status: Done
assignee:
- Codex
created_date: '2026-03-08 06:13'
updated_date: '2026-03-08 06:28'
labels:
- release
- changelog
- ci
dependencies: []
references:
- /Users/sudacode/projects/japanese/SubMiner/scripts/build-changelog.ts
- /Users/sudacode/projects/japanese/SubMiner/scripts/build-changelog.test.ts
- /Users/sudacode/projects/japanese/SubMiner/.github/workflows/ci.yml
- /Users/sudacode/projects/japanese/SubMiner/.github/workflows/release.yml
- /Users/sudacode/projects/japanese/SubMiner/docs/RELEASING.md
- /Users/sudacode/projects/japanese/SubMiner/changes/README.md
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Improve the release changelog workflow so changelog fragments are reliable, release output is more readable, and pull requests get early feedback when changelog metadata is missing or malformed.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 `scripts/build-changelog.ts` ignores non-fragment files in `changes/` and validates fragment structure before generating changelog output.
- [x] #2 Generated `CHANGELOG.md` and `release/release-notes.md` group public changes into readable sections instead of a flat bullet list.
- [x] #3 CI enforces changelog validation on pull requests and provides an explicit opt-out path for changes that should not produce release notes.
- [x] #4 Contributor docs explain the fragment format and the PR/release workflow for changelog generation.
- [x] #5 Automated tests cover fragment parsing/building behavior and workflow enforcement expectations.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Add failing tests for changelog fragment discovery, structured fragment parsing/rendering, release-note output, and CI workflow expectations.
2. Update scripts/build-changelog.ts to ignore non-fragment files, parse fragment metadata, group generated output by change type, add lint/PR-check commands, and simplify output paths to repo-local artifacts.
3. Update CI and PR workflow files to run changelog validation on pull requests with an explicit skip path, and keep release workflow using committed changelog output.
4. Refresh changes/README.md, docs/RELEASING.md, and any PR template text so contributors know how to write fragments and when opt-out is allowed.
5. Run targeted tests and changelog commands, then record results and finalize the task.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Implemented structured changelog fragments with required `type` and `area` metadata; `changes/README.md` is now ignored by the generator and verified by regression tests.
Added `changelog:lint` and `changelog:pr-check`, plus PR CI enforcement with `skip-changelog` opt-out. PR check now reads git name-status output so deleted fragment files do not satisfy the requirement.
Changed generated changelog/release notes output to grouped sections (`Added`, `Changed`, `Fixed`, etc.) and simplified release notes to highlights + install/assets pointers.
Kept changelog output repo-local. This aligns with the existing repo direction, where docs updates happen explicitly in the sibling docs repo rather than through implicit local writes from app-repo generators.
Verification: `bun test scripts/build-changelog.test.ts src/ci-workflow.test.ts src/release-workflow.test.ts` passed; `bun run typecheck` passed; `bun run changelog:lint` passed. `bun run test:fast` still fails in unrelated existing `src/core/services/subsync.test.ts` cases (`runSubsyncManual keeps internal alass source file alive until sync finishes`, `runSubsyncManual resolves string sid values from mpv stream properties`).
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Hardened the changelog workflow end-to-end. `scripts/build-changelog.ts` now ignores helper files like `changes/README.md`, requires structured fragment metadata (`type` + `area`), groups generated release sections by change type, and emits shorter release notes focused on highlights plus install/assets pointers. Added explicit `changelog:lint` and `changelog:pr-check` commands, with PR validation based on git name-status so deleted fragment files do not satisfy the fragment requirement.
Updated contributor-facing workflow docs in `changes/README.md`, `docs/RELEASING.md`, and a new PR template so authors know to add a fragment or apply the `skip-changelog` label. CI now runs fragment linting on every run and enforces fragment presence on pull requests. Added regression coverage in `scripts/build-changelog.test.ts` and a new `src/ci-workflow.test.ts` to lock the workflow contract.
Verification completed: `bun test scripts/build-changelog.test.ts src/ci-workflow.test.ts src/release-workflow.test.ts`, `bun run typecheck`, and `bun run changelog:lint` all passed. A broader `bun run test:fast` run still fails in unrelated existing `src/core/services/subsync.test.ts` cases outside the changelog/workflow scope.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,52 @@
---
id: TASK-123
title: Add progress logging for YouTube subtitle generation phases
status: Done
assignee:
- '@codex'
created_date: '2026-03-08 07:07'
updated_date: '2026-03-08 07:15'
labels:
- ux
- logging
- youtube
- subtitles
dependencies: []
references:
- /Users/sudacode/projects/japanese/SubMiner/launcher/youtube/orchestrator.ts
- >-
/Users/sudacode/projects/japanese/SubMiner/launcher/youtube/audio-extraction.ts
- /Users/sudacode/projects/japanese/SubMiner/launcher/youtube/whisper.ts
- >-
/Users/sudacode/projects/japanese/SubMiner/launcher/youtube/subtitle-fix-ai.ts
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Improve launcher YouTube subtitle generation observability so users can tell that work is happening and roughly how long each phase is taking. Cover manual subtitle probe, audio extraction, ffmpeg prep, whisper generation, and optional AI subtitle fix phases without flooding normal logs.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Users see clear info-level phase logs for YouTube subtitle generation work including subtitle probe, fallback audio extraction, whisper, and optional AI fix phases.
- [x] #2 Long-running phases surface elapsed-time progress or explicit start/finish timing so it is obvious the process is still active.
- [x] #3 Automated tests cover the new logging/progress helper behavior where practical.
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Implemented a shared timed YouTube phase logger in launcher/youtube/progress.ts with info-level start/finish messages and warn-level failure messages that include elapsed time.
Wired phase logging into YouTube metadata probe, manual subtitle probe, fallback audio extraction, ffmpeg whisper prep, whisper primary/secondary generation, and optional AI subtitle fix phases.
Verification: bun test launcher/youtube/progress.test.ts launcher/youtube/orchestrator.test.ts passed; bun run typecheck passed.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Added clear phase-level observability for YouTube subtitle generation without noisy tool output. Users now see start/finish logs with elapsed time for subtitle probe, fallback audio extraction, ffmpeg prep, whisper generation, and optional AI subtitle-fix phases, making it obvious when generation is active and roughly how long each step took.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,76 @@
---
id: TASK-124
title: >-
Remove YouTube subtitle generation modes and make YouTube playback always
generate/load subtitles
status: Done
assignee:
- codex
created_date: '2026-03-08 07:18'
updated_date: '2026-03-08 07:28'
labels:
- launcher
- youtube
- subtitles
dependencies: []
references:
- >-
/Users/sudacode/projects/japanese/SubMiner/launcher/commands/playback-command.ts
- >-
/Users/sudacode/projects/japanese/SubMiner/launcher/config/args-normalizer.ts
- >-
/Users/sudacode/projects/japanese/SubMiner/launcher/config/youtube-subgen-config.ts
- /Users/sudacode/projects/japanese/SubMiner/launcher/types.ts
- /Users/sudacode/projects/japanese/SubMiner/config.example.jsonc
- >-
/Users/sudacode/projects/japanese/SubMiner/src/config/definitions/options-integrations.ts
- >-
/Users/sudacode/projects/japanese/SubMiner/src/config/resolve/subtitle-domains.ts
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Simplify launcher YouTube playback by removing the configurable subtitle generation mode. For YouTube targets, the launcher should treat subtitle generation/loading as the canonical behavior instead of supporting off/preprocess/automatic branches. This change should remove the unreliable automatic/background path and the mode concept from config/CLI/env/docs, while preserving the core YouTube subtitle generation pipeline and mpv loading flow.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Launcher playback no longer supports or branches on a YouTube subtitle generation mode; YouTube URLs follow a single generation-and-load flow.
- [x] #2 Configuration, CLI parsing, and environment handling no longer expose a YouTube subtitle generation mode option, and stale automatic/preprocess/off values are not part of the supported interface.
- [x] #3 Tests cover the new single-flow behavior and the removal of mode parsing/branching.
- [x] #4 User-facing config/docs/examples are updated to reflect the removed mode concept.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Remove the YouTube subtitle generation mode concept from launcher/shared types, config parsing, CLI options, and environment normalization so no supported interface accepts automatic/preprocess/off.
2. Update playback orchestration so YouTube targets always run subtitle generation/loading before mpv startup and delete the background automatic path.
3. Adjust mpv YouTube URL argument construction to no longer branch on mode while preserving subtitle/audio language behavior and preloaded subtitle file injection.
4. Add/modify tests first to cover removed mode parsing and the single YouTube preload flow, then update config/docs/examples to match the simplified interface.
5. Run focused launcher/config tests plus typecheck, then summarize any remaining gaps.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Removed launcher/shared youtubeSubgen.mode handling and collapsed YouTube playback onto a single preload-before-mpv subtitle generation flow.
Added launcher integration coverage proving YouTube subtitle generation runs before mpv startup and that the removed --mode flag now errors.
Verification: bun test launcher/config-domain-parsers.test.ts launcher/parse-args.test.ts launcher/mpv.test.ts launcher/main.test.ts src/config/config.test.ts; bun run test:config:src; bun run typecheck.
Broader repo checks still show pre-existing issues outside this change: bun run test:launcher:unit:src fails in launcher/aniskip-metadata.test.ts (MAL id assertion), and the format scope check flags the unrelated existing files launcher/youtube/orchestrator.ts, scripts/build-changelog.test.ts, and scripts/build-changelog.ts.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Removed the launcher YouTube subtitle generation mode surface so YouTube playback now always runs the subtitle generation pipeline before starting mpv. The launcher no longer accepts youtubeSubgen.mode from shared config, CLI, or env normalization, and the old automatic/background loading path has been deleted from playback.
Updated mpv YouTube startup options to keep manual subtitle discovery enabled without requesting auto subtitles, and refreshed user-facing config/docs to describe a single YouTube subtitle generation flow. Added regression coverage for mode removal, config/template cleanup, and launcher ordering so YouTube subtitle work is confirmed to happen before mpv launch.
Verification: bun test launcher/config-domain-parsers.test.ts launcher/parse-args.test.ts launcher/mpv.test.ts launcher/main.test.ts src/config/config.test.ts; bun run test:config:src; bun run typecheck. Broader unrelated repo issues remain in launcher/aniskip-metadata.test.ts and existing formatting drift in launcher/youtube/orchestrator.ts plus scripts/build-changelog files.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,35 @@
---
id: TASK-125
title: Add native AI API key secret storage
status: To Do
assignee: []
created_date: '2026-03-08 07:25'
labels:
- ai
- config
- security
dependencies: []
references:
- /Users/sudacode/projects/japanese/SubMiner/src/ai/client.ts
- >-
/Users/sudacode/projects/japanese/SubMiner/src/core/services/anilist/anilist-token-store.ts
- >-
/Users/sudacode/projects/japanese/SubMiner/src/core/services/jellyfin-token-store.ts
- /Users/sudacode/projects/japanese/SubMiner/src/main.ts
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Store the shared AI provider API key using the app's native secret-storage pattern so users do not need to keep the OpenRouter key in config files or shell commands.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Users can configure the shared AI provider without storing the API key in config.jsonc.
- [ ] #2 The app persists and reloads the shared AI API key using encrypted native secret storage when available.
- [ ] #3 Behavior is defined for existing ai.apiKey and ai.apiKeyCommand configs, including compatibility during migration.
- [ ] #4 The feature has regression tests covering key resolution and storage behavior.
- [ ] #5 User-facing configuration/docs are updated to describe the supported setup.
<!-- AC:END -->

View File

@@ -0,0 +1,43 @@
---
id: TASK-126
title: >-
Improve secondary subtitle readability with hover-only background and stronger
text separation
status: Done
assignee: []
created_date: '2026-03-08 07:35'
updated_date: '2026-03-08 07:40'
labels:
- overlay
- subtitles
- ui
dependencies: []
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Adjust overlay secondary subtitle styling so translation text stays readable on bright video backgrounds. Keep the dark background hidden by default in hover mode and show it only while hovered. Increase secondary subtitle weight to 600 and strengthen edge separation without changing primary subtitle styling.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Secondary subtitles render with stronger edge separation than today.
- [x] #2 Secondary subtitle font weight defaults to 600.
- [x] #3 When secondary subtitle mode is hover, the secondary background appears only while hovered.
- [x] #4 Primary subtitle styling behavior remains unchanged.
- [x] #5 Renderer tests cover the new secondary hover background behavior and default secondary style values.
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Adjusted secondary subtitle defaults to use stronger shadowing, 600 font weight, and a translucent dark background. Routed secondary background/backdrop styling through CSS custom properties so hover mode can keep the background hidden until the secondary subtitle is actually hovered. Added renderer and config tests covering default values and hover-only background behavior.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Improved secondary subtitle readability by strengthening default text separation, increasing the default secondary weight to 600, and making the configured dark background appear only while hovered in secondary hover mode. Added config and renderer coverage for the new defaults and hover-aware style routing.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,83 @@
---
id: TASK-127
title: Skip AniSkip lookup for YouTube and URL playback targets
status: Done
assignee:
- '@codex'
created_date: '2026-03-08 08:24'
updated_date: '2026-03-08 10:12'
labels:
- bug
- launcher
- youtube
dependencies: []
references:
- /Users/sudacode/projects/japanese/SubMiner/launcher/mpv.ts
- >-
/Users/sudacode/projects/japanese/SubMiner/launcher/commands/playback-command.ts
- /Users/sudacode/projects/japanese/SubMiner/launcher/mpv.test.ts
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Prevent launcher playback from attempting AniSkip metadata resolution when the user is playing a YouTube target or any URL target. AniSkip only works for local anime files, so URL-driven playback and YouTube subtitle-generation flows should bypass it entirely.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Launcher playback skips AniSkip metadata resolution for explicit URL targets, including YouTube URLs.
- [x] #2 YouTube subtitle-generation playback does not invoke AniSkip lookup before mpv launch.
- [x] #3 Automated launcher tests cover the URL/YouTube skip behavior.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Add a launcher mpv unit test that intercepts AniSkip resolution and proves URL/YouTube playback does not call it before spawning mpv.
2. Run the focused launcher mpv test to confirm the new case fails or exposes the current gap.
3. Patch launcher playback/AniSkip gating so URL and YouTube subtitle-generation paths always bypass AniSkip lookup.
4. Re-run focused launcher tests and record the verification results in task notes.
5. Add a Lua plugin regression test covering overlay-start on URL playback so AniSkip never runs after auto-start.
6. Patch plugin/subminer/aniskip.lua to short-circuit all AniSkip lookup triggers for remote URL media paths.
7. Re-run plugin regression plus touched launcher checks and update the task summary with the plugin-side fix.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Added explicit AniSkip gating in launcher/mpv.ts via shouldResolveAniSkipMetadata(target, targetKind, preloadedSubtitles).
URL targets now always bypass AniSkip. File targets with preloaded subtitles also bypass AniSkip, covering YouTube subtitle-preload playback.
Added launcher/mpv.test.ts coverage for local-file vs URL vs preloaded-subtitle AniSkip gating.
Verification: bun test launcher/mpv.test.ts passed.
Verification: bun run typecheck passed.
Verification: bunx prettier --check launcher/mpv.ts launcher/mpv.test.ts passed.
Verification: bun run changelog:lint passed.
Verification: bun run test:launcher:unit:src remains blocked by unrelated existing failure in launcher/aniskip-metadata.test.ts (`resolveAniSkipMetadataForFile resolves MAL id and intro payload`: expected malId 1234, got null).
Added plugin regression in scripts/test-plugin-start-gate.lua for URL playback with auto-start/overlay-start; it now asserts no MAL or AniSkip curl requests occur.
Patched plugin/subminer/aniskip.lua to short-circuit AniSkip lookup for remote media paths (`scheme://...`), which covers YouTube URL playback inside the mpv plugin lifecycle.
Verification: lua scripts/test-plugin-start-gate.lua passed.
Verification: bun run test:plugin:src passed.
Verification: bun test launcher/mpv.test.ts passed after plugin-side fix.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Fixed AniSkip suppression end-to-end for URL playback. The launcher now skips AniSkip before mpv launch, and the mpv plugin now also refuses AniSkip lookups for remote URL media during file-loaded, overlay-start, or later refresh triggers. Added regression coverage in both launcher/mpv.test.ts and scripts/test-plugin-start-gate.lua, plus a changelog fragment. Wider `bun run test:launcher:unit:src` is still blocked by the unrelated existing launcher/aniskip-metadata.test.ts MAL-id failure.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,35 @@
---
id: TASK-128
title: >-
Prevent AI subtitle fix from translating primary YouTube subtitles into the
wrong language
status: Done
assignee: []
created_date: '2026-03-08 09:02'
updated_date: '2026-03-08 09:17'
labels:
- bug
- youtube-subgen
- ai
dependencies: []
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
AI subtitle cleanup can preserve cue structure while changing subtitle language, causing primary Japanese subtitle files to come back in English. Add guards so AI-fixed subtitles preserve expected language and fall back to raw Whisper output when language drifts.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Primary AI subtitle fix rejects output that drifts away from the expected source language.
- [x] #2 Rejected AI fixes fall back to the raw Whisper subtitle without corrupting published subtitle language.
- [x] #3 Regression tests cover a primary Japanese subtitle batch being translated into English by the AI fixer.
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Added a primary-language guard to AI subtitle fixing so Japanese source subtitles are rejected if the AI rewrites them into English while preserving SRT structure. The fixer now receives the expected source language from the YouTube orchestrator, and regression coverage verifies that language drift falls back to the raw Whisper subtitle path.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,37 @@
---
id: TASK-129
title: >-
Split AI model and system prompt config between Anki and YouTube subtitle
generation
status: Done
assignee: []
created_date: '2026-03-08 09:40'
updated_date: '2026-03-08 09:57'
labels:
- config
- ai
- anki
- youtube-subgen
dependencies: []
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
The current top-level shared AI config forces Anki translation and YouTube subtitle fixing to share the same model and system prompt, which caused subtitle-fix requests to inherit a translation prompt and translate Japanese primary subtitles into English. Refactor config so provider credentials stay shared while model and system prompt can be configured per feature.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Anki integration can use its own AI model and system prompt independently of YouTube subtitle generation.
- [x] #2 YouTube subtitle generation can use its own AI model and system prompt independently of Anki integration.
- [x] #3 Existing shared provider credentials remain reusable without duplicating API key/base URL config.
- [x] #4 Config example, defaults, validation, and regression tests cover the new per-feature override shape.
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Added per-feature AI model/systemPrompt overrides for Anki and YouTube subtitle generation while keeping shared provider transport settings reusable. Anki now accepts `ankiConnect.ai` object config with `enabled`, `model`, and `systemPrompt`; YouTube subtitle generation accepts `youtubeSubgen.ai` overrides and merges them over the shared AI provider config. Updated config resolution, launcher parsing, runtime wiring, hot-reload handling, example config, and regression coverage.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,76 @@
---
id: TASK-130
title: Keep background SubMiner alive after launcher-managed mpv exits
status: Done
assignee:
- codex
created_date: '2026-03-08 10:08'
updated_date: '2026-03-08 11:00'
labels:
- bug
- launcher
- mpv
- overlay
dependencies: []
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
The launcher currently tears down the running SubMiner background process when a launcher-managed mpv session exits. Background SubMiner should remain alive so a later mpv instance can reconnect and request the overlay without restarting the app.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Closing a launcher-managed mpv session does not send `--stop` to the running SubMiner background process.
- [x] #2 Closing a launcher-managed mpv session does not SIGTERM the tracked SubMiner process just because mpv exited.
- [x] #3 Launcher cleanup still terminates mpv and launcher-owned helper children without regressing existing overlay start behavior.
- [x] #4 Automated tests cover the no-stop-on-mpv-exit behavior.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Add a launcher regression test that proves mpv exit no longer triggers SubMiner `--stop` or launcher SIGTERM of the tracked overlay process.
2. Update launcher teardown so normal mpv-session cleanup only stops mpv/helper children and preserves the background SubMiner process for future reconnects.
3. Run the focused launcher tests and smoke coverage for the affected behavior, then record results in the task.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Split launcher cleanup so normal mpv-session shutdown no longer sends `--stop` to SubMiner or SIGTERM to the tracked overlay process. Added `cleanupPlaybackSession()` for mpv/helper-child cleanup only, and switched playback finalization to use it.
Updated launcher smoke coverage to assert the background app stays alive after mpv exits, and added a focused unit regression for the new cleanup path.
Validation: `bun test launcher/mpv.test.ts launcher/smoke.e2e.test.ts` passed; `bun run typecheck` passed. `bun run test:launcher:unit:src` still reports an unrelated pre-existing failure in `launcher/aniskip-metadata.test.ts`.
Added changelog fragment `changes/task-130.md` for the launcher fix and verified it with `bun run changelog:lint`.
User verified the bug still reproduces when closing playback with `q`. Root cause narrowed further: the mpv plugin `plugin/subminer/lifecycle.lua` calls `process.stop_overlay()` on mpv `shutdown`, which still sends SubMiner `--stop` even after launcher cleanup was fixed.
Patched the remaining stop path in `plugin/subminer/lifecycle.lua`: mpv `shutdown` no longer calls `process.stop_overlay()`. Pressing mpv `q` should now preserve the background app and only tear down the mpv session.
Validation update: `lua scripts/test-plugin-start-gate.lua` passed after adding a shutdown regression, and `bun test launcher/mpv.test.ts launcher/smoke.e2e.test.ts` still passed.
Fixed a second-instance reconnect bug in `src/core/services/cli-command.ts`: `--start` on an already-initialized running instance now still updates the MPV socket path and reconnects the MPV client instead of treating the command as a no-op. This keeps the already-warmed background app reusable for later mpv launches.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Kept the background SubMiner process reusable across both mpv shutdown and later reconnects. The first fix separated launcher playback cleanup from full app shutdown. The second fix removed the mpv plugin `shutdown` stop call so default mpv `q` no longer sends SubMiner `--stop`. The third fix corrected second-instance CLI handling so `--start` on an already-running, already-initialized instance still reconnects MPV instead of being ignored.
Net effect: background SubMiner can stay alive, keep its warm state, and reconnect to later mpv instances without rerunning startup/warmup work in a fresh app instance.
Coverage now includes: launcher playback cleanup (`launcher/mpv.test.ts`), launcher smoke reconnect/keep-alive flow (`launcher/smoke.e2e.test.ts`), mpv plugin shutdown preservation (`scripts/test-plugin-start-gate.lua`), and second-instance start/reconnect behavior (`src/core/services/cli-command.test.ts`).
Tests run:
- `bun test src/core/services/cli-command.test.ts launcher/mpv.test.ts launcher/smoke.e2e.test.ts`
- `lua scripts/test-plugin-start-gate.lua`
- `bun run typecheck`
- `bun run changelog:lint`
Note: the broader `bun run test:launcher:unit:src` lane still has an unrelated pre-existing failure in `launcher/aniskip-metadata.test.ts`.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,63 @@
---
id: TASK-131
title: Avoid duplicate tokenization warmup after background startup
status: Done
assignee:
- codex
created_date: '2026-03-08 10:12'
updated_date: '2026-03-08 12:00'
labels:
- bug
dependencies: []
references:
- >-
/Users/sudacode/projects/japanese/SubMiner/src/main/runtime/composers/mpv-runtime-composer.ts
- >-
/Users/sudacode/projects/japanese/SubMiner/src/main/runtime/startup-warmups.ts
- >-
/Users/sudacode/projects/japanese/SubMiner/src/main/runtime/composers/mpv-runtime-composer.test.ts
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
When SubMiner is already running in the background and mpv is launched from the launcher or mpv plugin, the live app should reuse startup tokenization warmup state instead of re-entering the Yomitan/tokenization/annotation warmup path on first overlay use.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Background startup tokenization warmup is recorded in the runtime state used by later mpv/tokenization flows.
- [x] #2 Launching mpv from the launcher or plugin against an already-running background app does not re-run duplicate Yomitan/tokenization annotation warmup work in the live process.
- [x] #3 Regression tests cover the warmed-background path and protect against re-entering duplicate warmup work.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Add a regression test covering the case where background startup warmups already completed and a later tokenize call must not re-enter Yomitan/MeCab/dictionary warmups.
2. Update mpv tokenization warmup composition so startup background warmups and on-demand tokenization share the same completion state.
3. Run the focused composer/runtime tests and update acceptance criteria/notes with results.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Root-cause hypothesis: startup background warmups and on-demand tokenization warmups use separate state, so later mpv launch can re-enter warmup bookkeeping even though background startup already warmed dependencies.
Implemented shared warmup state between startup background warmups and on-demand tokenization warmups by forwarding scheduled Yomitan/tokenization promises into the mpv runtime composer. Added regression coverage for the warmed-background path. Verified with `bun run test:fast` plus focused composer/startup warmup tests.
Follow-up root cause from live retest: second mpv open could still pause on the startup gate because the runtime only treated full background tokenization warmup completion as reusable readiness. In practice, first-file tokenization could already be ready while slower dictionary prewarm work was still finishing, so reopening a video waited on duplicate warmup completion even though annotations were already usable.
Adjusted `src/main/runtime/composers/mpv-runtime-composer.ts` so autoplay reuse keys off a separate playback-ready latch. The latch flips true either when background warmups fully cover tokenization or when `onTokenizationReady` fires for a real subtitle line. `src/main.ts` already uses `isTokenizationWarmupReady()` to fast-signal `subminer-autoplay-ready` on a fresh media-path change, so reopened videos can now resume immediately once tokenization has succeeded once in the persistent app.
Validation update: `bun test src/core/services/cli-command.test.ts src/main/runtime/mpv-main-event-actions.test.ts src/main/runtime/composers/mpv-runtime-composer.test.ts launcher/mpv.test.ts launcher/smoke.e2e.test.ts` passed, `lua scripts/test-plugin-start-gate.lua` passed, and `bun run typecheck` passed.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Background startup tokenization warmups now feed the same in-memory warmup state used by later mpv tokenization. When the app is already running and warmed in the background, launcher/plugin-driven mpv startup reuses that state instead of re-entering Yomitan/tokenization annotation warmups. Added a regression test for the warmed-background path and verified with `bun run test:fast`.
A later follow-up fixed the remaining second-open delay: autoplay reuse no longer waits for the entire background dictionary warmup pipeline to finish. After the persistent app has produced one tokenization-ready event, later mpv reconnects reuse that readiness immediately, so reopening the same or another video does not pause again on duplicate warmup bookkeeping.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,69 @@
---
id: TASK-132
title: Gate macOS overlay shortcuts to the focused mpv window
status: Done
assignee:
- codex
created_date: '2026-03-08 18:24'
updated_date: '2026-03-08 18:55'
labels:
- bug
- macos
- shortcuts
dependencies: []
references:
- >-
/Users/sudacode/projects/japanese/SubMiner/src/core/services/overlay-shortcut.ts
- >-
/Users/sudacode/projects/japanese/SubMiner/src/window-trackers/macos-tracker.ts
- >-
/Users/sudacode/projects/japanese/SubMiner/scripts/get-mpv-window-macos.swift
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Fix the macOS shortcut handling so SubMiner overlay keybinds do not intercept system or other-app shortcuts while SubMiner is in the background. Overlay shortcuts should only be active while the tracked mpv window is present and focused, and should stop grabbing keyboard input when mpv is not the frontmost window.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 On macOS, overlay shortcuts do not trigger while mpv is not the focused/frontmost window.
- [x] #2 On macOS, overlay shortcuts remain available while the tracked mpv window is open and focused.
- [x] #3 Existing non-macOS shortcut behavior is unchanged.
- [x] #4 Automated tests cover the macOS focus-gating behavior and guard against background shortcut interception.
- [x] #5 Any user-facing docs/config notes affected by the behavior change are updated in the same task if needed.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Add a failing macOS-focused shortcut lifecycle test that proves overlay shortcuts stay inactive when the tracked mpv window exists but is not frontmost, and activate when that tracked window becomes frontmost.
2. Add a failing tracker/helper test that covers the focused/frontmost signal parsed from the macOS helper output.
3. Extend the macOS helper/tracker contract to surface both geometry and focused/frontmost state for the tracked mpv window.
4. Wire overlay shortcut activation to require both overlay runtime initialization and tracked-mpv focus on macOS, while leaving non-macOS behavior unchanged.
5. Re-run the targeted shortcut/tracker tests, then the broader related shortcut/runtime suite, and update task notes/acceptance criteria based on results.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Added a macOS-specific shortcut activation predicate so global overlay shortcuts now require both overlay runtime readiness and a focused tracked mpv window; non-macOS behavior still keys off runtime readiness only.
Extended the base window tracker with optional focus-state callbacks/getters and wired initializeOverlayRuntime to re-sync overlay shortcuts whenever tracker focus changes.
Updated the macOS helper/tracker contract to return geometry plus frontmost/focused state for the tracked mpv process and added parser coverage for focused and unfocused output.
Verified with `bun x tsc -p tsconfig.json --noEmit`, targeted shortcut/tracker tests, and `bun run test:core:src` (439 passing).
No user-facing config or documentation surface changed, so no docs update was required for this fix.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Fixed the macOS background shortcut interception bug by gating SubMiner's global overlay shortcuts on tracked mpv focus instead of overlay-runtime initialization alone. The macOS window helper now reports whether the tracked mpv process is frontmost, the tracker exposes focus change callbacks, and overlay shortcut synchronization re-runs when that focus state flips so `Ctrl+C`/`Ctrl+V` and similar shortcuts are no longer captured while mpv is in the background.
The change keeps existing non-macOS shortcut behavior unchanged. Added regression coverage for the activation decision, tracker focus-change re-sync, and macOS helper output parsing. Verification: `bun x tsc -p tsconfig.json --noEmit`, targeted shortcut/tracker tests, and `bun run test:core:src` (439 passing).
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,47 @@
---
id: TASK-133
title: Improve AniList character dictionary parity with upstream guide
status: To Do
assignee: []
created_date: '2026-03-08 21:06'
updated_date: '2026-03-08 21:35'
labels:
- dictionary
- anilist
- planning
dependencies: []
references:
- >-
https://github.com/bee-san/Japanese_Character_Name_Dictionary/blob/main/docs/agents_read_me.md
- >-
/Users/sudacode/projects/japanese/SubMiner/src/main/character-dictionary-runtime.ts
- >-
/Users/sudacode/projects/japanese/SubMiner/src/main/character-dictionary-runtime.test.ts
documentation:
- >-
/Users/sudacode/projects/japanese/SubMiner/docs/plans/2026-03-08-anilist-character-dictionary-parity-design.md
- >-
/Users/sudacode/projects/japanese/SubMiner/docs/plans/2026-03-08-anilist-character-dictionary-parity.md
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Plan and implement guide-faithful parity improvements for the AniList character dictionary flow inside SubMiner's current single-media generation path. Scope includes AniList first/last name hints, hint-aware reading generation for kanji/native names, expanded honorific coverage, 160x200 JPEG thumbnail handling, and AniList 429 retry/backoff behavior.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 AniList character queries include first/last name fields and preserve them through runtime data models.
- [ ] #2 Dictionary generation uses hint-aware name splitting and reading generation for kanji and mixed native names, not only kana-only readings.
- [ ] #3 Honorific generation is expanded substantially toward upstream coverage and is covered by regression tests.
- [ ] #4 Character and voice-actor images are resized or re-encoded to bounded JPEG thumbnails with fallback behavior.
- [ ] #5 AniList requests handle 429 responses with bounded exponential backoff and tests cover retry behavior.
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Approved design and implementation plan captured on 2026-03-08. Scope stays within current single-media AniList dictionary flow; excludes username-driven CURRENT-list fetching and Yomitan auto-update schema work.
<!-- SECTION:NOTES:END -->

View File

@@ -0,0 +1,66 @@
---
id: TASK-134
title: Harden Windows release signing against transient SignPath failures
status: Done
assignee:
- codex
created_date: '2026-03-09 00:00'
updated_date: '2026-03-08 20:23'
labels:
- ci
- release
- windows
- signing
dependencies: []
references:
- .github/workflows/release.yml
- package.json
- src/release-workflow.test.ts
- https://github.com/ksyasuda/SubMiner/actions/runs/22836585479
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
The tag-driven Release workflow currently fails the Windows lane if the SignPath connector returns transient 502 errors during submission, and the tagged build scripts also allow electron-builder to implicitly publish unsigned artifacts before the final release job runs. Harden the workflow so transient SignPath outages get bounded retries and release packaging never auto-publishes unsigned assets.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Windows release signing retries transient SignPath submission failures within the release workflow before failing the job.
- [x] #2 Release packaging scripts disable electron-builder implicit publish so build jobs do not upload unsigned assets on tag builds.
- [x] #3 Regression coverage fails if SignPath retry scaffolding or publish suppression is removed.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Add a regression test for the release workflow/package script shape covering SignPath retries and `--publish never`.
2. Patch the Windows release job to retry SignPath submission a bounded number of times and still fail hard if every attempt fails.
3. Update tagged package build scripts to disable implicit electron-builder publishing during release builds.
4. Run targeted release-workflow verification and capture any remaining manual release cleanup needed for `v0.5.0`.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
The failed Windows signing step in GitHub Actions run `22836585479` was not caused by missing secrets or an artifact-shape mismatch. The SignPath GitHub action retried repeated `502` responses from the SignPath connector for several minutes and then failed the job.
Hardened `.github/workflows/release.yml` by replacing the single SignPath submission with three bounded attempts. The second and third submissions only run if the previous attempt failed, and the job now fails with an explicit rerun message only after all three attempts fail. Signed-artifact upload is keyed to the successful attempt so the release job still consumes the normal `windows` artifact name.
Also fixed a separate release regression exposed by the same run: `electron-builder` was implicitly publishing unsigned release assets during tag builds because the packaging scripts did not set `--publish never` and the workflow injected `GH_TOKEN` into build jobs. Updated the relevant package scripts to pass `--publish never`, removed `GH_TOKEN` from the packaging jobs, and made the final publish step force `--draft=false` when editing an existing tag release so previously-created draft releases get published.
Verification: `bun test src/release-workflow.test.ts`, `bun run typecheck`, and `bun run test:fast` all passed locally after restoring the missing local `libsql` install with `bun install --frozen-lockfile`.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Windows release signing is now resilient to transient SignPath connector outages. The release workflow retries the SignPath submission up to three times before failing, and only uploads the signed Windows artifact from the attempt that succeeded.
Release packaging also no longer auto-publishes unsigned assets on tag builds. The `electron-builder` scripts now force `--publish never`, the build jobs no longer pass `GH_TOKEN` into packaging steps, and the final GitHub release publish step explicitly clears draft state when updating an existing tag release.
Validation: `bun test src/release-workflow.test.ts`, `bun run typecheck`, `bun run test:fast`.
Manual follow-up for the failed `v0.5.0` release: rerun the `Release` workflow after merging/pushing this fix, then clean up the stray draft/untagged release assets created by the failed run if they remain.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,57 @@
---
id: TASK-135
title: Cut patch release v0.5.1 for Windows signing fix
status: Done
assignee:
- codex
created_date: '2026-03-08 20:24'
updated_date: '2026-03-08 20:28'
labels:
- release
- patch
dependencies:
- TASK-134
references:
- package.json
- CHANGELOG.md
- release/release-notes.md
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Publish a patch release from the workflow-signing fix on `main` by bumping the app version, generating the committed changelog artifacts for the new version, and pushing a new `v0.5.1` tag instead of rewriting the failed `v0.5.0` tag.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Repository version metadata is updated to `0.5.1`.
- [x] #2 `CHANGELOG.md` and `release/release-notes.md` contain the committed `v0.5.1` section and released fragments are removed.
- [x] #3 New `v0.5.1` commit and tag are pushed to `origin`.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Bump the package version to `0.5.1`.
2. Run the changelog builder so `CHANGELOG.md`/`release-notes.md` match the release workflow contract.
3. Run the relevant verification commands.
4. Commit the release-prep changes, create `v0.5.1`, and push both commit and tag.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Bumped `package.json` from `0.5.0` to `0.5.1`, then ran `bun run changelog:build` so the committed release artifacts match the release workflow contract. That prepended the `v0.5.1` section to `CHANGELOG.md`, regenerated `release/release-notes.md`, and removed the consumed changelog fragments from `changes/`.
Verification before tagging: `bun run changelog:lint`, `bun run changelog:check --version 0.5.1`, `bun run typecheck`, and `bun run test:fast`.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Prepared patch release `v0.5.1` from the signing-workflow fix on `main` instead of rewriting the failed `v0.5.0` tag. Repository version metadata, changelog, and committed release notes are all aligned with the new release tag, and the consumed changelog fragments were removed.
Validation: `bun run changelog:lint`, `bun run changelog:check --version 0.5.1`, `bun run typecheck`, `bun run test:fast`.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,61 @@
---
id: TASK-136
title: Pin SignPath artifact configuration in release workflow
status: Done
assignee:
- codex
created_date: '2026-03-08 20:41'
updated_date: '2026-03-08 20:58'
labels:
- ci
- release
- windows
- signing
dependencies:
- TASK-134
references:
- .github/workflows/release.yml
- build/signpath-windows-artifact-config.xml
- src/release-workflow.test.ts
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
The Windows release workflow currently relies on the default SignPath artifact configuration configured in the SignPath UI. Pin the workflow to an explicit artifact-configuration slug so the checked-in signing configuration and CI behavior stay deterministic across future SignPath project changes.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 The Windows release workflow validates a dedicated SignPath artifact-configuration secret/input.
- [x] #2 Every SignPath submission attempt passes `artifact-configuration-slug`.
- [x] #3 Regression coverage fails if the explicit SignPath artifact-configuration binding is removed.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Add a failing workflow regression test for the explicit SignPath artifact-configuration slug.
2. Patch the Windows signing secret validation and SignPath action inputs to require the slug.
3. Run targeted release-workflow verification plus the standard fast lane.
4. Cut a new patch release so the tag-triggered release workflow runs with the pinned SignPath configuration.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Added regression coverage in `src/release-workflow.test.ts` for an explicit SignPath artifact-configuration slug so the release workflow test now fails if the slug validation or action input is removed.
Patched `.github/workflows/release.yml` so Windows signing now requires `SIGNPATH_ARTIFACT_CONFIGURATION_SLUG` during secret validation and passes `artifact-configuration-slug: ${{ secrets.SIGNPATH_ARTIFACT_CONFIGURATION_SLUG }}` on every SignPath submission attempt.
Verification: `bun test src/release-workflow.test.ts`, `bun run typecheck`, `bun run test:fast`.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
The release workflow is now pinned to an explicit SignPath artifact configuration instead of relying on whichever SignPath artifact config is marked default in the UI. Windows signing secret validation fails fast if `SIGNPATH_ARTIFACT_CONFIGURATION_SLUG` is missing, and every SignPath submission attempt now includes the pinned slug.
Validation: `bun test src/release-workflow.test.ts`, `bun run typecheck`, `bun run test:fast`.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,57 @@
---
id: TASK-137
title: Cut patch release v0.5.2 for SignPath artifact config pinning
status: Done
assignee:
- codex
created_date: '2026-03-08 20:44'
updated_date: '2026-03-08 20:58'
labels:
- release
- patch
dependencies:
- TASK-136
references:
- package.json
- CHANGELOG.md
- release/release-notes.md
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Publish a patch release from the SignPath artifact-configuration pinning change by bumping the app version, generating the committed changelog artifacts for the new version, and pushing a new `v0.5.2` tag.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Repository version metadata is updated to `0.5.2`.
- [x] #2 `CHANGELOG.md` and `release/release-notes.md` contain the committed `v0.5.2` section and consumed fragments are removed.
- [x] #3 New `v0.5.2` commit and tag are pushed to `origin`.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Add the release fragment for the SignPath configuration pinning change.
2. Bump `package.json` to `0.5.2` and run the changelog builder.
3. Run changelog/typecheck/test verification.
4. Commit the release-prep change set, create `v0.5.2`, and push commit plus tag.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Bumped `package.json` from `0.5.1` to `0.5.2`, ran `bun run changelog:build`, and committed the generated release artifacts. That prepended the `v0.5.2` section to `CHANGELOG.md`, regenerated `release/release-notes.md`, and removed the consumed `changes/signpath-artifact-config-pin.md` fragment.
Verification before tagging: `bun run changelog:lint`, `bun run changelog:check --version 0.5.2`, `bun run typecheck`, and `bun run test:fast`.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Prepared patch release `v0.5.2` so the explicit SignPath artifact-configuration pin ships on a fresh release tag. Version metadata, committed changelog artifacts, and release notes are aligned with the new patch version.
Validation: `bun run changelog:lint`, `bun run changelog:check --version 0.5.2`, `bun run typecheck`, `bun run test:fast`.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,62 @@
---
id: TASK-138
title: Publish unsigned Windows release artifacts and add local unsigned build script
status: Done
assignee:
- codex
created_date: '2026-03-09 00:00'
updated_date: '2026-03-09 00:00'
labels:
- release
- windows
dependencies: []
references:
- .github/workflows/release.yml
- package.json
- src/release-workflow.test.ts
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Stop the tag-driven release workflow from depending on SignPath and publish unsigned Windows `.exe` and `.zip` artifacts directly. Add an explicit local `build:win:unsigned` script without changing the existing `build:win` command.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Windows release CI builds unsigned artifacts without requiring SignPath secrets.
- [x] #2 The Windows release job uploads `release/*.exe` and `release/*.zip` directly as the `windows` artifact.
- [x] #3 The repo exposes a local `build:win:unsigned` script for explicit unsigned Windows packaging.
- [x] #4 Regression coverage fails if the workflow reintroduces SignPath submission or drops the unsigned script.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Update workflow regression tests to assert unsigned Windows release behavior and the new local script.
2. Patch `package.json` to add `build:win:unsigned`.
3. Patch `.github/workflows/release.yml` to build unsigned Windows artifacts and upload them directly.
4. Add the release changelog fragment and run focused verification.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Removed the Windows SignPath secret validation and submission steps from `.github/workflows/release.yml`. The Windows release job now runs `bun run build:win:unsigned` and uploads `release/*.exe` and `release/*.zip` directly as the `windows` artifact consumed by the release job.
Added `scripts/build-win-unsigned.mjs` plus the `build:win:unsigned` package script. The wrapper clears Windows code-signing environment variables and disables identity auto-discovery before invoking `electron-builder`, so release CI stays unsigned even if signing credentials are configured elsewhere.
Updated `src/release-workflow.test.ts` to assert the unsigned workflow contract and added the release changelog fragment in `changes/unsigned-windows-release-builds.md`.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Windows release CI now publishes unsigned artifacts directly and no longer depends on SignPath. Local developers also have an explicit `bun run build:win:unsigned` path for unsigned packaging without changing the existing `build:win` command.
Verification:
- `bun test src/release-workflow.test.ts`
- `bun run typecheck`
- `node --check scripts/build-win-unsigned.mjs`
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,57 @@
---
id: TASK-139
title: Cut patch release v0.5.3 for unsigned Windows release builds
status: Done
assignee:
- codex
created_date: '2026-03-09 00:00'
updated_date: '2026-03-09 00:00'
labels:
- release
- patch
dependencies:
- TASK-138
references:
- package.json
- CHANGELOG.md
- release/release-notes.md
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Publish a patch release from the unsigned Windows release-build change by bumping the app version, generating committed changelog artifacts for `v0.5.3`, and pushing the release-prep commit.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Repository version metadata is updated to `0.5.3`.
- [x] #2 `CHANGELOG.md` and `release/release-notes.md` contain the committed `v0.5.3` section and consumed fragments are removed.
- [x] #3 New `v0.5.3` release-prep commit is pushed to `origin/main`.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Bump `package.json` from `0.5.2` to `0.5.3`.
2. Run `bun run changelog:build` so committed changelog artifacts match the new patch version.
3. Run changelog/typecheck/test verification.
4. Commit the release-prep change set and push `main`.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Bumped `package.json` from `0.5.2` to `0.5.3`, ran `bun run changelog:build`, and committed the generated release artifacts. That prepended the `v0.5.3` section to `CHANGELOG.md`, regenerated `release/release-notes.md`, and removed the consumed `changes/unsigned-windows-release-builds.md` fragment.
Verification before push: `bun run changelog:lint`, `bun run changelog:check --version 0.5.3`, `bun run typecheck`, and `bun run test:fast`.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Prepared patch release `v0.5.3` so the unsigned Windows release-build change is captured in committed release metadata on `main`. Version metadata, changelog output, and release notes are aligned with the new patch version.
Validation: `bun run changelog:lint`, `bun run changelog:check --version 0.5.3`, `bun run typecheck`, `bun run test:fast`.
<!-- SECTION:FINAL_SUMMARY:END -->

153
build/installer.nsh Normal file
View File

@@ -0,0 +1,153 @@
!include "MUI2.nsh"
!include "nsDialogs.nsh"
# Resolved absolute .lnk paths for the optional "SubMiner mpv" shortcuts,
# shared by the installer page, customInstall, and customUnInstall.
Var WindowsMpvShortcutStartMenuPath
Var WindowsMpvShortcutDesktopPath
# Fills the two path Vars above for the current shell context.
# NOTE(review): MENU_FILENAME is presumably the Start Menu folder define from
# electron-builder's NSIS template — confirm; when it is absent the Start Menu
# shortcut is placed directly under $SMPROGRAMS.
!macro ResolveWindowsMpvShortcutPaths
!ifdef MENU_FILENAME
StrCpy $WindowsMpvShortcutStartMenuPath "$SMPROGRAMS\${MENU_FILENAME}\SubMiner mpv.lnk"
!else
StrCpy $WindowsMpvShortcutStartMenuPath "$SMPROGRAMS\SubMiner mpv.lnk"
!endif
StrCpy $WindowsMpvShortcutDesktopPath "$DESKTOP\SubMiner mpv.lnk"
!macroend
# Installer-only section: checkbox handles and choice state for the custom
# shortcut page. Skipped entirely when building the uninstaller.
!ifndef BUILD_UNINSTALLER
Var WindowsMpvShortcutStartMenuCheckbox
Var WindowsMpvShortcutDesktopCheckbox
# "1"/"0" flags recording whether each shortcut should be created.
Var WindowsMpvShortcutStartMenuEnabled
Var WindowsMpvShortcutDesktopEnabled
# Guard so InitializeWindowsMpvShortcutDefaults runs its detection only once.
Var WindowsMpvShortcutDefaultsInitialized
# electron-builder hook: runs at installer startup. Seeds both shortcuts as
# enabled and marks the lazily-computed defaults as not yet initialized.
!macro customInit
StrCpy $WindowsMpvShortcutStartMenuEnabled "1"
StrCpy $WindowsMpvShortcutDesktopEnabled "1"
StrCpy $WindowsMpvShortcutDefaultsInitialized "0"
!macroend
# electron-builder hook: inserts the custom shortcut-selection page after the
# directory page. Caption " " keeps the page title area blank.
!macro customPageAfterChangeDir
PageEx custom
PageCallbacks WindowsMpvShortcutPageCreate WindowsMpvShortcutPageLeave
Caption " "
PageExEnd
!macroend
# Pushes "1" onto the stack when a previous installation is registered in the
# current shell context, "0" otherwise. Clobbers $0.
# NOTE(review): assumes the installer records InstallLocation under
# Software\${APP_GUID} — confirm against the electron-builder NSIS template.
Function HasExistingInstallation
ReadRegStr $0 SHELL_CONTEXT "Software\${APP_GUID}" InstallLocation
${if} $0 == ""
Push "0"
${else}
Push "1"
${endif}
FunctionEnd
# Computes the default checkbox state exactly once per installer run.
# Fresh installs default both shortcuts to enabled; upgrades mirror whichever
# shortcuts currently exist on disk, so shortcuts the user already removed are
# not silently recreated. Clobbers $0.
Function InitializeWindowsMpvShortcutDefaults
${if} $WindowsMpvShortcutDefaultsInitialized == "1"
Return
${endif}
!insertmacro ResolveWindowsMpvShortcutPaths
Call HasExistingInstallation
Pop $0
${if} $0 == "1"
# Upgrade path: preserve the current on-disk presence of each shortcut.
${if} ${FileExists} "$WindowsMpvShortcutStartMenuPath"
StrCpy $WindowsMpvShortcutStartMenuEnabled "1"
${else}
StrCpy $WindowsMpvShortcutStartMenuEnabled "0"
${endif}
${if} ${FileExists} "$WindowsMpvShortcutDesktopPath"
StrCpy $WindowsMpvShortcutDesktopEnabled "1"
${else}
StrCpy $WindowsMpvShortcutDesktopEnabled "0"
${endif}
${else}
# Fresh install: offer both shortcuts by default.
StrCpy $WindowsMpvShortcutStartMenuEnabled "1"
StrCpy $WindowsMpvShortcutDesktopEnabled "1"
${endif}
StrCpy $WindowsMpvShortcutDefaultsInitialized "1"
FunctionEnd
# Page-create callback: builds the custom nsDialogs page with two checkboxes
# pre-set from the computed defaults. Coordinates are in dialog units.
# Clobbers $0 (used to discard handles of controls we never touch again).
Function WindowsMpvShortcutPageCreate
Call InitializeWindowsMpvShortcutDefaults
!insertmacro MUI_HEADER_TEXT "Windows mpv launcher" "Choose where to create the optional SubMiner mpv shortcuts."
nsDialogs::Create 1018
Pop $0
${NSD_CreateLabel} 0u 0u 300u 30u "SubMiner mpv launches SubMiner.exe --launch-mpv so people can open mpv with the SubMiner profile from a separate Windows shortcut."
Pop $0
${NSD_CreateCheckbox} 0u 44u 280u 12u "Create Start Menu shortcut"
# Keep the checkbox handles so the leave callback can read their state.
Pop $WindowsMpvShortcutStartMenuCheckbox
${if} $WindowsMpvShortcutStartMenuEnabled == "1"
${NSD_Check} $WindowsMpvShortcutStartMenuCheckbox
${endif}
${NSD_CreateCheckbox} 0u 64u 280u 12u "Create Desktop shortcut"
Pop $WindowsMpvShortcutDesktopCheckbox
${if} $WindowsMpvShortcutDesktopEnabled == "1"
${NSD_Check} $WindowsMpvShortcutDesktopCheckbox
${endif}
${NSD_CreateLabel} 0u 90u 300u 24u "Upgrades preserve the current SubMiner mpv shortcut locations instead of recreating shortcuts you already removed."
Pop $0
nsDialogs::Show
FunctionEnd
# Page-leave callback: copies the two checkbox states into the "1"/"0"
# enabled flags consumed later by customInstall. Clobbers $0.
Function WindowsMpvShortcutPageLeave
${NSD_GetState} $WindowsMpvShortcutStartMenuCheckbox $0
${if} $0 == ${BST_CHECKED}
StrCpy $WindowsMpvShortcutStartMenuEnabled "1"
${else}
StrCpy $WindowsMpvShortcutStartMenuEnabled "0"
${endif}
${NSD_GetState} $WindowsMpvShortcutDesktopCheckbox $0
${if} $0 == ${BST_CHECKED}
StrCpy $WindowsMpvShortcutDesktopEnabled "1"
${else}
StrCpy $WindowsMpvShortcutDesktopEnabled "0"
${endif}
FunctionEnd
# electron-builder hook: runs during installation. Creates or deletes each
# SubMiner mpv shortcut according to the user's page choices, then notifies
# the shell so icons refresh.
# NOTE(review): $appExe and ${APP_ID} are presumably provided by
# electron-builder's NSIS template — confirm against the bundled installer.nsi.
!macro customInstall
# Re-run defaults in case the custom page was skipped (e.g. silent install).
Call InitializeWindowsMpvShortcutDefaults
!insertmacro ResolveWindowsMpvShortcutPaths
${if} $WindowsMpvShortcutStartMenuEnabled == "1"
!ifdef MENU_FILENAME
CreateDirectory "$SMPROGRAMS\${MENU_FILENAME}"
!endif
CreateShortCut "$WindowsMpvShortcutStartMenuPath" "$appExe" "--launch-mpv" "$appExe" 0 "" "" "Launch mpv with the SubMiner profile"
# electron-builder's upstream NSIS templates use the same WinShell call for AppUserModelID wiring.
# WinShell.dll comes from electron-builder's cached nsis-resources bundle, so bun run build:win needs no extra repo-local setup.
ClearErrors
WinShell::SetLnkAUMI "$WindowsMpvShortcutStartMenuPath" "${APP_ID}"
${else}
# Unchecked on upgrade: make sure any stale shortcut is removed.
Delete "$WindowsMpvShortcutStartMenuPath"
${endif}
${if} $WindowsMpvShortcutDesktopEnabled == "1"
CreateShortCut "$WindowsMpvShortcutDesktopPath" "$appExe" "--launch-mpv" "$appExe" 0 "" "" "Launch mpv with the SubMiner profile"
# ClearErrors keeps the optional AUMI assignment non-fatal if the packaging environment is missing WinShell.
ClearErrors
WinShell::SetLnkAUMI "$WindowsMpvShortcutDesktopPath" "${APP_ID}"
${else}
Delete "$WindowsMpvShortcutDesktopPath"
${endif}
# SHChangeNotify(SHCNE_ASSOCCHANGED): ask Explorer to refresh shell state.
System::Call 'Shell32::SHChangeNotify(i 0x8000000, i 0, i 0, i 0)'
!macroend
!endif
# electron-builder hook: runs during uninstall. Removes both optional
# SubMiner mpv shortcuts unconditionally; Delete is a no-op when the
# file does not exist.
!macro customUnInstall
!insertmacro ResolveWindowsMpvShortcutPaths
Delete "$WindowsMpvShortcutStartMenuPath"
Delete "$WindowsMpvShortcutDesktopPath"
!macroend

View File

@@ -0,0 +1,21 @@
<?xml version="1.0" encoding="utf-8"?>
<artifact-configuration xmlns="http://signpath.io/artifact-configuration/v1">
<zip-file>
<pe-file path="SubMiner-*.exe" max-matches="unbounded">
<authenticode-sign />
</pe-file>
<zip-file path="SubMiner-*.zip" max-matches="unbounded">
<directory path="*">
<pe-file path="*.exe" max-matches="unbounded">
<authenticode-sign />
</pe-file>
<pe-file path="*.dll" max-matches="unbounded">
<authenticode-sign />
</pe-file>
<pe-file path="*.node" max-matches="unbounded">
<authenticode-sign />
</pe-file>
</directory>
</zip-file>
</zip-file>
</artifact-configuration>

View File

@@ -9,6 +9,7 @@
"commander": "^14.0.3",
"discord-rpc": "^4.0.1",
"jsonc-parser": "^3.3.1",
"libsql": "^0.5.22",
"ws": "^8.19.0",
},
"devDependencies": {
@@ -99,10 +100,30 @@
"@isaacs/fs-minipass": ["@isaacs/fs-minipass@4.0.1", "", { "dependencies": { "minipass": "7.1.2" } }, "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w=="],
"@libsql/darwin-arm64": ["@libsql/darwin-arm64@0.5.22", "", { "os": "darwin", "cpu": "arm64" }, "sha512-4B8ZlX3nIDPndfct7GNe0nI3Yw6ibocEicWdC4fvQbSs/jdq/RC2oCsoJxJ4NzXkvktX70C1J4FcmmoBy069UA=="],
"@libsql/darwin-x64": ["@libsql/darwin-x64@0.5.22", "", { "os": "darwin", "cpu": "x64" }, "sha512-ny2HYWt6lFSIdNFzUFIJ04uiW6finXfMNJ7wypkAD8Pqdm6nAByO+Fdqu8t7sD0sqJGeUCiOg480icjyQ2/8VA=="],
"@libsql/linux-arm-gnueabihf": ["@libsql/linux-arm-gnueabihf@0.5.22", "", { "os": "linux", "cpu": "arm" }, "sha512-3Uo3SoDPJe/zBnyZKosziRGtszXaEtv57raWrZIahtQDsjxBVjuzYQinCm9LRCJCUT5t2r5Z5nLDPJi2CwZVoA=="],
"@libsql/linux-arm-musleabihf": ["@libsql/linux-arm-musleabihf@0.5.22", "", { "os": "linux", "cpu": "arm" }, "sha512-LCsXh07jvSojTNJptT9CowOzwITznD+YFGGW+1XxUr7fS+7/ydUrpDfsMX7UqTqjm7xG17eq86VkWJgHJfvpNg=="],
"@libsql/linux-arm64-gnu": ["@libsql/linux-arm64-gnu@0.5.22", "", { "os": "linux", "cpu": "arm64" }, "sha512-KSdnOMy88c9mpOFKUEzPskSaF3VLflfSUCBwas/pn1/sV3pEhtMF6H8VUCd2rsedwoukeeCSEONqX7LLnQwRMA=="],
"@libsql/linux-arm64-musl": ["@libsql/linux-arm64-musl@0.5.22", "", { "os": "linux", "cpu": "arm64" }, "sha512-mCHSMAsDTLK5YH//lcV3eFEgiR23Ym0U9oEvgZA0667gqRZg/2px+7LshDvErEKv2XZ8ixzw3p1IrBzLQHGSsw=="],
"@libsql/linux-x64-gnu": ["@libsql/linux-x64-gnu@0.5.22", "", { "os": "linux", "cpu": "x64" }, "sha512-kNBHaIkSg78Y4BqAdgjcR2mBilZXs4HYkAmi58J+4GRwDQZh5fIUWbnQvB9f95DkWUIGVeenqLRFY2pcTmlsew=="],
"@libsql/linux-x64-musl": ["@libsql/linux-x64-musl@0.5.22", "", { "os": "linux", "cpu": "x64" }, "sha512-UZ4Xdxm4pu3pQXjvfJiyCzZop/9j/eA2JjmhMaAhe3EVLH2g11Fy4fwyUp9sT1QJYR1kpc2JLuybPM0kuXv/Tg=="],
"@libsql/win32-x64-msvc": ["@libsql/win32-x64-msvc@0.5.22", "", { "os": "win32", "cpu": "x64" }, "sha512-Fj0j8RnBpo43tVZUVoNK6BV/9AtDUM5S7DF3LB4qTYg1LMSZqi3yeCneUTLJD6XomQJlZzbI4mst89yspVSAnA=="],
"@malept/cross-spawn-promise": ["@malept/cross-spawn-promise@2.0.0", "", { "dependencies": { "cross-spawn": "7.0.6" } }, "sha512-1DpKU0Z5ThltBwjNySMC14g0CkbyhCaz9FkhxqNsZI6uAPJXFS8cMXlBKo26FJ8ZuW6S9GCMcR9IO5k2X5/9Fg=="],
"@malept/flatpak-bundler": ["@malept/flatpak-bundler@0.4.0", "", { "dependencies": { "debug": "4.4.3", "fs-extra": "9.1.0", "lodash": "4.17.23", "tmp-promise": "3.0.3" } }, "sha512-9QOtNffcOF/c1seMCDnjckb3R9WHcG34tky+FHpNKKCW0wc/scYLwMtO+ptyGUfMW0/b/n4qRiALlaFHc9Oj7Q=="],
"@neon-rs/load": ["@neon-rs/load@0.0.4", "", {}, "sha512-kTPhdZyTQxB+2wpiRcFWrDcejc4JI6tkPuS7UZCG4l6Zvc5kU/gGQ/ozvHTh1XR5tS+UlfAfGuPajjzQjCiHCw=="],
"@npmcli/agent": ["@npmcli/agent@3.0.0", "", { "dependencies": { "agent-base": "7.1.4", "http-proxy-agent": "7.0.2", "https-proxy-agent": "7.0.6", "lru-cache": "10.4.3", "socks-proxy-agent": "8.0.5" } }, "sha512-S79NdEgDQd/NGCay6TCoVzXSj74skRZIKJcpJjC5lOq34SZzyI6MqtiiWoiVWoVrTcGjNeC4ipbh1VIHlpfF5Q=="],
"@npmcli/fs": ["@npmcli/fs@4.0.0", "", { "dependencies": { "semver": "7.7.4" } }, "sha512-/xGlezI6xfGO9NwuJlnwz/K14qD1kCSAGtacBHnGzeAIuJGazcp45KP5NuyARXoKb7cwulAGWVsbeSxdG/cb0Q=="],
@@ -255,7 +276,7 @@
"delayed-stream": ["delayed-stream@1.0.0", "", {}, "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ=="],
"detect-libc": ["detect-libc@2.1.2", "", {}, "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ=="],
"detect-libc": ["detect-libc@2.0.2", "", {}, "sha512-UX6sGumvvqSaXgdKGUsgZWqcUyIXZ/vZTrlRT/iobiKhGL0zL4d3osHj3uqllWJK+i+sixDS/3COVEOFbupFyw=="],
"detect-node": ["detect-node@2.1.0", "", {}, "sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g=="],
@@ -433,6 +454,8 @@
"lazy-val": ["lazy-val@1.0.5", "", {}, "sha512-0/BnGCCfyUMkBpeDgWihanIAF9JmZhHBgUhEqzvf+adhNGLoP6TaiI5oF8oyb3I45P+PcnrqihSf01M0l0G5+Q=="],
"libsql": ["libsql@0.5.22", "", { "dependencies": { "@neon-rs/load": "^0.0.4", "detect-libc": "2.0.2" }, "optionalDependencies": { "@libsql/darwin-arm64": "0.5.22", "@libsql/darwin-x64": "0.5.22", "@libsql/linux-arm-gnueabihf": "0.5.22", "@libsql/linux-arm-musleabihf": "0.5.22", "@libsql/linux-arm64-gnu": "0.5.22", "@libsql/linux-arm64-musl": "0.5.22", "@libsql/linux-x64-gnu": "0.5.22", "@libsql/linux-x64-musl": "0.5.22", "@libsql/win32-x64-msvc": "0.5.22" }, "os": [ "linux", "win32", "darwin", ], "cpu": [ "arm", "x64", "arm64", ] }, "sha512-NscWthMQt7fpU8lqd7LXMvT9pi+KhhmTHAJWUB/Lj6MWa0MKFv0F2V4C6WKKpjCVZl0VwcDz4nOI3CyaT1DDiA=="],
"lodash": ["lodash@4.17.23", "", {}, "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w=="],
"log-symbols": ["log-symbols@4.1.0", "", { "dependencies": { "chalk": "4.1.2", "is-unicode-supported": "0.1.0" } }, "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg=="],
@@ -703,6 +726,8 @@
"@electron/osx-sign/isbinaryfile": ["isbinaryfile@4.0.10", "", {}, "sha512-iHrqe5shvBUcFbmZq9zOQHBoeOhZJu6RQGrDpBgenUm/Am+F3JM2MgQj+rK3Z601fzrL5gLZWtAPH2OBaSVcyw=="],
"@electron/rebuild/detect-libc": ["detect-libc@2.1.2", "", {}, "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ=="],
"@electron/rebuild/semver": ["semver@7.7.4", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA=="],
"@electron/universal/fs-extra": ["fs-extra@11.3.3", "", { "dependencies": { "graceful-fs": "4.2.11", "jsonfile": "6.2.0", "universalify": "2.0.1" } }, "sha512-VWSRii4t0AFm6ixFFmLLx1t7wS1gh+ckoa84aOeapGum0h+EZd1EhEumSB+ZdDLnEPuucsVB9oB7cxJHap6Afg=="],

21
changes/README.md Normal file
View File

@@ -0,0 +1,21 @@
# Changelog Fragments
Add one `.md` file per user-visible PR in this directory.
Use this format:
```md
type: added
area: overlay
- Added keyboard navigation for Yomitan popups.
- Added auto-pause toggle when opening the popup.
```
Rules:
- `type` required: `added`, `changed`, `fixed`, `docs`, or `internal`
- `area` required: short product area like `overlay`, `launcher`, `release`
- each non-empty body line becomes a bullet
- `README.md` is ignored by the generator
- if a PR should not produce release notes, apply the `skip-changelog` label instead of adding a fragment

View File

@@ -5,7 +5,6 @@
* Copy to $XDG_CONFIG_HOME/SubMiner/config.jsonc (or ~/.config/SubMiner/config.jsonc) and edit as needed.
*/
{
// ==========================================
// Overlay Auto-Start
// When overlay connects to mpv, automatically show overlay and hide mpv subtitles.
@@ -18,7 +17,7 @@
// ==========================================
"texthooker": {
"launchAtStartup": true, // Launch texthooker server automatically when SubMiner starts. Values: true | false
"openBrowser": true // Open browser setting. Values: true | false
"openBrowser": true, // Open browser setting. Values: true | false
}, // Configure texthooker startup launch and browser opening behavior.
// ==========================================
@@ -28,7 +27,7 @@
// ==========================================
"websocket": {
"enabled": "auto", // Built-in subtitle websocket server mode. Values: auto | true | false
"port": 6677 // Built-in subtitle websocket server port.
"port": 6677, // Built-in subtitle websocket server port.
}, // Built-in WebSocket server broadcasts subtitle text to connected clients.
// ==========================================
@@ -38,7 +37,7 @@
// ==========================================
"annotationWebsocket": {
"enabled": true, // Annotated subtitle websocket server enabled state. Values: true | false
"port": 6678 // Annotated subtitle websocket server port.
"port": 6678, // Annotated subtitle websocket server port.
}, // Dedicated annotated subtitle websocket for bundled texthooker and token-aware clients.
// ==========================================
@@ -47,7 +46,7 @@
// Set to debug for full runtime diagnostics.
// ==========================================
"logging": {
"level": "info" // Minimum log level for runtime logging. Values: debug | info | warn | error
"level": "info", // Minimum log level for runtime logging. Values: debug | info | warn | error
}, // Controls logging verbosity.
// ==========================================
@@ -61,7 +60,7 @@
"mecab": true, // Warm up MeCab tokenizer at startup. Values: true | false
"yomitanExtension": true, // Warm up Yomitan extension at startup. Values: true | false
"subtitleDictionaries": true, // Warm up subtitle dictionaries at startup. Values: true | false
"jellyfinRemoteSession": true // Warm up Jellyfin remote session at startup. Values: true | false
"jellyfinRemoteSession": true, // Warm up Jellyfin remote session at startup. Values: true | false
}, // Background warmup controls for MeCab, Yomitan, dictionaries, and Jellyfin session.
// ==========================================
@@ -82,7 +81,7 @@
"toggleSecondarySub": "CommandOrControl+Shift+V", // Toggle secondary sub setting.
"markAudioCard": "CommandOrControl+Shift+A", // Mark audio card setting.
"openRuntimeOptions": "CommandOrControl+Shift+O", // Open runtime options setting.
"openJimaku": "Ctrl+Shift+J" // Open jimaku setting.
"openJimaku": "Ctrl+Shift+J", // Open jimaku setting.
}, // Overlay keyboard shortcuts. Set a shortcut to null to disable.
// ==========================================
@@ -102,7 +101,7 @@
"secondarySub": {
"secondarySubLanguages": [], // Secondary sub languages setting.
"autoLoadSecondarySub": false, // Auto load secondary sub setting. Values: true | false
"defaultMode": "hover" // Default mode setting.
"defaultMode": "hover", // Default mode setting.
}, // Dual subtitle track options.
// ==========================================
@@ -114,7 +113,7 @@
"alass_path": "", // Alass path setting.
"ffsubsync_path": "", // Ffsubsync path setting.
"ffmpeg_path": "", // Ffmpeg path setting.
"replace": true // Replace the active subtitle file when sync completes. Values: true | false
"replace": true, // Replace the active subtitle file when sync completes. Values: true | false
}, // Subsync engine and executable paths.
// ==========================================
@@ -122,7 +121,7 @@
// Initial vertical subtitle position from the bottom.
// ==========================================
"subtitlePosition": {
"yPercent": 10 // Y percent setting.
"yPercent": 10, // Y percent setting.
}, // Initial vertical subtitle position from the bottom.
// ==========================================
@@ -159,7 +158,7 @@
"N2": "#f5a97f", // N2 setting.
"N3": "#f9e2af", // N3 setting.
"N4": "#a6e3a1", // N4 setting.
"N5": "#8aadf4" // N5 setting.
"N5": "#8aadf4", // N5 setting.
}, // Jlpt colors setting.
"frequencyDictionary": {
"enabled": false, // Enable frequency-dictionary-based highlighting based on token rank. Values: true | false
@@ -168,13 +167,7 @@
"mode": "single", // single: use one color for all matching tokens. banded: use color ramp by frequency band. Values: single | banded
"matchMode": "headword", // headword: frequency lookup uses dictionary form. surface: lookup uses subtitle-visible token text. Values: headword | surface
"singleColor": "#f5a97f", // Color used when frequencyDictionary.mode is `single`.
"bandedColors": [
"#ed8796",
"#f5a97f",
"#f9e2af",
"#8bd5ca",
"#8aadf4"
] // Five colors used for rank bands when mode is `banded` (from most common to least within topX).
"bandedColors": ["#ed8796", "#f5a97f", "#f9e2af", "#8bd5ca", "#8aadf4"], // Five colors used for rank bands when mode is `banded` (from most common to least within topX).
}, // Frequency dictionary setting.
"secondary": {
"fontFamily": "Inter, Noto Sans, Helvetica Neue, sans-serif", // Font family setting.
@@ -185,18 +178,31 @@
"wordSpacing": 0, // Word spacing setting.
"fontKerning": "normal", // Font kerning setting.
"textRendering": "geometricPrecision", // Text rendering setting.
"textShadow": "0 3px 10px rgba(0,0,0,0.69)", // Text shadow setting.
"backgroundColor": "transparent", // Background color setting.
"textShadow": "0 2px 4px rgba(0,0,0,0.95), 0 0 8px rgba(0,0,0,0.8), 0 0 16px rgba(0,0,0,0.55)", // Text shadow setting.
"backgroundColor": "rgba(20, 22, 34, 0.78)", // Background color setting.
"backdropFilter": "blur(6px)", // Backdrop filter setting.
"fontWeight": "normal", // Font weight setting.
"fontStyle": "normal" // Font style setting.
} // Secondary setting.
"fontWeight": "600", // Font weight setting.
"fontStyle": "normal", // Font style setting.
}, // Secondary setting.
}, // Primary and secondary subtitle styling.
// ==========================================
// Shared AI Provider
// Canonical OpenAI-compatible provider transport settings shared by Anki and YouTube subtitle fixing.
// ==========================================
"ai": {
"enabled": false, // Enable shared OpenAI-compatible AI provider features. Values: true | false
"apiKey": "", // Static API key for the shared OpenAI-compatible AI provider.
"apiKeyCommand": "", // Shell command used to resolve the shared AI provider API key.
"baseUrl": "https://openrouter.ai/api", // Base URL for the shared OpenAI-compatible AI provider.
"requestTimeoutMs": 15000, // Timeout in milliseconds for shared AI provider requests.
}, // Canonical OpenAI-compatible provider transport settings shared by Anki and YouTube subtitle fixing.
// ==========================================
// AnkiConnect Integration
// Automatic Anki updates and media generation options.
// Hot-reload: AI translation settings update live while SubMiner is running.
// Hot-reload: ankiConnect.ai.enabled updates live while SubMiner is running.
// Shared AI provider transport settings are read from top-level ai and typically require restart.
// Most other AnkiConnect settings still require restart.
// ==========================================
"ankiConnect": {
@@ -207,26 +213,20 @@
"enabled": true, // Enable local AnkiConnect-compatible proxy for push-based auto-enrichment. Values: true | false
"host": "127.0.0.1", // Bind host for local AnkiConnect proxy.
"port": 8766, // Bind port for local AnkiConnect proxy.
"upstreamUrl": "http://127.0.0.1:8765" // Upstream AnkiConnect URL proxied by local AnkiConnect proxy.
"upstreamUrl": "http://127.0.0.1:8765", // Upstream AnkiConnect URL proxied by local AnkiConnect proxy.
}, // Proxy setting.
"tags": [
"SubMiner"
], // Tags to add to cards mined or updated by SubMiner. Provide an empty array to disable automatic tagging.
"tags": ["SubMiner"], // Tags to add to cards mined or updated by SubMiner. Provide an empty array to disable automatic tagging.
"fields": {
"audio": "ExpressionAudio", // Audio setting.
"image": "Picture", // Image setting.
"sentence": "Sentence", // Sentence setting.
"miscInfo": "MiscInfo", // Misc info setting.
"translation": "SelectionText" // Translation setting.
"translation": "SelectionText", // Translation setting.
}, // Fields setting.
"ai": {
"enabled": false, // Enabled setting. Values: true | false
"alwaysUseAiTranslation": false, // Always use ai translation setting. Values: true | false
"apiKey": "", // Api key setting.
"model": "openai/gpt-4o-mini", // Model setting.
"baseUrl": "https://openrouter.ai/api", // Base url setting.
"targetLanguage": "English", // Target language setting.
"systemPrompt": "You are a translation engine. Return only the translated text with no explanations." // System prompt setting.
"enabled": false, // Enable AI provider usage for Anki translation/enrichment flows. Values: true | false
"model": "", // Optional model override for Anki AI translation/enrichment flows.
"systemPrompt": "", // Optional system prompt override for Anki AI translation/enrichment flows.
}, // Ai setting.
"media": {
"generateAudio": true, // Generate audio setting. Values: true | false
@@ -239,7 +239,7 @@
"animatedCrf": 35, // Animated crf setting.
"audioPadding": 0.5, // Audio padding setting.
"fallbackDuration": 3, // Fallback duration setting.
"maxMediaDuration": 30 // Max media duration setting.
"maxMediaDuration": 30, // Max media duration setting.
}, // Media setting.
"behavior": {
"overwriteAudio": true, // Overwrite audio setting. Values: true | false
@@ -247,7 +247,7 @@
"mediaInsertMode": "append", // Media insert mode setting.
"highlightWord": true, // Highlight word setting. Values: true | false
"notificationType": "osd", // Notification type setting.
"autoUpdateNewCards": true // Automatically update newly added cards. Values: true | false
"autoUpdateNewCards": true, // Automatically update newly added cards. Values: true | false
}, // Behavior setting.
"nPlusOne": {
"highlightEnabled": false, // Enable fast local highlighting for words already known in Anki. Values: true | false
@@ -256,20 +256,20 @@
"decks": [], // Decks used for N+1 known-word cache scope. Supports one or more deck names.
"minSentenceWords": 3, // Minimum sentence word count required for N+1 targeting (default: 3).
"nPlusOne": "#c6a0f6", // Color used for the single N+1 target token highlight.
"knownWord": "#a6da95" // Color used for legacy known-word highlights.
"knownWord": "#a6da95", // Color used for legacy known-word highlights.
}, // N plus one setting.
"metadata": {
"pattern": "[SubMiner] %f (%t)" // Pattern setting.
"pattern": "[SubMiner] %f (%t)", // Pattern setting.
}, // Metadata setting.
"isLapis": {
"enabled": false, // Enabled setting. Values: true | false
"sentenceCardModel": "Japanese sentences" // Sentence card model setting.
"sentenceCardModel": "Japanese sentences", // Sentence card model setting.
}, // Is lapis setting.
"isKiku": {
"enabled": false, // Enabled setting. Values: true | false
"fieldGrouping": "disabled", // Kiku duplicate-card field grouping mode. Values: auto | manual | disabled
"deleteDuplicateInAuto": true // Delete duplicate in auto setting. Values: true | false
} // Is kiku setting.
"deleteDuplicateInAuto": true, // Delete duplicate in auto setting. Values: true | false
}, // Is kiku setting.
}, // Automatic Anki updates and media generation options.
// ==========================================
@@ -279,22 +279,25 @@
"jimaku": {
"apiBaseUrl": "https://jimaku.cc", // Api base url setting.
"languagePreference": "ja", // Preferred language used in Jimaku search. Values: ja | en | none
"maxEntryResults": 10 // Maximum Jimaku search results returned.
"maxEntryResults": 10, // Maximum Jimaku search results returned.
}, // Jimaku API configuration and defaults.
// ==========================================
// YouTube Subtitle Generation
// Defaults for subminer YouTube subtitle extraction/transcription mode.
// Defaults for SubMiner YouTube subtitle generation.
// ==========================================
"youtubeSubgen": {
"mode": "automatic", // YouTube subtitle generation mode for the launcher script. Values: automatic | preprocess | off
"whisperBin": "", // Path to whisper.cpp CLI used as fallback transcription engine.
"whisperModel": "", // Path to whisper model used for fallback transcription.
"primarySubLanguages": [
"ja",
"jpn"
] // Comma-separated primary subtitle language priority used by the launcher.
}, // Defaults for subminer YouTube subtitle extraction/transcription mode.
"whisperVadModel": "", // Path to optional whisper VAD model used for subtitle generation.
"whisperThreads": 4, // Thread count passed to whisper.cpp subtitle generation runs.
"fixWithAi": false, // Use shared AI provider to post-process whisper-generated YouTube subtitles. Values: true | false
"ai": {
"model": "", // Optional model override for YouTube subtitle AI post-processing.
"systemPrompt": "", // Optional system prompt override for YouTube subtitle AI post-processing.
}, // Ai setting.
"primarySubLanguages": ["ja", "jpn"], // Comma-separated primary subtitle language priority used by the launcher.
}, // Defaults for SubMiner YouTube subtitle generation.
// ==========================================
// Anilist
@@ -314,9 +317,9 @@
"collapsibleSections": {
"description": false, // Open the Description section by default in character dictionary glossary entries. Values: true | false
"characterInformation": false, // Open the Character Information section by default in character dictionary glossary entries. Values: true | false
"voicedBy": false // Open the Voiced by section by default in character dictionary glossary entries. Values: true | false
} // Collapsible sections setting.
} // Character dictionary setting.
"voicedBy": false, // Open the Voiced by section by default in character dictionary glossary entries. Values: true | false
}, // Collapsible sections setting.
}, // Character dictionary setting.
}, // Anilist API credentials and update behavior.
// ==========================================
@@ -340,16 +343,8 @@
"pullPictures": false, // Enable Jellyfin poster/icon fetching for launcher menus. Values: true | false
"iconCacheDir": "/tmp/subminer-jellyfin-icons", // Directory used by launcher for cached Jellyfin poster icons.
"directPlayPreferred": true, // Try direct play before server-managed transcoding when possible. Values: true | false
"directPlayContainers": [
"mkv",
"mp4",
"webm",
"mov",
"flac",
"mp3",
"aac"
], // Container allowlist for direct play decisions.
"transcodeVideoCodec": "h264" // Preferred transcode video codec when direct play is unavailable.
"directPlayContainers": ["mkv", "mp4", "webm", "mov", "flac", "mp3", "aac"], // Container allowlist for direct play decisions.
"transcodeVideoCodec": "h264", // Preferred transcode video codec when direct play is unavailable.
}, // Optional Jellyfin integration for auth, browsing, and playback launch.
// ==========================================
@@ -360,7 +355,7 @@
"discordPresence": {
"enabled": false, // Enable optional Discord Rich Presence updates. Values: true | false
"updateIntervalMs": 3000, // Minimum interval between presence payload updates.
"debounceMs": 750 // Debounce delay used to collapse bursty presence updates.
"debounceMs": 750, // Debounce delay used to collapse bursty presence updates.
}, // Optional Discord Rich Presence activity card updates for current playback/study session.
// ==========================================
@@ -382,7 +377,7 @@
"telemetryDays": 30, // Telemetry retention window in days.
"dailyRollupsDays": 365, // Daily rollup retention window in days.
"monthlyRollupsDays": 1825, // Monthly rollup retention window in days.
"vacuumIntervalDays": 7 // Minimum days between VACUUM runs.
} // Retention setting.
} // Enable/disable immersion tracking.
"vacuumIntervalDays": 7, // Minimum days between VACUUM runs.
}, // Retention setting.
}, // Enable/disable immersion tracking.
}

View File

@@ -0,0 +1,77 @@
# Unsigned Windows Release Builds Implementation Plan
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
**Goal:** Publish unsigned Windows release artifacts in GitHub Actions while adding an explicit local `build:win:unsigned` script.
**Architecture:** Keep Windows packaging on `electron-builder`, but stop the release workflow from routing artifacts through SignPath. The Windows release job will build unsigned artifacts and upload them directly under the existing `windows` artifact name so the downstream release job stays stable. Local developer behavior remains unchanged except for a new explicit unsigned build script.
**Tech Stack:** GitHub Actions, Bun, Electron Builder, Node test runner
---
### Task 1: Track the workflow contract change
**Files:**
- Create: `backlog/tasks/task-138 - Publish-unsigned-Windows-release-artifacts-and-add-local-unsigned-build-script.md`
- Create: `changes/unsigned-windows-release-builds.md`
**Step 1: Write the backlog task + changelog fragment**
Document the scope: unsigned Windows release CI, new local unsigned script, no SignPath dependency.
**Step 2: Review file formatting**
Run: `sed -n '1,220p' backlog/tasks/task-138\ -\ Publish-unsigned-Windows-release-artifacts-and-add-local-unsigned-build-script.md && sed -n '1,80p' changes/unsigned-windows-release-builds.md`
Expected: task metadata matches existing backlog files; changelog fragment matches `changes/README.md` format.
### Task 2: Write failing workflow regression tests
**Files:**
- Modify: `src/release-workflow.test.ts`
**Step 1: Write the failing test**
Replace SignPath-specific workflow assertions with assertions for:
- unsigned Windows artifacts built via `bun run build:win:unsigned`
- direct `windows` artifact upload from `release/*.exe` and `release/*.zip`
- no SignPath action references
- package scripts include `build:win:unsigned`
**Step 2: Run test to verify it fails**
Run: `bun test src/release-workflow.test.ts`
Expected: FAIL because the current workflow still validates SignPath secrets and submits signing requests.
### Task 3: Patch package scripts and release workflow
**Files:**
- Modify: `package.json`
- Modify: `.github/workflows/release.yml`
**Step 1: Write minimal implementation**
- add `build:win:unsigned` that clears Windows signing env and disables auto discovery before invoking `electron-builder --win nsis zip --publish never`
- change the Windows release job to remove SignPath secret validation/submission
- build Windows artifacts with `bun run build:win:unsigned`
- upload `release/*.exe` and `release/*.zip` directly as `windows`
**Step 2: Run tests to verify they pass**
Run: `bun test src/release-workflow.test.ts`
Expected: PASS
### Task 4: Run focused verification
**Files:**
- Modify: none
**Step 1: Run focused checks**
Run: `bun test src/release-workflow.test.ts && bun run typecheck`
Expected: all green
**Step 2: Spot-check diff**
Run: `git --no-pager diff -- .github/workflows/release.yml package.json src/release-workflow.test.ts changes/unsigned-windows-release-builds.md backlog/tasks/task-138\ -\ Publish-unsigned-Windows-release-artifacts-and-add-local-unsigned-build-script.md docs/plans/2026-03-09-unsigned-windows-release-builds.md`
Expected: only scoped unsigned-Windows workflow/script/docs changes

View File

@@ -5,7 +5,7 @@ import { fail, log } from '../log.js';
import { commandExists, isYoutubeTarget, realpathMaybe, resolvePathMaybe } from '../util.js';
import { collectVideos, showFzfMenu, showRofiMenu } from '../picker.js';
import {
loadSubtitleIntoMpv,
cleanupPlaybackSession,
startMpv,
startOverlay,
state,
@@ -34,12 +34,7 @@ function checkDependencies(args: Args): void {
missing.push('yt-dlp');
}
if (
args.targetKind === 'url' &&
isYoutubeTarget(args.target) &&
args.youtubeSubgenMode !== 'off' &&
!commandExists('ffmpeg')
) {
if (args.targetKind === 'url' && isYoutubeTarget(args.target) && !commandExists('ffmpeg')) {
missing.push('ffmpeg');
}
@@ -164,22 +159,28 @@ export async function runPlaybackCommand(context: LauncherCommandContext): Promi
const isYoutubeUrl = selectedTarget.kind === 'url' && isYoutubeTarget(selectedTarget.target);
let preloadedSubtitles: { primaryPath?: string; secondaryPath?: string } | undefined;
if (isYoutubeUrl && args.youtubeSubgenMode === 'preprocess') {
log('info', args.logLevel, 'YouTube subtitle mode: preprocess');
if (isYoutubeUrl) {
log('info', args.logLevel, 'YouTube subtitle generation: preload before mpv');
const generated = await generateYoutubeSubtitles(selectedTarget.target, args);
preloadedSubtitles = {
primaryPath: generated.primaryPath,
secondaryPath: generated.secondaryPath,
};
const primaryStatus = generated.primaryPath
? 'ready'
: generated.primaryNative
? 'native'
: 'missing';
const secondaryStatus = generated.secondaryPath
? 'ready'
: generated.secondaryNative
? 'native'
: 'missing';
log(
'info',
args.logLevel,
`YouTube preprocess result: primary=${generated.primaryPath ? 'ready' : 'missing'}, secondary=${generated.secondaryPath ? 'ready' : 'missing'}`,
`YouTube subtitle result: primary=${primaryStatus}, secondary=${secondaryStatus}`,
);
} else if (isYoutubeUrl && args.youtubeSubgenMode === 'automatic') {
log('info', args.logLevel, 'YouTube subtitle mode: automatic (background)');
} else if (isYoutubeUrl) {
log('info', args.logLevel, 'YouTube subtitle mode: off');
}
const shouldPauseUntilOverlayReady =
@@ -201,26 +202,6 @@ export async function runPlaybackCommand(context: LauncherCommandContext): Promi
{ startPaused: shouldPauseUntilOverlayReady },
);
if (isYoutubeUrl && args.youtubeSubgenMode === 'automatic') {
void generateYoutubeSubtitles(selectedTarget.target, args, async (lang, subtitlePath) => {
try {
await loadSubtitleIntoMpv(mpvSocketPath, subtitlePath, lang === 'primary', args.logLevel);
} catch (error) {
log(
'warn',
args.logLevel,
`Generated subtitle ready but failed to load in mpv: ${(error as Error).message}`,
);
}
}).catch((error) => {
log(
'warn',
args.logLevel,
`Background subtitle generation failed: ${(error as Error).message}`,
);
});
}
const ready = await waitForUnixSocketReady(mpvSocketPath, 10000);
const pluginAutoStartEnabled = pluginRuntimeConfig.autoStart;
const shouldStartOverlay = args.startOverlay || args.autoStartOverlay;
@@ -264,9 +245,10 @@ export async function runPlaybackCommand(context: LauncherCommandContext): Promi
}
const finalize = (code: number | null | undefined) => {
stopOverlay(args);
processAdapter.setExitCode(code ?? 0);
resolve();
void cleanupPlaybackSession(args).finally(() => {
processAdapter.setExitCode(code ?? 0);
resolve();
});
};
if (mpvProc.exitCode !== null && mpvProc.exitCode !== undefined) {

View File

@@ -2,14 +2,32 @@ import test from 'node:test';
import assert from 'node:assert/strict';
import { parseLauncherYoutubeSubgenConfig } from './config/youtube-subgen-config.js';
import { parseLauncherJellyfinConfig } from './config/jellyfin-config.js';
import { parsePluginRuntimeConfigContent } from './config/plugin-runtime-config.js';
import {
getPluginConfigCandidates,
parsePluginRuntimeConfigContent,
} from './config/plugin-runtime-config.js';
import { getDefaultSocketPath } from './types.js';
test('parseLauncherYoutubeSubgenConfig keeps only valid typed values', () => {
const parsed = parseLauncherYoutubeSubgenConfig({
ai: {
enabled: true,
apiKey: 'shared-key',
baseUrl: 'https://openrouter.ai/api',
model: 'openrouter/shared-model',
systemPrompt: 'Legacy shared prompt.',
requestTimeoutMs: 12000,
},
youtubeSubgen: {
mode: 'preprocess',
whisperBin: '/usr/bin/whisper',
whisperModel: '/models/base.bin',
whisperVadModel: '/models/vad.bin',
whisperThreads: 6.8,
fixWithAi: true,
ai: {
model: 'openrouter/subgen-model',
systemPrompt: 'Fix subtitles only.',
},
primarySubLanguages: ['ja', 42, 'en'],
},
secondarySub: {
@@ -24,9 +42,17 @@ test('parseLauncherYoutubeSubgenConfig keeps only valid typed values', () => {
},
});
assert.equal(parsed.mode, 'preprocess');
assert.equal('mode' in parsed, false);
assert.deepEqual(parsed.primarySubLanguages, ['ja', 'en']);
assert.deepEqual(parsed.secondarySubLanguages, ['eng', 'deu']);
assert.equal(parsed.whisperVadModel, '/models/vad.bin');
assert.equal(parsed.whisperThreads, 6);
assert.equal(parsed.fixWithAi, true);
assert.equal(parsed.ai?.enabled, true);
assert.equal(parsed.ai?.apiKey, 'shared-key');
assert.equal(parsed.ai?.model, 'openrouter/subgen-model');
assert.equal(parsed.ai?.systemPrompt, 'Fix subtitles only.');
assert.equal(parsed.ai?.requestTimeoutMs, 12000);
assert.equal(parsed.jimakuLanguagePreference, 'ja');
assert.equal(parsed.jimakuMaxEntryResults, 8);
});
@@ -75,3 +101,18 @@ auto_start_pause_until_ready = off
assert.equal(parsed.autoStartVisibleOverlay, false);
assert.equal(parsed.autoStartPauseUntilReady, false);
});
// On Windows the plugin config must live under %APPDATA%\mpv\script-opts,
// and that is the only candidate path the launcher should consider.
test('getPluginConfigCandidates resolves Windows mpv script-opts path', () => {
  const candidates = getPluginConfigCandidates({
    platform: 'win32',
    homeDir: 'C:\\Users\\tester',
    appDataDir: 'C:\\Users\\tester\\AppData\\Roaming',
  });
  const expectedOnly = 'C:\\Users\\tester\\AppData\\Roaming\\mpv\\script-opts\\subminer.conf';
  assert.deepEqual(candidates, [expectedOnly]);
});
// Windows has no unix sockets, so the default IPC path must be a named pipe.
test('getDefaultSocketPath returns Windows named pipe default', () => {
  const expectedPipe = '\\\\.\\pipe\\subminer-socket';
  assert.equal(getDefaultSocketPath('win32'), expectedPipe);
});

View File

@@ -4,6 +4,7 @@ import { resolveConfigFilePath } from '../src/config/path-resolution.js';
export function resolveMainConfigPath(): string {
return resolveConfigFilePath({
appDataDir: process.env.APPDATA,
xdgConfigHome: process.env.XDG_CONFIG_HOME,
homeDir: os.homedir(),
existsSync: fs.existsSync,

View File

@@ -1,13 +1,7 @@
import fs from 'node:fs';
import path from 'node:path';
import { fail } from '../log.js';
import type {
Args,
Backend,
LauncherYoutubeSubgenConfig,
LogLevel,
YoutubeSubgenMode,
} from '../types.js';
import type { Args, Backend, LauncherYoutubeSubgenConfig, LogLevel } from '../types.js';
import {
DEFAULT_JIMAKU_API_BASE_URL,
DEFAULT_YOUTUBE_PRIMARY_SUB_LANGS,
@@ -54,14 +48,6 @@ function parseLogLevel(value: string): LogLevel {
fail(`Invalid log level: ${value} (must be debug, info, warn, or error)`);
}
function parseYoutubeMode(value: string): YoutubeSubgenMode {
const normalized = value.toLowerCase();
if (normalized === 'automatic' || normalized === 'preprocess' || normalized === 'off') {
return normalized as YoutubeSubgenMode;
}
fail(`Invalid yt-subgen mode: ${value} (must be automatic, preprocess, or off)`);
}
function parseBackend(value: string): Backend {
if (value === 'auto' || value === 'hyprland' || value === 'x11' || value === 'macos') {
return value as Backend;
@@ -91,13 +77,6 @@ function parseDictionaryTarget(value: string): string {
}
export function createDefaultArgs(launcherConfig: LauncherYoutubeSubgenConfig): Args {
const envMode = (process.env.SUBMINER_YT_SUBGEN_MODE || '').toLowerCase();
const defaultMode: YoutubeSubgenMode =
envMode === 'preprocess' || envMode === 'off' || envMode === 'automatic'
? (envMode as YoutubeSubgenMode)
: launcherConfig.mode
? launcherConfig.mode
: 'automatic';
const configuredSecondaryLangs = uniqueNormalizedLangCodes(
launcherConfig.secondarySubLanguages ?? [],
);
@@ -120,12 +99,18 @@ export function createDefaultArgs(launcherConfig: LauncherYoutubeSubgenConfig):
recursive: false,
profile: 'subminer',
startOverlay: false,
youtubeSubgenMode: defaultMode,
whisperBin: process.env.SUBMINER_WHISPER_BIN || launcherConfig.whisperBin || '',
whisperModel: process.env.SUBMINER_WHISPER_MODEL || launcherConfig.whisperModel || '',
whisperVadModel: process.env.SUBMINER_WHISPER_VAD_MODEL || launcherConfig.whisperVadModel || '',
whisperThreads: (() => {
const envValue = Number.parseInt(process.env.SUBMINER_WHISPER_THREADS || '', 10);
if (Number.isInteger(envValue) && envValue > 0) return envValue;
return launcherConfig.whisperThreads || 4;
})(),
youtubeSubgenOutDir: process.env.SUBMINER_YT_SUBGEN_OUT_DIR || DEFAULT_YOUTUBE_SUBGEN_OUT_DIR,
youtubeSubgenAudioFormat: process.env.SUBMINER_YT_SUBGEN_AUDIO_FORMAT || 'm4a',
youtubeSubgenKeepTemp: process.env.SUBMINER_YT_SUBGEN_KEEP_TEMP === '1',
youtubeFixWithAi: launcherConfig.fixWithAi === true,
jimakuApiKey: process.env.SUBMINER_JIMAKU_API_KEY || '',
jimakuApiKeyCommand: process.env.SUBMINER_JIMAKU_API_KEY_COMMAND || '',
jimakuApiBaseUrl: process.env.SUBMINER_JIMAKU_API_BASE_URL || DEFAULT_JIMAKU_API_BASE_URL,
@@ -152,6 +137,15 @@ export function createDefaultArgs(launcherConfig: LauncherYoutubeSubgenConfig):
youtubeSecondarySubLangs: secondarySubLangs,
youtubeAudioLangs,
youtubeWhisperSourceLanguage: inferWhisperLanguage(primarySubLangs, 'ja'),
aiConfig: {
enabled: launcherConfig.ai?.enabled,
apiKey: launcherConfig.ai?.apiKey,
apiKeyCommand: launcherConfig.ai?.apiKeyCommand,
baseUrl: launcherConfig.ai?.baseUrl,
model: launcherConfig.ai?.model,
systemPrompt: launcherConfig.ai?.systemPrompt,
requestTimeoutMs: launcherConfig.ai?.requestTimeoutMs,
},
useTexthooker: true,
autoStartOverlay: false,
texthookerOnly: false,
@@ -242,8 +236,6 @@ export function applyInvocationsToArgs(parsed: Args, invocations: CliInvocations
if (invocations.ytInvocation) {
if (invocations.ytInvocation.logLevel)
parsed.logLevel = parseLogLevel(invocations.ytInvocation.logLevel);
if (invocations.ytInvocation.mode)
parsed.youtubeSubgenMode = parseYoutubeMode(invocations.ytInvocation.mode);
if (invocations.ytInvocation.outDir)
parsed.youtubeSubgenOutDir = invocations.ytInvocation.outDir;
if (invocations.ytInvocation.keepTemp) parsed.youtubeSubgenKeepTemp = true;
@@ -251,6 +243,10 @@ export function applyInvocationsToArgs(parsed: Args, invocations: CliInvocations
parsed.whisperBin = invocations.ytInvocation.whisperBin;
if (invocations.ytInvocation.whisperModel)
parsed.whisperModel = invocations.ytInvocation.whisperModel;
if (invocations.ytInvocation.whisperVadModel)
parsed.whisperVadModel = invocations.ytInvocation.whisperVadModel;
if (invocations.ytInvocation.whisperThreads)
parsed.whisperThreads = invocations.ytInvocation.whisperThreads;
if (invocations.ytInvocation.ytSubgenAudioFormat) {
parsed.youtubeSubgenAudioFormat = invocations.ytInvocation.ytSubgenAudioFormat;
}

View File

@@ -16,11 +16,12 @@ export interface JellyfinInvocation {
export interface YtInvocation {
target?: string;
mode?: string;
outDir?: string;
keepTemp?: boolean;
whisperBin?: string;
whisperModel?: string;
whisperVadModel?: string;
whisperThreads?: number;
ytSubgenAudioFormat?: string;
logLevel?: string;
}
@@ -201,21 +202,27 @@ export function parseCliPrograms(
.alias('youtube')
.description('YouTube workflows')
.argument('[target]', 'YouTube URL or ytsearch: query')
.option('-m, --mode <mode>', 'Subtitle generation mode')
.option('-o, --out-dir <dir>', 'Subtitle output dir')
.option('--keep-temp', 'Keep temp files')
.option('--whisper-bin <path>', 'whisper.cpp CLI path')
.option('--whisper-model <path>', 'whisper model path')
.option('--whisper-vad-model <path>', 'whisper.cpp VAD model path')
.option('--whisper-threads <n>', 'whisper.cpp thread count')
.option('--yt-subgen-audio-format <format>', 'Audio extraction format')
.option('--log-level <level>', 'Log level')
.action((target: string | undefined, options: Record<string, unknown>) => {
ytInvocation = {
target,
mode: typeof options.mode === 'string' ? options.mode : undefined,
outDir: typeof options.outDir === 'string' ? options.outDir : undefined,
keepTemp: options.keepTemp === true,
whisperBin: typeof options.whisperBin === 'string' ? options.whisperBin : undefined,
whisperModel: typeof options.whisperModel === 'string' ? options.whisperModel : undefined,
whisperVadModel:
typeof options.whisperVadModel === 'string' ? options.whisperVadModel : undefined,
whisperThreads:
typeof options.whisperThreads === 'number' && Number.isFinite(options.whisperThreads)
? Math.floor(options.whisperThreads)
: undefined,
ytSubgenAudioFormat:
typeof options.ytSubgenAudioFormat === 'string' ? options.ytSubgenAudioFormat : undefined,
logLevel: typeof options.logLevel === 'string' ? options.logLevel : undefined,

View File

@@ -5,12 +5,36 @@ import { log } from '../log.js';
import type { LogLevel, PluginRuntimeConfig } from '../types.js';
import { DEFAULT_SOCKET_PATH } from '../types.js';
export function getPluginConfigCandidates(): string[] {
const xdgConfigHome = process.env.XDG_CONFIG_HOME || path.join(os.homedir(), '.config');
function getPlatformPath(platform: NodeJS.Platform): typeof path.posix | typeof path.win32 {
return platform === 'win32' ? path.win32 : path.posix;
}
export function getPluginConfigCandidates(options?: {
platform?: NodeJS.Platform;
homeDir?: string;
xdgConfigHome?: string;
appDataDir?: string;
}): string[] {
const platform = options?.platform ?? process.platform;
const homeDir = options?.homeDir ?? os.homedir();
const platformPath = getPlatformPath(platform);
if (platform === 'win32') {
const appDataDir =
options?.appDataDir?.trim() ||
process.env.APPDATA?.trim() ||
platformPath.join(homeDir, 'AppData', 'Roaming');
return [platformPath.join(appDataDir, 'mpv', 'script-opts', 'subminer.conf')];
}
const xdgConfigHome =
options?.xdgConfigHome?.trim() ||
process.env.XDG_CONFIG_HOME ||
platformPath.join(homeDir, '.config');
return Array.from(
new Set([
path.join(xdgConfigHome, 'mpv', 'script-opts', 'subminer.conf'),
path.join(os.homedir(), '.config', 'mpv', 'script-opts', 'subminer.conf'),
platformPath.join(xdgConfigHome, 'mpv', 'script-opts', 'subminer.conf'),
platformPath.join(homeDir, '.config', 'mpv', 'script-opts', 'subminer.conf'),
]),
);
}

View File

@@ -5,6 +5,7 @@ import { resolveConfigFilePath } from '../../src/config/path-resolution.js';
export function resolveLauncherMainConfigPath(): string {
return resolveConfigFilePath({
appDataDir: process.env.APPDATA,
xdgConfigHome: process.env.XDG_CONFIG_HOME,
homeDir: os.homedir(),
existsSync: fs.existsSync,

View File

@@ -1,4 +1,5 @@
import type { LauncherYoutubeSubgenConfig } from '../types.js';
import { mergeAiConfig } from '../../src/ai/config.js';
function asStringArray(value: unknown): string[] | undefined {
if (!Array.isArray(value)) return undefined;
@@ -21,17 +22,58 @@ export function parseLauncherYoutubeSubgenConfig(
const jimakuRaw = root.jimaku;
const jimaku =
jimakuRaw && typeof jimakuRaw === 'object' ? (jimakuRaw as Record<string, unknown>) : null;
const aiRaw = root.ai;
const ai = aiRaw && typeof aiRaw === 'object' ? (aiRaw as Record<string, unknown>) : null;
const youtubeAiRaw = youtubeSubgen?.ai;
const youtubeAi =
youtubeAiRaw && typeof youtubeAiRaw === 'object'
? (youtubeAiRaw as Record<string, unknown>)
: null;
const mode = youtubeSubgen?.mode;
const jimakuLanguagePreference = jimaku?.languagePreference;
const jimakuMaxEntryResults = jimaku?.maxEntryResults;
return {
mode: mode === 'automatic' || mode === 'preprocess' || mode === 'off' ? mode : undefined,
whisperBin:
typeof youtubeSubgen?.whisperBin === 'string' ? youtubeSubgen.whisperBin : undefined,
whisperModel:
typeof youtubeSubgen?.whisperModel === 'string' ? youtubeSubgen.whisperModel : undefined,
whisperVadModel:
typeof youtubeSubgen?.whisperVadModel === 'string'
? youtubeSubgen.whisperVadModel
: undefined,
whisperThreads:
typeof youtubeSubgen?.whisperThreads === 'number' &&
Number.isFinite(youtubeSubgen.whisperThreads) &&
youtubeSubgen.whisperThreads > 0
? Math.floor(youtubeSubgen.whisperThreads)
: undefined,
fixWithAi: typeof youtubeSubgen?.fixWithAi === 'boolean' ? youtubeSubgen.fixWithAi : undefined,
ai: mergeAiConfig(
ai
? {
enabled: typeof ai.enabled === 'boolean' ? ai.enabled : undefined,
apiKey: typeof ai.apiKey === 'string' ? ai.apiKey : undefined,
apiKeyCommand: typeof ai.apiKeyCommand === 'string' ? ai.apiKeyCommand : undefined,
baseUrl: typeof ai.baseUrl === 'string' ? ai.baseUrl : undefined,
model: typeof ai.model === 'string' ? ai.model : undefined,
systemPrompt: typeof ai.systemPrompt === 'string' ? ai.systemPrompt : undefined,
requestTimeoutMs:
typeof ai.requestTimeoutMs === 'number' &&
Number.isFinite(ai.requestTimeoutMs) &&
ai.requestTimeoutMs > 0
? Math.floor(ai.requestTimeoutMs)
: undefined,
}
: undefined,
youtubeAi
? {
model: typeof youtubeAi.model === 'string' ? youtubeAi.model : undefined,
systemPrompt:
typeof youtubeAi.systemPrompt === 'string' ? youtubeAi.systemPrompt : undefined,
}
: undefined,
),
primarySubLanguages: asStringArray(youtubeSubgen?.primarySubLanguages),
secondarySubLanguages: asStringArray(secondarySub?.secondarySubLanguages),
jimakuApiKey: typeof jimaku?.apiKey === 'string' ? jimaku.apiKey : undefined,

24
launcher/log.test.ts Normal file
View File

@@ -0,0 +1,24 @@
import test from 'node:test';
import assert from 'node:assert/strict';
import path from 'node:path';
import { getDefaultMpvLogFile } from './types.js';

// On Windows the mpv log file should be rooted in %APPDATA%\SubMiner\logs
// and carry today's ISO date in its filename.
test('getDefaultMpvLogFile uses APPDATA on windows', () => {
  const appDataDir = 'C:\\Users\\tester\\AppData\\Roaming';
  const actual = getDefaultMpvLogFile({
    platform: 'win32',
    homeDir: 'C:\\Users\\tester',
    appDataDir,
  });
  // The date component is computed at call time, so build the expectation
  // the same way the implementation does.
  const today = new Date().toISOString().slice(0, 10);
  const expected = path.join(appDataDir, 'SubMiner', 'logs', `SubMiner-${today}.log`);
  // Normalize both sides so separator differences on the host OS don't matter.
  assert.equal(path.normalize(actual), path.normalize(expected));
});

View File

@@ -51,10 +51,16 @@ function runLauncher(argv: string[], env: NodeJS.ProcessEnv): RunResult {
}
function makeTestEnv(homeDir: string, xdgConfigHome: string): NodeJS.ProcessEnv {
const pathValue = process.env.Path || process.env.PATH || '';
return {
...process.env,
HOME: homeDir,
USERPROFILE: homeDir,
APPDATA: xdgConfigHome,
LOCALAPPDATA: path.join(homeDir, 'AppData', 'Local'),
XDG_CONFIG_HOME: xdgConfigHome,
PATH: pathValue,
Path: pathValue,
};
}
@@ -75,13 +81,14 @@ test('config path uses XDG_CONFIG_HOME override', () => {
test('config discovery ignores lowercase subminer candidate', () => {
const homeDir = '/home/tester';
const xdgConfigHome = '/tmp/xdg-config';
const expected = path.join(xdgConfigHome, 'SubMiner', 'config.jsonc');
const foundPaths = new Set([path.join(xdgConfigHome, 'subminer', 'config.json')]);
const expected = path.posix.join(xdgConfigHome, 'SubMiner', 'config.jsonc');
const foundPaths = new Set([path.posix.join(xdgConfigHome, 'subminer', 'config.json')]);
const resolved = resolveConfigFilePath({
xdgConfigHome,
homeDir,
existsSync: (candidate) => foundPaths.has(path.normalize(candidate)),
platform: 'linux',
existsSync: (candidate) => foundPaths.has(path.posix.normalize(candidate)),
});
assert.equal(resolved, expected);
@@ -138,6 +145,12 @@ test('mpv status exits non-zero when socket is not ready', () => {
withTempDir((root) => {
const homeDir = path.join(root, 'home');
const xdgConfigHome = path.join(root, 'xdg');
const socketPath = path.join(root, 'missing.sock');
fs.mkdirSync(path.join(xdgConfigHome, 'mpv', 'script-opts'), { recursive: true });
fs.writeFileSync(
path.join(xdgConfigHome, 'mpv', 'script-opts', 'subminer.conf'),
`socket_path=${socketPath}\n`,
);
const result = runLauncher(['mpv', 'status'], makeTestEnv(homeDir, xdgConfigHome));
assert.equal(result.status, 1);
@@ -152,6 +165,7 @@ test('doctor reports checks and exits non-zero without hard dependencies', () =>
const env = {
...makeTestEnv(homeDir, xdgConfigHome),
PATH: '',
Path: '',
};
const result = runLauncher(['doctor'], env);
@@ -162,6 +176,136 @@ test('doctor reports checks and exits non-zero without hard dependencies', () =>
});
});
// The `--mode` flag was removed along with the YouTube subgen mode setting;
// supplying it must now fail with commander's unknown-option error.
test('youtube command rejects removed --mode option', () => {
  withTempDir((root) => {
    // Stand in for the real app binary so launcher startup checks pass.
    const fakeAppPath = path.join(root, 'fake-subminer.sh');
    fs.writeFileSync(fakeAppPath, '#!/bin/sh\nexit 0\n');
    fs.chmodSync(fakeAppPath, 0o755);
    const launcherEnv = {
      ...makeTestEnv(path.join(root, 'home'), path.join(root, 'xdg')),
      SUBMINER_APPIMAGE_PATH: fakeAppPath,
    };
    const argv = ['youtube', 'https://www.youtube.com/watch?v=test123', '--mode', 'automatic'];
    const result = runLauncher(argv, launcherEnv);
    // Non-zero exit and the standard unknown-option message on stderr.
    assert.equal(result.status, 1);
    assert.match(result.stderr, /unknown option '--mode'/i);
  });
});
// End-to-end launcher check: for a YouTube URL, subtitle generation (via the
// stubbed yt-dlp) must complete BEFORE mpv is launched, and mpv must receive
// the original URL as its playback target.
test('youtube playback generates subtitles before mpv launch', { timeout: 15000 }, () => {
withTempDir((root) => {
const homeDir = path.join(root, 'home');
const xdgConfigHome = path.join(root, 'xdg');
// Directory of fake executables placed ahead of the real PATH.
const binDir = path.join(root, 'bin');
const appPath = path.join(root, 'fake-subminer.sh');
// Files the fake yt-dlp/mpv scripts write so assertions can inspect
// call order and arguments after the launcher exits.
const ytdlpLogPath = path.join(root, 'yt-dlp.log');
const mpvCapturePath = path.join(root, 'mpv-order.txt');
const mpvArgsPath = path.join(root, 'mpv-args.txt');
const socketPath = path.join(root, 'mpv.sock');
// JSON-quoted, forward-slashed runtime path; embedded in the fake mpv
// script below so it can spin up a short-lived IPC socket server.
const bunBinary = JSON.stringify(process.execPath.replace(/\\/g, '/'));
fs.mkdirSync(binDir, { recursive: true });
fs.mkdirSync(path.join(xdgConfigHome, 'SubMiner'), { recursive: true });
fs.mkdirSync(path.join(xdgConfigHome, 'mpv', 'script-opts'), { recursive: true });
// Pretend first-run setup already finished so the launcher skips onboarding.
fs.writeFileSync(
path.join(xdgConfigHome, 'SubMiner', 'setup-state.json'),
JSON.stringify({
version: 1,
status: 'completed',
completedAt: '2026-03-08T00:00:00.000Z',
completionSource: 'user',
lastSeenYomitanDictionaryCount: 0,
pluginInstallStatus: 'installed',
pluginInstallPathSummary: null,
}),
);
// Point the plugin runtime at our socket and disable overlay auto-start
// so the test does not depend on any overlay/display machinery.
fs.writeFileSync(
path.join(xdgConfigHome, 'mpv', 'script-opts', 'subminer.conf'),
`socket_path=${socketPath}\nauto_start=no\nauto_start_visible_overlay=no\nauto_start_pause_until_ready=no\n`,
);
fs.writeFileSync(appPath, '#!/bin/sh\nexit 0\n');
fs.chmodSync(appPath, 0o755);
// Fake yt-dlp: logs each invocation, answers metadata probes
// (--dump-single-json) with a fixed video id, and fabricates ja/en SRT
// files in the requested output directory for download invocations.
fs.writeFileSync(
path.join(binDir, 'yt-dlp'),
`#!/bin/sh
set -eu
printf '%s\\n' "$*" >> "$SUBMINER_TEST_YTDLP_LOG"
if printf '%s\\n' "$*" | grep -q -- '--dump-single-json'; then
printf '{"id":"video123"}\\n'
exit 0
fi
out_dir=""
prev=""
for arg in "$@"; do
if [ "$prev" = "-o" ]; then
out_dir=$(dirname "$arg")
break
fi
prev="$arg"
done
mkdir -p "$out_dir"
printf '1\\n00:00:00,000 --> 00:00:01,000\\nこんにちは\\n' > "$out_dir/video123.ja.srt"
printf '1\\n00:00:00,000 --> 00:00:01,000\\nhello\\n' > "$out_dir/video123.en.srt"
`,
'utf8',
);
fs.chmodSync(path.join(binDir, 'yt-dlp'), 0o755);
// ffmpeg only needs to exist for the dependency check; it is never used.
fs.writeFileSync(path.join(binDir, 'ffmpeg'), '#!/bin/sh\nexit 0\n', 'utf8');
fs.chmodSync(path.join(binDir, 'ffmpeg'), 0o755);
// Fake mpv: records whether yt-dlp ran first (the ordering under test),
// captures its argv, then uses the embedded runtime to briefly listen on
// the --input-ipc-server socket so the launcher's readiness wait passes.
fs.writeFileSync(
path.join(binDir, 'mpv'),
`#!/bin/sh
set -eu
if [ -s "$SUBMINER_TEST_YTDLP_LOG" ]; then
printf 'generated-before-mpv\\n' > "$SUBMINER_TEST_MPV_ORDER"
else
printf 'mpv-before-generation\\n' > "$SUBMINER_TEST_MPV_ORDER"
fi
printf '%s\\n' "$@" > "$SUBMINER_TEST_MPV_ARGS"
socket_path=""
for arg in "$@"; do
case "$arg" in
--input-ipc-server=*)
socket_path="\${arg#--input-ipc-server=}"
;;
esac
done
${bunBinary} -e "const net=require('node:net'); const fs=require('node:fs'); const socket=process.argv[1]; try { fs.rmSync(socket,{force:true}); } catch {} const server=net.createServer((conn)=>conn.end()); server.listen(socket,()=>setTimeout(()=>server.close(()=>process.exit(0)),250));" "$socket_path"
`,
'utf8',
);
fs.chmodSync(path.join(binDir, 'mpv'), 0o755);
// Prepend the fake-binaries dir on both PATH spellings (Windows uses 'Path').
const env = {
...makeTestEnv(homeDir, xdgConfigHome),
PATH: `${binDir}${path.delimiter}${process.env.Path || process.env.PATH || ''}`,
Path: `${binDir}${path.delimiter}${process.env.Path || process.env.PATH || ''}`,
SUBMINER_APPIMAGE_PATH: appPath,
SUBMINER_TEST_YTDLP_LOG: ytdlpLogPath,
SUBMINER_TEST_MPV_ORDER: mpvCapturePath,
SUBMINER_TEST_MPV_ARGS: mpvArgsPath,
};
const result = runLauncher(['youtube', 'https://www.youtube.com/watch?v=test123'], env);
assert.equal(result.status, 0, `stdout:\n${result.stdout}\nstderr:\n${result.stderr}`);
// Core ordering assertion: yt-dlp had logged activity before mpv started.
assert.equal(fs.readFileSync(mpvCapturePath, 'utf8').trim(), 'generated-before-mpv');
// mpv was handed the original YouTube URL as its target.
assert.match(
fs.readFileSync(mpvArgsPath, 'utf8'),
/https:\/\/www\.youtube\.com\/watch\?v=test123/,
);
// The launcher probed video metadata via yt-dlp's JSON dump.
assert.match(fs.readFileSync(ytdlpLogPath, 'utf8'), /--dump-single-json/);
});
});
test('dictionary command forwards --dictionary and --dictionary-target to app command path', () => {
withTempDir((root) => {
const homeDir = path.join(root, 'home');
@@ -400,15 +544,20 @@ test('parseJellyfinPreviewAuthResponse returns null for invalid payloads', () =>
});
test('deriveJellyfinTokenStorePath resolves alongside config path', () => {
const tokenPath = deriveJellyfinTokenStorePath('/home/test/.config/SubMiner/config.jsonc');
assert.equal(tokenPath, '/home/test/.config/SubMiner/jellyfin-token-store.json');
const configPath = path.join('/home/test', '.config', 'SubMiner', 'config.jsonc');
const tokenPath = deriveJellyfinTokenStorePath(configPath);
assert.equal(tokenPath, path.join(path.dirname(configPath), 'jellyfin-token-store.json'));
});
test('hasStoredJellyfinSession checks token-store existence', () => {
const exists = (candidate: string): boolean =>
candidate === '/home/test/.config/SubMiner/jellyfin-token-store.json';
assert.equal(hasStoredJellyfinSession('/home/test/.config/SubMiner/config.jsonc', exists), true);
assert.equal(hasStoredJellyfinSession('/home/test/.config/Other/alt.jsonc', exists), false);
const configPath = path.join('/home/test', '.config', 'SubMiner', 'config.jsonc');
const tokenPath = deriveJellyfinTokenStorePath(configPath);
const exists = (candidate: string): boolean => candidate === tokenPath;
assert.equal(hasStoredJellyfinSession(configPath, exists), true);
assert.equal(
hasStoredJellyfinSession(path.join('/home/test', '.config', 'Other', 'alt.jsonc'), exists),
false,
);
});
test('shouldRetryWithStartForNoRunningInstance matches expected app lifecycle error', () => {

View File

@@ -5,7 +5,14 @@ import path from 'node:path';
import net from 'node:net';
import { EventEmitter } from 'node:events';
import type { Args } from './types';
import { runAppCommandCaptureOutput, startOverlay, state, waitForUnixSocketReady } from './mpv';
import {
cleanupPlaybackSession,
runAppCommandCaptureOutput,
shouldResolveAniSkipMetadata,
startOverlay,
state,
waitForUnixSocketReady,
} from './mpv';
import * as mpvModule from './mpv';
function createTempSocketPath(): { dir: string; socketPath: string } {
@@ -73,6 +80,20 @@ test('waitForUnixSocketReady returns true when socket becomes connectable before
}
});
// Covers the three AniSkip gating cases: a plain local file (lookup runs),
// URL playback (lookup skipped), and a local file with preloaded YouTube
// subtitles (lookup skipped).
test('shouldResolveAniSkipMetadata skips URL and YouTube-preloaded playback', () => {
  assert.equal(shouldResolveAniSkipMetadata('/media/show.mkv', 'file'), true);
  assert.equal(
    shouldResolveAniSkipMetadata('https://www.youtube.com/watch?v=test123', 'url'),
    false,
  );
  assert.equal(
    shouldResolveAniSkipMetadata('/tmp/video123.webm', 'file', {
      primaryPath: '/tmp/video123.ja.srt',
    }),
    false,
  );
});
function makeArgs(overrides: Partial<Args> = {}): Args {
return {
backend: 'x11',
@@ -80,16 +101,19 @@ function makeArgs(overrides: Partial<Args> = {}): Args {
recursive: false,
profile: '',
startOverlay: false,
youtubeSubgenMode: 'off',
whisperBin: '',
whisperModel: '',
whisperVadModel: '',
whisperThreads: 4,
youtubeSubgenOutDir: '',
youtubeSubgenAudioFormat: 'wav',
youtubeSubgenKeepTemp: false,
youtubeFixWithAi: false,
youtubePrimarySubLangs: [],
youtubeSecondarySubLangs: [],
youtubeAudioLangs: [],
youtubeWhisperSourceLanguage: 'ja',
aiConfig: {},
useTexthooker: false,
autoStartOverlay: false,
texthookerOnly: false,
@@ -152,3 +176,59 @@ test('startOverlay resolves without fixed 2s sleep when readiness signals arrive
fs.rmSync(dir, { recursive: true, force: true });
}
});
// Exercises cleanupPlaybackSession: mpv and the YouTube-subgen helper must
// receive kill(), while the overlay process is left alone and the fake
// background app binary is never invoked (its capture log stays absent).
test('cleanupPlaybackSession preserves background app while stopping mpv-owned children', async () => {
  const { dir } = createTempSocketPath();
  // Fake app binary that appends every invocation's argv to a log file.
  const appPath = path.join(dir, 'fake-subminer.sh');
  const appInvocationsPath = path.join(dir, 'app-invocations.log');
  fs.writeFileSync(
    appPath,
    `#!/bin/sh\necho \"$@\" >> ${JSON.stringify(appInvocationsPath)}\nexit 0\n`,
  );
  fs.chmodSync(appPath, 0o755);
  // Stub child processes that record kill() calls instead of dying.
  const calls: string[] = [];
  const overlayProc = {
    killed: false,
    kill: () => {
      calls.push('overlay-kill');
      return true;
    },
  } as unknown as NonNullable<typeof state.overlayProc>;
  const mpvProc = {
    killed: false,
    kill: () => {
      calls.push('mpv-kill');
      return true;
    },
  } as unknown as NonNullable<typeof state.mpvProc>;
  const helperProc = {
    killed: false,
    kill: () => {
      calls.push('helper-kill');
      return true;
    },
  } as unknown as NonNullable<typeof state.overlayProc>;
  // Seed the shared launcher state the function reads.
  state.stopRequested = false;
  state.appPath = appPath;
  state.overlayManagedByLauncher = true;
  state.overlayProc = overlayProc;
  state.mpvProc = mpvProc;
  state.youtubeSubgenChildren.add(helperProc);
  try {
    await cleanupPlaybackSession(makeArgs());
    // Only mpv and its helper are terminated; 'overlay-kill' must not appear.
    assert.deepEqual(calls, ['mpv-kill', 'helper-kill']);
    // The fake app was never spawned, so its invocation log must not exist.
    assert.equal(fs.existsSync(appInvocationsPath), false);
  } finally {
    // Reset the shared module state so later tests start clean.
    state.overlayProc = null;
    state.mpvProc = null;
    state.youtubeSubgenChildren.clear();
    state.overlayManagedByLauncher = false;
    state.appPath = '';
    state.stopRequested = false;
    fs.rmSync(dir, { recursive: true, force: true });
  }
});

View File

@@ -9,8 +9,10 @@ import { log, fail, getMpvLogPath } from './log.js';
import { buildSubminerScriptOpts, resolveAniSkipMetadataForFile } from './aniskip-metadata.js';
import {
commandExists,
getPathEnv,
isExecutable,
resolveBinaryPathCandidate,
resolveCommandInvocation,
realpathMaybe,
isYoutubeTarget,
uniqueNormalizedLangCodes,
@@ -27,6 +29,11 @@ export const state = {
stopRequested: false,
};
// Executable plus argv to hand to child_process.spawn after any
// platform-specific rewriting (e.g. routing scripts through a shell).
type SpawnTarget = {
  command: string;
  args: string[];
};
const DETACHED_IDLE_MPV_PID_FILE = path.join(os.tmpdir(), 'subminer-idle-mpv.pid');
const OVERLAY_START_SOCKET_READY_TIMEOUT_MS = 900;
const OVERLAY_START_COMMAND_SETTLE_TIMEOUT_MS = 700;
@@ -199,7 +206,8 @@ export function findAppBinary(selfPath: string): string | null {
if (isExecutable(candidate)) return candidate;
}
const fromPath = process.env.PATH?.split(path.delimiter)
const fromPath = getPathEnv()
.split(path.delimiter)
.map((dir) => path.join(dir, 'subminer'))
.find((candidate) => isExecutable(candidate));
@@ -419,6 +427,20 @@ export async function loadSubtitleIntoMpv(
}
}
/**
 * Decides whether AniSkip metadata lookup should run for a playback
 * target. Lookup only applies to local files that are neither a YouTube
 * target nor accompanied by preloaded (YouTube-generated) subtitles.
 */
export function shouldResolveAniSkipMetadata(
  target: string,
  targetKind: 'file' | 'url',
  preloadedSubtitles?: { primaryPath?: string; secondaryPath?: string },
): boolean {
  const isLocalFile = targetKind === 'file';
  // Preloaded subtitle paths imply YouTube-sourced playback; skip lookup.
  const hasPreloaded = Boolean(
    preloadedSubtitles?.primaryPath || preloadedSubtitles?.secondaryPath,
  );
  return isLocalFile && !hasPreloaded && !isYoutubeTarget(target);
}
export async function startMpv(
target: string,
targetKind: 'file' | 'url',
@@ -456,17 +478,13 @@ export async function startMpv(
log('debug', args.logLevel, `YouTube subtitle langs: ${subtitleLangs}`);
log('debug', args.logLevel, `YouTube audio langs: ${audioLangs}`);
mpvArgs.push(`--ytdl-format=${DEFAULT_YOUTUBE_YTDL_FORMAT}`, `--alang=${audioLangs}`);
if (args.youtubeSubgenMode === 'off') {
mpvArgs.push(
'--sub-auto=fuzzy',
`--slang=${subtitleLangs}`,
'--ytdl-raw-options-append=write-auto-subs=',
'--ytdl-raw-options-append=write-subs=',
'--ytdl-raw-options-append=sub-format=vtt/best',
`--ytdl-raw-options-append=sub-langs=${subtitleLangs}`,
);
}
mpvArgs.push(
'--sub-auto=fuzzy',
`--slang=${subtitleLangs}`,
'--ytdl-raw-options-append=write-subs=',
'--ytdl-raw-options-append=sub-format=vtt/best',
`--ytdl-raw-options-append=sub-langs=${subtitleLangs}`,
);
}
}
@@ -479,8 +497,9 @@ export async function startMpv(
if (options?.startPaused) {
mpvArgs.push('--pause=yes');
}
const aniSkipMetadata =
targetKind === 'file' ? await resolveAniSkipMetadataForFile(target) : null;
const aniSkipMetadata = shouldResolveAniSkipMetadata(target, targetKind, preloadedSubtitles)
? await resolveAniSkipMetadataForFile(target)
: null;
const scriptOpts = buildSubminerScriptOpts(appPath, socketPath, aniSkipMetadata);
if (aniSkipMetadata) {
log(
@@ -501,7 +520,8 @@ export async function startMpv(
mpvArgs.push(`--input-ipc-server=${socketPath}`);
mpvArgs.push(target);
state.mpvProc = spawn('mpv', mpvArgs, { stdio: 'inherit' });
const mpvTarget = resolveCommandInvocation('mpv', mpvArgs);
state.mpvProc = spawn(mpvTarget.command, mpvTarget.args, { stdio: 'inherit' });
}
async function waitForOverlayStartCommandSettled(
@@ -552,7 +572,8 @@ export async function startOverlay(appPath: string, args: Args, socketPath: stri
if (args.logLevel !== 'info') overlayArgs.push('--log-level', args.logLevel);
if (args.useTexthooker) overlayArgs.push('--texthooker');
state.overlayProc = spawn(appPath, overlayArgs, {
const target = resolveAppSpawnTarget(appPath, overlayArgs);
state.overlayProc = spawn(target.command, target.args, {
stdio: 'inherit',
env: { ...process.env, SUBMINER_MPV_LOG: getMpvLogPath() },
});
@@ -628,6 +649,29 @@ export function stopOverlay(args: Args): void {
void terminateTrackedDetachedMpv(args.logLevel);
}
/**
 * Tears down per-playback children (mpv and any YouTube subtitle helper
 * processes) and any tracked detached idle mpv, while leaving the
 * long-lived background app and overlay untouched.
 */
export async function cleanupPlaybackSession(args: Args): Promise<void> {
  const mpv = state.mpvProc;
  if (mpv && !mpv.killed) {
    try {
      mpv.kill('SIGTERM');
    } catch {
      // Process already exited; nothing to do.
    }
  }
  for (const helper of state.youtubeSubgenChildren) {
    if (helper.killed) continue;
    try {
      helper.kill('SIGTERM');
    } catch {
      // Process already exited; nothing to do.
    }
  }
  state.youtubeSubgenChildren.clear();
  await terminateTrackedDetachedMpv(args.logLevel);
}
function buildAppEnv(): NodeJS.ProcessEnv {
const env: Record<string, string | undefined> = {
...process.env,
@@ -648,8 +692,30 @@ function buildAppEnv(): NodeJS.ProcessEnv {
return env;
}
/**
 * Test hook: when SUBMINER_TEST_CAPTURE names a file, writes the app argv
 * there (one arg per line, trailing newline when non-empty) instead of
 * spawning the real app. Returns true when the capture happened and the
 * caller should skip the spawn.
 */
function maybeCaptureAppArgs(appArgs: string[]): boolean {
  const capturePath = process.env.SUBMINER_TEST_CAPTURE?.trim();
  if (!capturePath) {
    return false;
  }
  const payload = appArgs.length > 0 ? `${appArgs.join('\n')}\n` : '';
  fs.writeFileSync(capturePath, payload, 'utf8');
  return true;
}
/**
 * Maps the app binary path and argv into a spawnable target. Only Windows
 * needs rewriting (script interpreters, PATHEXT); every other platform
 * executes the binary directly.
 */
function resolveAppSpawnTarget(appPath: string, appArgs: string[]): SpawnTarget {
  if (process.platform === 'win32') {
    return resolveCommandInvocation(appPath, appArgs);
  }
  return { command: appPath, args: appArgs };
}
export function runAppCommandWithInherit(appPath: string, appArgs: string[]): never {
const result = spawnSync(appPath, appArgs, {
if (maybeCaptureAppArgs(appArgs)) {
process.exit(0);
}
const target = resolveAppSpawnTarget(appPath, appArgs);
const result = spawnSync(target.command, target.args, {
stdio: 'inherit',
env: buildAppEnv(),
});
@@ -668,7 +734,16 @@ export function runAppCommandCaptureOutput(
stderr: string;
error?: Error;
} {
const result = spawnSync(appPath, appArgs, {
if (maybeCaptureAppArgs(appArgs)) {
return {
status: 0,
stdout: '',
stderr: '',
};
}
const target = resolveAppSpawnTarget(appPath, appArgs);
const result = spawnSync(target.command, target.args, {
env: buildAppEnv(),
encoding: 'utf8',
});
@@ -687,8 +762,17 @@ export function runAppCommandWithInheritLogged(
logLevel: LogLevel,
label: string,
): never {
log('debug', logLevel, `${label}: launching app with args: ${appArgs.join(' ')}`);
const result = spawnSync(appPath, appArgs, {
if (maybeCaptureAppArgs(appArgs)) {
process.exit(0);
}
const target = resolveAppSpawnTarget(appPath, appArgs);
log(
'debug',
logLevel,
`${label}: launching app with args: ${[target.command, ...target.args].join(' ')}`,
);
const result = spawnSync(target.command, target.args, {
stdio: 'inherit',
env: buildAppEnv(),
});
@@ -702,7 +786,11 @@ export function runAppCommandWithInheritLogged(
export function launchAppStartDetached(appPath: string, logLevel: LogLevel): void {
const startArgs = ['--start'];
if (logLevel !== 'info') startArgs.push('--log-level', logLevel);
const proc = spawn(appPath, startArgs, {
if (maybeCaptureAppArgs(startArgs)) {
return;
}
const target = resolveAppSpawnTarget(appPath, startArgs);
const proc = spawn(target.command, target.args, {
stdio: 'ignore',
detached: true,
env: buildAppEnv(),
@@ -732,7 +820,8 @@ export function launchMpvIdleDetached(
);
mpvArgs.push(`--log-file=${getMpvLogPath()}`);
mpvArgs.push(`--input-ipc-server=${socketPath}`);
const proc = spawn('mpv', mpvArgs, {
const mpvTarget = resolveCommandInvocation('mpv', mpvArgs);
const proc = spawn(mpvTarget.command, mpvTarget.args, {
stdio: 'ignore',
detached: true,
});

View File

@@ -7,22 +7,26 @@ test('waitForSetupCompletion resolves completed and cancelled states', async ()
const sequence: Array<SetupState | null> = [
null,
{
version: 1,
version: 2,
status: 'in_progress',
completedAt: null,
completionSource: null,
lastSeenYomitanDictionaryCount: 0,
pluginInstallStatus: 'unknown',
pluginInstallPathSummary: null,
windowsMpvShortcutPreferences: { startMenuEnabled: true, desktopEnabled: true },
windowsMpvShortcutLastStatus: 'unknown',
},
{
version: 1,
version: 2,
status: 'completed',
completedAt: '2026-03-07T00:00:00.000Z',
completionSource: 'user',
lastSeenYomitanDictionaryCount: 1,
pluginInstallStatus: 'skipped',
pluginInstallPathSummary: null,
windowsMpvShortcutPreferences: { startMenuEnabled: true, desktopEnabled: true },
windowsMpvShortcutLastStatus: 'skipped',
},
];
@@ -50,23 +54,27 @@ test('ensureLauncherSetupReady launches setup app and resumes only after complet
if (reads === 1) return null;
if (reads === 2) {
return {
version: 1,
version: 2,
status: 'in_progress',
completedAt: null,
completionSource: null,
lastSeenYomitanDictionaryCount: 0,
pluginInstallStatus: 'unknown',
pluginInstallPathSummary: null,
windowsMpvShortcutPreferences: { startMenuEnabled: true, desktopEnabled: true },
windowsMpvShortcutLastStatus: 'unknown',
};
}
return {
version: 1,
version: 2,
status: 'completed',
completedAt: '2026-03-07T00:00:00.000Z',
completionSource: 'user',
lastSeenYomitanDictionaryCount: 1,
pluginInstallStatus: 'installed',
pluginInstallPathSummary: '/tmp/mpv',
windowsMpvShortcutPreferences: { startMenuEnabled: true, desktopEnabled: true },
windowsMpvShortcutLastStatus: 'installed',
};
},
launchSetupApp: () => {
@@ -88,13 +96,15 @@ test('ensureLauncherSetupReady launches setup app and resumes only after complet
test('ensureLauncherSetupReady fails on timeout/cancelled state', async () => {
const result = await ensureLauncherSetupReady({
readSetupState: () => ({
version: 1,
version: 2,
status: 'cancelled',
completedAt: null,
completionSource: null,
lastSeenYomitanDictionaryCount: 0,
pluginInstallStatus: 'unknown',
pluginInstallPathSummary: null,
windowsMpvShortcutPreferences: { startMenuEnabled: true, desktopEnabled: true },
windowsMpvShortcutLastStatus: 'unknown',
}),
launchSetupApp: () => undefined,
sleep: async () => undefined,

View File

@@ -295,7 +295,7 @@ test('launcher mpv status returns ready when socket is connectable', async () =>
});
test(
'launcher start-overlay run forwards socket/backend and stops overlay after mpv exits',
'launcher start-overlay run forwards socket/backend and keeps background app alive after mpv exits',
{ timeout: LONG_SMOKE_TEST_TIMEOUT_MS },
async () => {
await withSmokeCase('overlay-start-stop', async (smokeCase) => {
@@ -310,7 +310,6 @@ test(
const appStartPath = path.join(smokeCase.artifactsDir, 'fake-app-start.log');
const appStopPath = path.join(smokeCase.artifactsDir, 'fake-app-stop.log');
await waitForJsonLines(appStartPath, 1);
await waitForJsonLines(appStopPath, 1);
const appStartEntries = readJsonLines(appStartPath);
const appStopEntries = readJsonLines(appStopPath);
@@ -325,7 +324,7 @@ test(
assert.match(result.stdout, /Starting SubMiner overlay/i);
assert.equal(appStartEntries.length, 1);
assert.equal(appStopEntries.length, 1);
assert.equal(appStopEntries.length, 0);
assert.equal(mpvEntries.length >= 1, true);
const appStartArgs = appStartEntries[0]?.argv;
@@ -337,9 +336,6 @@ test(
assert.equal((appStartArgs as string[]).includes(smokeCase.socketPath), true);
assert.equal(appStartEntries[0]?.subminerMpvLog, smokeCase.mpvOverlayLogPath);
const appStopArgs = appStopEntries[0]?.argv;
assert.deepEqual(appStopArgs, ['--stop']);
const mpvFirstArgs = mpvEntries[0]?.argv;
assert.equal(Array.isArray(mpvFirstArgs), true);
assert.equal(

View File

@@ -3,7 +3,14 @@ import os from 'node:os';
export { VIDEO_EXTENSIONS } from '../src/shared/video-extensions.js';
export const ROFI_THEME_FILE = 'subminer.rasi';
/**
 * Returns the default mpv IPC socket path for a platform: a named pipe on
 * Windows (which has no Unix domain sockets), /tmp/subminer-socket
 * elsewhere. (Fixed: diff residue had left a duplicate
 * `export const DEFAULT_SOCKET_PATH` declaration.)
 */
export function getDefaultSocketPath(platform: NodeJS.Platform = process.platform): string {
  if (platform === 'win32') {
    return '\\\\.\\pipe\\subminer-socket';
  }
  return '/tmp/subminer-socket';
}
// Resolved once for the current process's platform.
export const DEFAULT_SOCKET_PATH = getDefaultSocketPath();
export const DEFAULT_YOUTUBE_PRIMARY_SUB_LANGS = ['ja', 'jpn'];
export const DEFAULT_YOUTUBE_SECONDARY_SUB_LANGS = ['en', 'eng'];
export const YOUTUBE_SUB_EXTENSIONS = new Set(['.srt', '.vtt', '.ass']);
@@ -22,13 +29,21 @@ export const DEFAULT_YOUTUBE_SUBGEN_OUT_DIR = path.join(
'subminer',
'youtube-subs',
);
/**
 * Computes the default mpv log file path: a dated log file under the
 * platform-appropriate SubMiner config directory. `platform`, `homeDir`,
 * and (Windows only) `appDataDir` can be overridden for tests.
 * (Fixed: diff residue had left a duplicate
 * `export const DEFAULT_MPV_LOG_FILE` declaration.)
 */
export function getDefaultMpvLogFile(options?: {
  platform?: NodeJS.Platform;
  homeDir?: string;
  appDataDir?: string;
}): string {
  const platform = options?.platform ?? process.platform;
  const homeDir = options?.homeDir ?? os.homedir();
  // Windows keeps app state under %APPDATA%; everywhere else ~/.config.
  const baseDir =
    platform === 'win32'
      ? path.join(
          options?.appDataDir?.trim() || path.join(homeDir, 'AppData', 'Roaming'),
          'SubMiner',
        )
      : path.join(homeDir, '.config', 'SubMiner');
  // One log file per calendar day (UTC date taken from the ISO timestamp).
  return path.join(baseDir, 'logs', `SubMiner-${new Date().toISOString().slice(0, 10)}.log`);
}
// Evaluated once at module load; the date is fixed for the process lifetime.
export const DEFAULT_MPV_LOG_FILE = getDefaultMpvLogFile();
export const DEFAULT_YOUTUBE_YTDL_FORMAT = 'bestvideo*+bestaudio/best';
export const DEFAULT_JIMAKU_API_BASE_URL = 'https://jimaku.cc';
export const DEFAULT_MPV_SUBMINER_ARGS = [
@@ -42,26 +57,38 @@ export const DEFAULT_MPV_SUBMINER_ARGS = [
] as const;
export type LogLevel = 'debug' | 'info' | 'warn' | 'error';
export type YoutubeSubgenMode = 'automatic' | 'preprocess' | 'off';
export type Backend = 'auto' | 'hyprland' | 'x11' | 'macos';
export type JimakuLanguagePreference = 'ja' | 'en' | 'none';
/**
 * AI provider settings forwarded from the launcher configuration.
 * All fields are optional; unset values fall back to defaults elsewhere.
 */
export interface LauncherAiConfig {
  // Master switch for AI-assisted features.
  enabled?: boolean;
  // Literal API key for the provider.
  apiKey?: string;
  // NOTE(review): presumably a shell command whose output is the API key;
  // confirm precedence between apiKey and apiKeyCommand in the consumer.
  apiKeyCommand?: string;
  // Provider endpoint base URL (OpenAI-compatible, presumably — verify).
  baseUrl?: string;
  model?: string;
  systemPrompt?: string;
  // Per-request timeout in milliseconds.
  requestTimeoutMs?: number;
}
export interface Args {
backend: Backend;
directory: string;
recursive: boolean;
profile: string;
startOverlay: boolean;
youtubeSubgenMode: YoutubeSubgenMode;
whisperBin: string;
whisperModel: string;
whisperVadModel: string;
whisperThreads: number;
youtubeSubgenOutDir: string;
youtubeSubgenAudioFormat: string;
youtubeSubgenKeepTemp: boolean;
youtubeFixWithAi: boolean;
youtubePrimarySubLangs: string[];
youtubeSecondarySubLangs: string[];
youtubeAudioLangs: string[];
youtubeWhisperSourceLanguage: string;
aiConfig: LauncherAiConfig;
useTexthooker: boolean;
autoStartOverlay: boolean;
texthookerOnly: boolean;
@@ -96,9 +123,12 @@ export interface Args {
}
export interface LauncherYoutubeSubgenConfig {
mode?: YoutubeSubgenMode;
whisperBin?: string;
whisperModel?: string;
whisperVadModel?: string;
whisperThreads?: number;
fixWithAi?: boolean;
ai?: LauncherAiConfig;
primarySubLanguages?: string[];
secondarySubLanguages?: string[];
jimakuApiKey?: string;
@@ -144,13 +174,15 @@ export interface SubtitleCandidate {
lang: 'primary' | 'secondary';
ext: string;
size: number;
source: 'manual' | 'auto' | 'whisper' | 'whisper-translate';
source: 'manual' | 'whisper' | 'whisper-fixed' | 'whisper-translate' | 'whisper-translate-fixed';
}
/**
 * Result of the YouTube subtitle-generation pipeline: output subtitle file
 * paths per track, keyed off a shared basename.
 */
export interface YoutubeSubgenOutputs {
  // Common output file stem (video id based).
  basename: string;
  primaryPath?: string;
  secondaryPath?: string;
  // NOTE(review): flags appear to mark tracks that came from a native
  // (non-Whisper) source — confirm against the pipeline that sets them.
  primaryNative?: boolean;
  secondaryNative?: boolean;
}
export interface MpvTrack {

View File

@@ -18,14 +18,139 @@ export function isExecutable(filePath: string): boolean {
}
}
export function commandExists(command: string): boolean {
const pathEnv = process.env.PATH ?? '';
function isRunnableFile(filePath: string): boolean {
try {
if (!fs.statSync(filePath).isFile()) return false;
return process.platform === 'win32' ? true : isExecutable(filePath);
} catch {
return false;
}
}
function isPathLikeCommand(command: string): boolean {
return (
command.includes('/') ||
command.includes('\\') ||
/^[A-Za-z]:[\\/]/.test(command) ||
command.startsWith('.')
);
}
function getWindowsPathExts(): string[] {
const raw = process.env.PATHEXT ?? '.COM;.EXE;.BAT;.CMD';
return raw
.split(';')
.map((entry) => entry.trim())
.filter((entry) => entry.length > 0);
}
export function getPathEnv(): string {
const pathKey = Object.keys(process.env).find((key) => key.toLowerCase() === 'path');
return pathKey ? (process.env[pathKey] ?? '') : '';
}
function resolveExecutablePath(command: string): string | null {
const tryCandidate = (candidate: string): string | null =>
isRunnableFile(candidate) ? candidate : null;
const resolveWindowsCandidate = (candidate: string): string | null => {
const direct = tryCandidate(candidate);
if (direct) return direct;
if (path.extname(candidate)) return null;
for (const ext of getWindowsPathExts()) {
const withExt = tryCandidate(`${candidate}${ext}`);
if (withExt) return withExt;
}
return null;
};
if (isPathLikeCommand(command)) {
const resolved = path.resolve(resolvePathMaybe(command));
return process.platform === 'win32' ? resolveWindowsCandidate(resolved) : tryCandidate(resolved);
}
const pathEnv = getPathEnv();
for (const dir of pathEnv.split(path.delimiter)) {
if (!dir) continue;
const full = path.join(dir, command);
if (isExecutable(full)) return true;
const candidate = path.join(dir, command);
const resolved =
process.platform === 'win32' ? resolveWindowsCandidate(candidate) : tryCandidate(candidate);
if (resolved) return resolved;
}
return false;
return null;
}
/**
 * Converts a Windows path into WSL form: backslashes become forward
 * slashes and a drive prefix such as `C:/` maps to `/mnt/c/`.
 */
function normalizeWindowsBashArg(value: string): string {
  const slashed = value.replace(/\\/g, '/');
  const match = /^([A-Za-z]):\/(.*)$/.exec(slashed);
  if (match) {
    return `/mnt/${match[1]!.toLowerCase()}/${match[2]}`;
  }
  return slashed;
}
/**
 * Locates Git Bash on Windows: first the two well-known install
 * locations, then paths inferred relative to a `git` found on PATH.
 * Returns null when no runnable bash.exe is found.
 */
function resolveGitBashExecutable(): string | null {
  const wellKnown = [
    'C:\\Program Files\\Git\\bin\\bash.exe',
    'C:\\Program Files\\Git\\usr\\bin\\bash.exe',
  ];
  const direct = wellKnown.find((candidate) => isRunnableFile(candidate));
  if (direct) return direct;
  const gitPath = resolveExecutablePath('git');
  if (gitPath === null) return null;
  // bash.exe normally sits in bin/ or usr/bin/ next to git's directory.
  const installDir = path.dirname(gitPath);
  const inferred = [
    path.resolve(installDir, '..', 'bin', 'bash.exe'),
    path.resolve(installDir, '..', 'usr', 'bin', 'bash.exe'),
  ].find((candidate) => isRunnableFile(candidate));
  return inferred ?? null;
}
/**
 * Picks the bash interpreter to use on Windows: Git Bash when installed,
 * otherwise whatever `bash` resolves on PATH (treated as WSL bash, with
 * the bare name as a last-resort fallback).
 */
function resolveWindowsBashTarget(): {
  command: string;
  flavor: 'git' | 'wsl';
} {
  const gitBash = resolveGitBashExecutable();
  return gitBash !== null
    ? { command: gitBash, flavor: 'git' }
    : { command: resolveExecutablePath('bash') ?? 'bash', flavor: 'wsl' };
}
/**
 * Rewrites a path-like argument for the selected bash flavor: Git Bash
 * only needs forward slashes; WSL additionally needs the /mnt/<drive>/
 * mapping. Non-path arguments pass through untouched.
 */
function normalizeWindowsShellArg(value: string, flavor: 'git' | 'wsl'): string {
  if (isPathLikeCommand(value)) {
    if (flavor === 'git') {
      return value.replace(/\\/g, '/');
    }
    return normalizeWindowsBashArg(value);
  }
  return value;
}
/**
 * Reads the first line (from the first 160 bytes) of a file so callers
 * can detect a shebang interpreter. Returns '' when the file cannot be
 * opened or read.
 */
function readShebang(filePath: string): string {
  try {
    const fd = fs.openSync(filePath, 'r');
    try {
      const head = Buffer.alloc(160);
      const byteCount = fs.readSync(fd, head, 0, head.length, 0);
      const [firstLine] = head.toString('utf8', 0, byteCount).split(/\r?\n/, 1);
      return firstLine ?? '';
    } finally {
      fs.closeSync(fd);
    }
  } catch {
    return '';
  }
}
/** True when the command resolves to a runnable executable (see resolveExecutablePath). */
export function commandExists(command: string): boolean {
  return Boolean(resolveExecutablePath(command));
}
export function resolvePathMaybe(input: string): string {
@@ -116,6 +241,51 @@ export function inferWhisperLanguage(langCodes: string[], fallback: string): str
return fallback;
}
/**
 * Rewrites an (executable, args) pair into something spawn() can run on
 * the current platform. Non-Windows hosts pass through untouched. On
 * Windows: `.ps1` scripts are routed through powershell.exe, and shell
 * scripts (`.sh` extension, or an extensionless file with a sh/bash
 * shebang) are routed through Git Bash or WSL bash with path-like
 * arguments rewritten to that shell's path convention. Anything else
 * (e.g. `.exe`, `.cmd`) is spawned directly at its resolved path.
 */
export function resolveCommandInvocation(
  executable: string,
  args: string[],
): { command: string; args: string[] } {
  if (process.platform !== 'win32') {
    return { command: executable, args };
  }
  // Fall back to the raw name if PATH/PATHEXT resolution finds nothing.
  const resolvedExecutable = resolveExecutablePath(executable) ?? executable;
  const extension = path.extname(resolvedExecutable).toLowerCase();
  if (extension === '.ps1') {
    // -ExecutionPolicy Bypass so unsigned scripts still run.
    return {
      command: 'powershell.exe',
      args: ['-NoProfile', '-ExecutionPolicy', 'Bypass', '-File', resolvedExecutable, ...args],
    };
  }
  if (extension === '.sh') {
    const bashTarget = resolveWindowsBashTarget();
    return {
      command: bashTarget.command,
      args: [
        normalizeWindowsShellArg(resolvedExecutable, bashTarget.flavor),
        ...args.map((arg) => normalizeWindowsShellArg(arg, bashTarget.flavor)),
      ],
    };
  }
  if (!extension) {
    // Extensionless file: sniff the shebang to detect a shell script.
    const shebang = readShebang(resolvedExecutable);
    if (/^#!.*\b(?:sh|bash)\b/i.test(shebang)) {
      const bashTarget = resolveWindowsBashTarget();
      return {
        command: bashTarget.command,
        args: [
          normalizeWindowsShellArg(resolvedExecutable, bashTarget.flavor),
          ...args.map((arg) => normalizeWindowsShellArg(arg, bashTarget.flavor)),
        ],
      };
    }
  }
  return { command: resolvedExecutable, args };
}
export function runExternalCommand(
executable: string,
args: string[],
@@ -129,8 +299,13 @@ export function runExternalCommand(
const streamOutput = opts.streamOutput === true;
return new Promise((resolve, reject) => {
log('debug', configuredLogLevel, `[${commandLabel}] spawn: ${executable} ${args.join(' ')}`);
const child = spawn(executable, args, {
const target = resolveCommandInvocation(executable, args);
log(
'debug',
configuredLogLevel,
`[${commandLabel}] spawn: ${target.command} ${target.args.join(' ')}`,
);
const child = spawn(target.command, target.args, {
stdio: ['ignore', 'pipe', 'pipe'],
env: { ...process.env, ...opts.env },
});
@@ -201,7 +376,7 @@ export function runExternalCommand(
`[${commandLabel}] exit code ${code ?? 1}`,
);
if (code !== 0 && !allowFailure) {
const commandString = `${executable} ${args.join(' ')}`;
const commandString = `${target.command} ${target.args.join(' ')}`;
reject(
new Error(`Command failed (${commandString}): ${stderr.trim() || `exit code ${code}`}`),
);

View File

@@ -1,467 +1 @@
import fs from 'node:fs';
import path from 'node:path';
import os from 'node:os';
import type { Args, SubtitleCandidate, YoutubeSubgenOutputs } from './types.js';
import { YOUTUBE_SUB_EXTENSIONS, YOUTUBE_AUDIO_EXTENSIONS } from './types.js';
import { log } from './log.js';
import {
resolvePathMaybe,
uniqueNormalizedLangCodes,
escapeRegExp,
normalizeBasename,
runExternalCommand,
commandExists,
} from './util.js';
import { state } from './mpv.js';
/** Builds a yt-dlp --sub-langs pattern: each code becomes "<code>.*", comma-joined. */
function toYtdlpLangPattern(langCodes: string[]): string {
  const patterns: string[] = [];
  for (const lang of langCodes) {
    patterns.push(`${lang}.*`);
  }
  return patterns.join(',');
}
/**
 * True when the (already lowercased) filename contains langCode as a
 * distinct tag, i.e. delimited by '.', '_', '-', or a string boundary.
 */
function filenameHasLanguageTag(filenameLower: string, langCode: string): boolean {
  const boundary = '[._-]';
  const tagPattern = new RegExp(`(^|${boundary})${escapeRegExp(langCode)}(${boundary}|$)`);
  return tagPattern.test(filenameLower);
}
/**
 * Classifies a subtitle filename as 'primary' or 'secondary' by its
 * embedded language tag. Returns null when the tags are ambiguous
 * (matching both sets) or absent (matching neither).
 */
function classifyLanguage(
  filename: string,
  primaryLangCodes: string[],
  secondaryLangCodes: string[],
): 'primary' | 'secondary' | null {
  const lower = filename.toLowerCase();
  const matchesAny = (codes: string[]): boolean =>
    codes.some((code) => filenameHasLanguageTag(lower, code));
  const isPrimary = matchesAny(primaryLangCodes);
  const isSecondary = matchesAny(secondaryLangCodes);
  if (isPrimary === isSecondary) {
    return null;
  }
  return isPrimary ? 'primary' : 'secondary';
}
/** First normalized language code from the list, or the fallback label. */
function preferredLangLabel(langCodes: string[], fallback: string): string {
  const [first] = uniqueNormalizedLangCodes(langCodes);
  return first || fallback;
}
/**
 * Maps a subtitle source to the tag embedded in the published filename:
 * yt-dlp sources keep their name prefixed with 'ytdlp-', whisper-translate
 * stays as-is, and every other whisper variant collapses to 'whisper'.
 */
function sourceTag(source: SubtitleCandidate['source']): string {
  switch (source) {
    case 'manual':
    case 'auto':
      return `ytdlp-${source}`;
    case 'whisper-translate':
      return 'whisper-translate';
    default:
      return 'whisper';
  }
}
/**
 * Picks the best subtitle candidate: manual uploads beat everything,
 * then .srt files, then larger files. Returns null for an empty list.
 */
function pickBestCandidate(candidates: SubtitleCandidate[]): SubtitleCandidate | null {
  if (candidates.length === 0) {
    return null;
  }
  const rank = (entry: SubtitleCandidate): [number, number, number] => [
    entry.source === 'manual' ? 1 : 0,
    entry.ext === '.srt' ? 1 : 0,
    entry.size,
  ];
  const ordered = [...candidates].sort((a, b) => {
    const [aManual, aSrt, aSize] = rank(a);
    const [bManual, bSrt, bSize] = rank(b);
    if (aManual !== bManual) return bManual - aManual;
    if (aSrt !== bSrt) return bSrt - aSrt;
    return bSize - aSize;
  });
  return ordered[0] ?? null;
}
/**
 * Scans tempDir for subtitle files that appeared since the previous scan
 * (paths already in knownSet are skipped) and classifies each as primary
 * or secondary by the language tag in its filename. Files with an
 * unsupported extension or an ambiguous/absent language tag are ignored.
 */
function scanSubtitleCandidates(
  tempDir: string,
  knownSet: Set<string>,
  source: 'manual' | 'auto',
  primaryLangCodes: string[],
  secondaryLangCodes: string[],
): SubtitleCandidate[] {
  const entries = fs.readdirSync(tempDir);
  const out: SubtitleCandidate[] = [];
  for (const name of entries) {
    const fullPath = path.join(tempDir, name);
    if (knownSet.has(fullPath)) continue;
    let stat: fs.Stats;
    try {
      stat = fs.statSync(fullPath);
    } catch {
      // File may have vanished mid-scan (e.g. removed by yt-dlp); skip it.
      continue;
    }
    if (!stat.isFile()) continue;
    const ext = path.extname(fullPath).toLowerCase();
    if (!YOUTUBE_SUB_EXTENSIONS.has(ext)) continue;
    const lang = classifyLanguage(name, primaryLangCodes, secondaryLangCodes);
    if (!lang) continue;
    out.push({ path: fullPath, lang, ext, size: stat.size, source });
  }
  return out;
}
/**
 * Ensures a subtitle file is in SRT format: returns the input unchanged
 * when it already is, otherwise converts it via ffmpeg into tempDir and
 * returns the converted path.
 */
async function convertToSrt(
  inputPath: string,
  tempDir: string,
  langLabel: string,
): Promise<string> {
  const alreadySrt = path.extname(inputPath).toLowerCase() === '.srt';
  if (alreadySrt) {
    return inputPath;
  }
  const convertedPath = path.join(tempDir, `converted.${langLabel}.srt`);
  await runExternalCommand('ffmpeg', ['-y', '-loglevel', 'error', '-i', inputPath, convertedPath]);
  return convertedPath;
}
/**
 * Finds a downloaded audio file in tempDir: prefers one matching
 * preferredExt, otherwise the most recently modified audio file.
 * Returns null when no audio file is present.
 */
function findAudioFile(tempDir: string, preferredExt: string): string | null {
  const wantedExt = `.${preferredExt.toLowerCase()}`;
  const found: Array<{ path: string; ext: string; mtimeMs: number }> = [];
  for (const name of fs.readdirSync(tempDir)) {
    const fullPath = path.join(tempDir, name);
    let stat: fs.Stats;
    try {
      stat = fs.statSync(fullPath);
    } catch {
      // Entry disappeared between readdir and stat; ignore it.
      continue;
    }
    if (!stat.isFile()) continue;
    const ext = path.extname(name).toLowerCase();
    if (!YOUTUBE_AUDIO_EXTENSIONS.has(ext)) continue;
    found.push({ path: fullPath, ext, mtimeMs: stat.mtimeMs });
  }
  if (found.length === 0) {
    return null;
  }
  const exactMatch = found.find((entry) => entry.ext === wantedExt);
  if (exactMatch) {
    return exactMatch.path;
  }
  const newestFirst = [...found].sort((a, b) => b.mtimeMs - a.mtimeMs);
  return newestFirst[0]?.path ?? null;
}
/**
 * Runs whisper-cli to transcribe (or, with translate=true, translate)
 * an audio file into `${outputPrefix}.srt`. Throws when whisper exits
 * without producing the expected output file.
 */
async function runWhisper(
  whisperBin: string,
  modelPath: string,
  audioPath: string,
  language: string,
  translate: boolean,
  outputPrefix: string,
): Promise<string> {
  const whisperArgs = [
    '-m',
    modelPath,
    '-f',
    audioPath,
    '--output-srt',
    '--output-file',
    outputPrefix,
    '--language',
    language,
  ];
  if (translate) {
    whisperArgs.push('--translate');
  }
  await runExternalCommand(whisperBin, whisperArgs, {
    commandLabel: 'whisper',
    streamOutput: true,
  });
  const srtPath = `${outputPrefix}.srt`;
  if (!fs.existsSync(srtPath)) {
    throw new Error(`whisper output not found: ${srtPath}`);
  }
  return srtPath;
}
/**
 * Re-encodes arbitrary downloaded audio into the 16 kHz mono PCM WAV
 * that whisper expects, writing it into tempDir. Throws when ffmpeg
 * fails to produce the output file.
 */
async function convertAudioForWhisper(inputPath: string, tempDir: string): Promise<string> {
  const outputWav = path.join(tempDir, 'whisper-input.wav');
  const ffmpegArgs = [
    '-y',
    '-loglevel',
    'error',
    '-i',
    inputPath,
    '-ar',
    '16000',
    '-ac',
    '1',
    '-c:a',
    'pcm_s16le',
    outputWav,
  ];
  await runExternalCommand('ffmpeg', ffmpegArgs);
  if (!fs.existsSync(outputWav)) {
    throw new Error(`Failed to prepare whisper audio input: ${outputWav}`);
  }
  return outputWav;
}
/**
 * Resolves the whisper binary to invoke: an explicitly configured path
 * wins; otherwise 'whisper-cli' from PATH when available; otherwise null.
 */
export function resolveWhisperBinary(args: Args): string | null {
  const configured = args.whisperBin.trim();
  if (configured.length > 0) {
    return resolvePathMaybe(configured);
  }
  return commandExists('whisper-cli') ? 'whisper-cli' : null;
}
export async function generateYoutubeSubtitles(
target: string,
args: Args,
onReady?: (lang: 'primary' | 'secondary', pathToLoad: string) => Promise<void>,
): Promise<YoutubeSubgenOutputs> {
const outDir = path.resolve(resolvePathMaybe(args.youtubeSubgenOutDir));
fs.mkdirSync(outDir, { recursive: true });
const primaryLangCodes = uniqueNormalizedLangCodes(args.youtubePrimarySubLangs);
const secondaryLangCodes = uniqueNormalizedLangCodes(args.youtubeSecondarySubLangs);
const primaryLabel = preferredLangLabel(primaryLangCodes, 'primary');
const secondaryLabel = preferredLangLabel(secondaryLangCodes, 'secondary');
const secondaryCanUseWhisperTranslate =
secondaryLangCodes.includes('en') || secondaryLangCodes.includes('eng');
const ytdlpManualLangs = toYtdlpLangPattern([...primaryLangCodes, ...secondaryLangCodes]);
const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-yt-subgen-'));
const knownFiles = new Set<string>();
let keepTemp = args.youtubeSubgenKeepTemp;
const publishTrack = async (
lang: 'primary' | 'secondary',
source: SubtitleCandidate['source'],
selectedPath: string,
basename: string,
): Promise<string> => {
const langLabel = lang === 'primary' ? primaryLabel : secondaryLabel;
const taggedPath = path.join(outDir, `${basename}.${langLabel}.${sourceTag(source)}.srt`);
const aliasPath = path.join(outDir, `${basename}.${langLabel}.srt`);
fs.copyFileSync(selectedPath, taggedPath);
fs.copyFileSync(taggedPath, aliasPath);
log('info', args.logLevel, `Generated subtitle (${langLabel}, ${source}) -> ${aliasPath}`);
if (onReady) await onReady(lang, aliasPath);
return aliasPath;
};
try {
log('debug', args.logLevel, `YouTube subtitle temp dir: ${tempDir}`);
const meta = await runExternalCommand(
'yt-dlp',
['--dump-single-json', '--no-warnings', target],
{
captureStdout: true,
logLevel: args.logLevel,
commandLabel: 'yt-dlp:meta',
},
state.youtubeSubgenChildren,
);
const metadata = JSON.parse(meta.stdout) as { id?: string };
const videoId = metadata.id || `${Date.now()}`;
const basename = normalizeBasename(videoId, videoId);
await runExternalCommand(
'yt-dlp',
[
'--skip-download',
'--no-warnings',
'--write-subs',
'--sub-format',
'srt/vtt/best',
'--sub-langs',
ytdlpManualLangs,
'-o',
path.join(tempDir, '%(id)s.%(ext)s'),
target,
],
{
allowFailure: true,
logLevel: args.logLevel,
commandLabel: 'yt-dlp:manual-subs',
streamOutput: true,
},
state.youtubeSubgenChildren,
);
const manualSubs = scanSubtitleCandidates(
tempDir,
knownFiles,
'manual',
primaryLangCodes,
secondaryLangCodes,
);
for (const sub of manualSubs) knownFiles.add(sub.path);
let primaryCandidates = manualSubs.filter((entry) => entry.lang === 'primary');
let secondaryCandidates = manualSubs.filter((entry) => entry.lang === 'secondary');
const missingAuto: string[] = [];
if (primaryCandidates.length === 0) missingAuto.push(toYtdlpLangPattern(primaryLangCodes));
if (secondaryCandidates.length === 0) missingAuto.push(toYtdlpLangPattern(secondaryLangCodes));
if (missingAuto.length > 0) {
await runExternalCommand(
'yt-dlp',
[
'--skip-download',
'--no-warnings',
'--write-auto-subs',
'--sub-format',
'srt/vtt/best',
'--sub-langs',
missingAuto.join(','),
'-o',
path.join(tempDir, '%(id)s.%(ext)s'),
target,
],
{
allowFailure: true,
logLevel: args.logLevel,
commandLabel: 'yt-dlp:auto-subs',
streamOutput: true,
},
state.youtubeSubgenChildren,
);
const autoSubs = scanSubtitleCandidates(
tempDir,
knownFiles,
'auto',
primaryLangCodes,
secondaryLangCodes,
);
for (const sub of autoSubs) knownFiles.add(sub.path);
primaryCandidates = primaryCandidates.concat(
autoSubs.filter((entry) => entry.lang === 'primary'),
);
secondaryCandidates = secondaryCandidates.concat(
autoSubs.filter((entry) => entry.lang === 'secondary'),
);
}
let primaryAlias = '';
let secondaryAlias = '';
const selectedPrimary = pickBestCandidate(primaryCandidates);
const selectedSecondary = pickBestCandidate(secondaryCandidates);
if (selectedPrimary) {
const srt = await convertToSrt(selectedPrimary.path, tempDir, primaryLabel);
primaryAlias = await publishTrack('primary', selectedPrimary.source, srt, basename);
}
if (selectedSecondary) {
const srt = await convertToSrt(selectedSecondary.path, tempDir, secondaryLabel);
secondaryAlias = await publishTrack('secondary', selectedSecondary.source, srt, basename);
}
const needsPrimaryWhisper = !selectedPrimary;
const needsSecondaryWhisper = !selectedSecondary && secondaryCanUseWhisperTranslate;
if (needsPrimaryWhisper || needsSecondaryWhisper) {
const whisperBin = resolveWhisperBinary(args);
const modelPath = args.whisperModel.trim()
? path.resolve(resolvePathMaybe(args.whisperModel.trim()))
: '';
const hasWhisperFallback = !!whisperBin && !!modelPath && fs.existsSync(modelPath);
if (!hasWhisperFallback) {
log(
'warn',
args.logLevel,
'Whisper fallback is not configured; continuing with available subtitle tracks.',
);
} else {
try {
await runExternalCommand(
'yt-dlp',
[
'-f',
'bestaudio/best',
'--extract-audio',
'--audio-format',
args.youtubeSubgenAudioFormat,
'--no-warnings',
'-o',
path.join(tempDir, '%(id)s.%(ext)s'),
target,
],
{
logLevel: args.logLevel,
commandLabel: 'yt-dlp:audio',
streamOutput: true,
},
state.youtubeSubgenChildren,
);
const audioPath = findAudioFile(tempDir, args.youtubeSubgenAudioFormat);
if (!audioPath) {
throw new Error('Audio extraction succeeded, but no audio file was found.');
}
const whisperAudioPath = await convertAudioForWhisper(audioPath, tempDir);
if (needsPrimaryWhisper) {
try {
const primaryPrefix = path.join(tempDir, `${basename}.${primaryLabel}`);
const primarySrt = await runWhisper(
whisperBin!,
modelPath,
whisperAudioPath,
args.youtubeWhisperSourceLanguage,
false,
primaryPrefix,
);
primaryAlias = await publishTrack('primary', 'whisper', primarySrt, basename);
} catch (error) {
log(
'warn',
args.logLevel,
`Failed to generate primary subtitle via whisper fallback: ${(error as Error).message}`,
);
}
}
if (needsSecondaryWhisper) {
try {
const secondaryPrefix = path.join(tempDir, `${basename}.${secondaryLabel}`);
const secondarySrt = await runWhisper(
whisperBin!,
modelPath,
whisperAudioPath,
args.youtubeWhisperSourceLanguage,
true,
secondaryPrefix,
);
secondaryAlias = await publishTrack(
'secondary',
'whisper-translate',
secondarySrt,
basename,
);
} catch (error) {
log(
'warn',
args.logLevel,
`Failed to generate secondary subtitle via whisper fallback: ${(error as Error).message}`,
);
}
}
} catch (error) {
log(
'warn',
args.logLevel,
`Whisper fallback pipeline failed: ${(error as Error).message}`,
);
}
}
}
if (!secondaryCanUseWhisperTranslate && !selectedSecondary) {
log(
'warn',
args.logLevel,
`Secondary subtitle language (${secondaryLabel}) has no whisper translate fallback; relying on yt-dlp subtitles only.`,
);
}
if (!primaryAlias && !secondaryAlias) {
throw new Error('Failed to generate any subtitle tracks.');
}
if (!primaryAlias || !secondaryAlias) {
log(
'warn',
args.logLevel,
`Generated partial subtitle result: primary=${primaryAlias ? 'ok' : 'missing'}, secondary=${secondaryAlias ? 'ok' : 'missing'}`,
);
}
return {
basename,
primaryPath: primaryAlias || undefined,
secondaryPath: secondaryAlias || undefined,
};
} catch (error) {
keepTemp = true;
throw error;
} finally {
if (keepTemp) {
log('warn', args.logLevel, `Keeping subtitle temp dir: ${tempDir}`);
} else {
try {
fs.rmSync(tempDir, { recursive: true, force: true });
} catch {
// ignore cleanup failures
}
}
}
}
// Barrel re-export: the YouTube subtitle pipeline now lives in ./youtube/orchestrator.
export { generateYoutubeSubtitles, resolveWhisperBinary } from './youtube/orchestrator.js';

View File

@@ -0,0 +1,84 @@
import fs from 'node:fs';
import path from 'node:path';
import type { Args } from '../types.js';
import { YOUTUBE_AUDIO_EXTENSIONS } from '../types.js';
import { runExternalCommand } from '../util.js';
/**
 * Locate an extracted audio file inside the yt-dlp temp directory.
 *
 * Prefers a file whose extension matches `preferredExt`; otherwise falls back
 * to the most recently modified audio file. Returns null when no audio file
 * with a recognized extension exists.
 */
export function findAudioFile(tempDir: string, preferredExt: string): string | null {
  const wantedExt = `.${preferredExt.toLowerCase()}`;
  const candidates: Array<{ path: string; ext: string; mtimeMs: number }> = [];
  for (const entryName of fs.readdirSync(tempDir)) {
    const candidatePath = path.join(tempDir, entryName);
    let info: fs.Stats;
    try {
      info = fs.statSync(candidatePath);
    } catch {
      // Entry vanished between readdir and stat; ignore it.
      continue;
    }
    if (!info.isFile()) continue;
    const extension = path.extname(entryName).toLowerCase();
    if (!YOUTUBE_AUDIO_EXTENSIONS.has(extension)) continue;
    candidates.push({ path: candidatePath, ext: extension, mtimeMs: info.mtimeMs });
  }
  const exactMatch = candidates.find((candidate) => candidate.ext === wantedExt);
  if (exactMatch) return exactMatch.path;
  // No exact extension match: take the newest audio file instead.
  const newestFirst = [...candidates].sort((a, b) => b.mtimeMs - a.mtimeMs);
  return newestFirst[0]?.path ?? null;
}
/**
 * Transcode a downloaded audio file into the layout whisper.cpp expects
 * (16 kHz, mono, signed 16-bit PCM WAV), writing `whisper-input.wav` into the
 * temp directory. Throws when ffmpeg finishes without producing the file.
 */
export async function convertAudioForWhisper(inputPath: string, tempDir: string): Promise<string> {
  const outputPath = path.join(tempDir, 'whisper-input.wav');
  // 16 kHz mono s16le PCM is the canonical whisper.cpp input format.
  const ffmpegArgs = [
    '-y',
    '-loglevel',
    'error',
    '-i',
    inputPath,
    '-ar',
    '16000',
    '-ac',
    '1',
    '-c:a',
    'pcm_s16le',
    outputPath,
  ];
  await runExternalCommand('ffmpeg', ffmpegArgs);
  if (!fs.existsSync(outputPath)) {
    throw new Error(`Failed to prepare whisper audio input: ${outputPath}`);
  }
  return outputPath;
}
/**
 * Download the best available audio track for a YouTube target via yt-dlp and
 * return the extracted file's path.
 *
 * @param target URL (or anything yt-dlp accepts) to download audio for.
 * @param args Launcher config; `youtubeSubgenAudioFormat` selects the format.
 * @param tempDir Working directory the file is written into.
 * @param childTracker Optional set used to track spawned child processes.
 * @throws Error when yt-dlp reports success but no audio file is found.
 */
export async function downloadYoutubeAudio(
  target: string,
  args: Args,
  tempDir: string,
  childTracker?: Set<ReturnType<typeof import('node:child_process').spawn>>,
): Promise<string> {
  const ytdlpArgs = [
    '-f',
    'bestaudio/best',
    '--extract-audio',
    '--audio-format',
    args.youtubeSubgenAudioFormat,
    '--no-warnings',
    '-o',
    path.join(tempDir, '%(id)s.%(ext)s'),
    target,
  ];
  await runExternalCommand(
    'yt-dlp',
    ytdlpArgs,
    { logLevel: args.logLevel, commandLabel: 'yt-dlp:audio', streamOutput: true },
    childTracker,
  );
  const extractedPath = findAudioFile(tempDir, args.youtubeSubgenAudioFormat);
  if (!extractedPath) {
    throw new Error('Audio extraction succeeded, but no audio file was found.');
  }
  return extractedPath;
}

View File

@@ -0,0 +1,99 @@
import fs from 'node:fs';
import path from 'node:path';
import type { SubtitleCandidate } from '../types.js';
import { YOUTUBE_SUB_EXTENSIONS } from '../types.js';
import { escapeRegExp, runExternalCommand } from '../util.js';
// True when `langCode` appears in the (already lowercased) filename as a
// delimited token, i.e. bounded by start/end of string or '.', '_', '-'.
function filenameHasLanguageTag(filenameLower: string, langCode: string): boolean {
  const tagPattern = new RegExp(`(^|[._-])${escapeRegExp(langCode)}([._-]|$)`);
  return tagPattern.test(filenameLower);
}
/**
 * Classify a subtitle filename as 'primary' or 'secondary' by its language
 * tag. Returns null when the name matches neither language set, or matches
 * both (ambiguous).
 */
function classifyLanguage(
  filename: string,
  primaryLangCodes: string[],
  secondaryLangCodes: string[],
): 'primary' | 'secondary' | null {
  const lowerName = filename.toLowerCase();
  const matchesPrimary = primaryLangCodes.some((code) => filenameHasLanguageTag(lowerName, code));
  const matchesSecondary = secondaryLangCodes.some((code) =>
    filenameHasLanguageTag(lowerName, code),
  );
  // Equal flags mean "neither" or "both" — both are unclassifiable.
  if (matchesPrimary === matchesSecondary) return null;
  return matchesPrimary ? 'primary' : 'secondary';
}
/**
 * Build a yt-dlp `--sub-langs` value that matches each code plus its regional
 * variants (e.g. 'en' -> 'en.*', matching en-US, en-GB, ...).
 */
export function toYtdlpLangPattern(langCodes: string[]): string {
  const patterns: string[] = [];
  for (const code of langCodes) {
    patterns.push(`${code}.*`);
  }
  return patterns.join(',');
}
/**
 * Scan `tempDir` for subtitle files that have not been seen before and
 * classify each one as primary/secondary by its filename language tag.
 *
 * @param knownSet Absolute paths already consumed by an earlier scan; skipped.
 * @param source Source tag recorded on every candidate produced by this scan.
 * @returns Newly discovered, classifiable subtitle candidates.
 */
export function scanSubtitleCandidates(
  tempDir: string,
  knownSet: Set<string>,
  source: SubtitleCandidate['source'],
  primaryLangCodes: string[],
  secondaryLangCodes: string[],
): SubtitleCandidate[] {
  const found: SubtitleCandidate[] = [];
  for (const entryName of fs.readdirSync(tempDir)) {
    const candidatePath = path.join(tempDir, entryName);
    if (knownSet.has(candidatePath)) continue;
    let info: fs.Stats;
    try {
      info = fs.statSync(candidatePath);
    } catch {
      // Entry vanished between readdir and stat; ignore it.
      continue;
    }
    if (!info.isFile()) continue;
    const extension = path.extname(candidatePath).toLowerCase();
    if (!YOUTUBE_SUB_EXTENSIONS.has(extension)) continue;
    const lang = classifyLanguage(entryName, primaryLangCodes, secondaryLangCodes);
    if (lang === null) continue;
    found.push({ path: candidatePath, lang, ext: extension, size: info.size, source });
  }
  return found;
}
/**
 * Choose the best subtitle candidate: .srt files beat other formats, and
 * among equals the larger file wins (ties keep the earlier candidate).
 * Returns null for an empty list; never mutates the input array.
 */
export function pickBestCandidate(candidates: SubtitleCandidate[]): SubtitleCandidate | null {
  let best: SubtitleCandidate | null = null;
  for (const candidate of candidates) {
    if (!best) {
      best = candidate;
      continue;
    }
    const bestIsSrt = best.ext === '.srt';
    const candidateIsSrt = candidate.ext === '.srt';
    if (candidateIsSrt !== bestIsSrt) {
      // Format preference dominates size.
      if (candidateIsSrt) best = candidate;
      continue;
    }
    if (candidate.size > best.size) best = candidate;
  }
  return best;
}
/**
 * Ask yt-dlp for author-provided (non auto-generated) subtitle tracks that
 * match `langPattern`, writing them into `tempDir`.
 *
 * Runs with `allowFailure` — a video without matching manual subtitles is a
 * normal outcome that the caller handles by scanning for downloaded files.
 */
export async function downloadManualSubtitles(
  target: string,
  tempDir: string,
  langPattern: string,
  logLevel: import('../types.js').LogLevel,
  childTracker?: Set<ReturnType<typeof import('node:child_process').spawn>>,
): Promise<void> {
  const ytdlpArgs = [
    '--skip-download',
    '--no-warnings',
    '--write-subs',
    '--sub-format',
    'srt/vtt/best',
    '--sub-langs',
    langPattern,
    '-o',
    path.join(tempDir, '%(id)s.%(ext)s'),
    target,
  ];
  await runExternalCommand(
    'yt-dlp',
    ytdlpArgs,
    { allowFailure: true, logLevel, commandLabel: 'yt-dlp:manual-subs', streamOutput: true },
    childTracker,
  );
}

View File

@@ -0,0 +1,58 @@
import test from 'node:test';
import assert from 'node:assert/strict';
import { planYoutubeSubtitleGeneration } from './orchestrator';
// These tests pin the planner's invariants: manual subtitles are always
// fetched, YouTube auto-captions are never fetched, manual tracks are never
// republished, and whisper generation only runs for missing tracks.
test('planYoutubeSubtitleGeneration prefers manual subtitles and never schedules auto-subs', () => {
  // Primary exists natively -> only the secondary needs generation.
  assert.deepEqual(
    planYoutubeSubtitleGeneration({
      hasPrimaryManualSubtitle: true,
      hasSecondaryManualSubtitle: false,
      secondaryCanTranslate: true,
    }),
    {
      fetchManualSubtitles: true,
      fetchAutoSubtitles: false,
      publishPrimaryManualSubtitle: false,
      publishSecondaryManualSubtitle: false,
      generatePrimarySubtitle: false,
      generateSecondarySubtitle: true,
    },
  );
});

test('planYoutubeSubtitleGeneration generates only missing tracks', () => {
  // Secondary exists natively -> only the primary needs generation.
  assert.deepEqual(
    planYoutubeSubtitleGeneration({
      hasPrimaryManualSubtitle: false,
      hasSecondaryManualSubtitle: true,
      secondaryCanTranslate: true,
    }),
    {
      fetchManualSubtitles: true,
      fetchAutoSubtitles: false,
      publishPrimaryManualSubtitle: false,
      publishSecondaryManualSubtitle: false,
      generatePrimarySubtitle: true,
      generateSecondarySubtitle: false,
    },
  );
});

test('planYoutubeSubtitleGeneration reuses manual tracks already present on the YouTube video', () => {
  // Both tracks exist natively -> nothing to generate at all.
  assert.deepEqual(
    planYoutubeSubtitleGeneration({
      hasPrimaryManualSubtitle: true,
      hasSecondaryManualSubtitle: true,
      secondaryCanTranslate: true,
    }),
    {
      fetchManualSubtitles: true,
      fetchAutoSubtitles: false,
      publishPrimaryManualSubtitle: false,
      publishSecondaryManualSubtitle: false,
      generatePrimarySubtitle: false,
      generateSecondarySubtitle: false,
    },
  );
});

View File

@@ -0,0 +1,367 @@
import fs from 'node:fs';
import os from 'node:os';
import path from 'node:path';
import type { Args, SubtitleCandidate, YoutubeSubgenOutputs } from '../types.js';
import { log } from '../log.js';
import {
commandExists,
normalizeBasename,
resolvePathMaybe,
runExternalCommand,
uniqueNormalizedLangCodes,
} from '../util.js';
import { state } from '../mpv.js';
import { downloadYoutubeAudio, convertAudioForWhisper } from './audio-extraction.js';
import {
downloadManualSubtitles,
pickBestCandidate,
scanSubtitleCandidates,
toYtdlpLangPattern,
} from './manual-subs.js';
import { runLoggedYoutubePhase } from './progress.js';
import { fixSubtitleWithAi } from './subtitle-fix-ai.js';
import { runWhisper } from './whisper.js';
/**
 * Static plan describing which pipeline phases should run for one video.
 * Manual subtitles are always fetched, YouTube auto-captions are never
 * fetched, and manual tracks are never republished as external copies — the
 * literal `true`/`false` field types encode those invariants at compile time.
 */
export interface YoutubeSubtitleGenerationPlan {
  fetchManualSubtitles: true;
  fetchAutoSubtitles: false;
  publishPrimaryManualSubtitle: false;
  publishSecondaryManualSubtitle: false;
  // Whisper transcription/translation runs only for tracks with no manual sub.
  generatePrimarySubtitle: boolean;
  generateSecondarySubtitle: boolean;
}
/**
 * Decide which generation phases are needed for one video.
 * A track is generated only when it has no manual subtitle; the secondary
 * track additionally requires whisper-translate support for its language.
 */
export function planYoutubeSubtitleGeneration(input: {
  hasPrimaryManualSubtitle: boolean;
  hasSecondaryManualSubtitle: boolean;
  secondaryCanTranslate: boolean;
}): YoutubeSubtitleGenerationPlan {
  const needsPrimary = !input.hasPrimaryManualSubtitle;
  const needsSecondary = !input.hasSecondaryManualSubtitle && input.secondaryCanTranslate;
  return {
    fetchManualSubtitles: true,
    fetchAutoSubtitles: false,
    publishPrimaryManualSubtitle: false,
    publishSecondaryManualSubtitle: false,
    generatePrimarySubtitle: needsPrimary,
    generateSecondarySubtitle: needsSecondary,
  };
}
// First normalized language code becomes the filename label; fall back to a
// generic label ('primary'/'secondary') when no codes are configured.
function preferredLangLabel(langCodes: string[], fallback: string): string {
  const [firstCode] = uniqueNormalizedLangCodes(langCodes);
  return firstCode || fallback;
}
// Maps a subtitle source to the tag embedded in output filenames. Identity
// today; presumably kept as a seam so filename tags can diverge from the
// internal source names later — TODO confirm before removing.
function sourceTag(source: SubtitleCandidate['source']): string {
  return source;
}
/**
 * Resolve the whisper binary to invoke.
 * An explicit `whisperBin` setting wins; otherwise fall back to `whisper-cli`
 * when it is available on PATH. Returns null when no binary can be found.
 */
export function resolveWhisperBinary(args: Args): string | null {
  const configured = args.whisperBin.trim();
  if (configured) return resolvePathMaybe(configured);
  return commandExists('whisper-cli') ? 'whisper-cli' : null;
}
/**
 * Optionally run the AI subtitle-fix pass over a generated SRT file.
 *
 * Returns the original path unchanged when the feature or AI config is
 * disabled, or when the fixer declines (returns empty). Otherwise writes the
 * corrected content next to the original as `<name>.fixed.srt` and returns
 * that path, keeping both variants inspectable on disk.
 */
async function maybeFixSubtitleWithAi(
  selectedPath: string,
  args: Args,
  expectedLanguage?: string,
): Promise<string> {
  const aiFixEnabled = args.youtubeFixWithAi && args.aiConfig.enabled === true;
  if (!aiFixEnabled) return selectedPath;
  const subtitleName = path.basename(selectedPath);
  const fixedContent = await runLoggedYoutubePhase(
    {
      startMessage: `Starting AI subtitle fix: ${subtitleName}`,
      finishMessage: `Finished AI subtitle fix: ${subtitleName}`,
      failureMessage: `AI subtitle fix failed: ${subtitleName}`,
      log: (level, message) => log(level, args.logLevel, message),
    },
    () =>
      fixSubtitleWithAi(
        fs.readFileSync(selectedPath, 'utf8'),
        args.aiConfig,
        (message) => log('warn', args.logLevel, message),
        expectedLanguage,
      ),
  );
  if (!fixedContent) return selectedPath;
  const fixedPath = selectedPath.replace(/\.srt$/i, '.fixed.srt');
  fs.writeFileSync(fixedPath, fixedContent, 'utf8');
  return fixedPath;
}
/**
 * Generate primary/secondary subtitle tracks for a YouTube target.
 *
 * Pipeline: probe metadata -> download author-provided (manual) subtitle
 * tracks -> plan which tracks still need generation -> fall back to whisper
 * transcription/translation (with optional AI cleanup) for missing tracks.
 * Tracks found natively on the video are NOT copied out; they are reported
 * via the returned `primaryNative`/`secondaryNative` flags instead.
 *
 * Fix vs. previous revision: the whisper fallback (audio download + ffmpeg
 * prep + transcription) is wrapped in a try/catch again, matching the
 * pre-refactor behavior — an audio-pipeline failure degrades to a warning
 * instead of aborting the whole run and discarding native tracks.
 *
 * @param target YouTube URL (or anything yt-dlp accepts).
 * @param args Launcher configuration (languages, whisper, AI fix, out dir).
 * @param onReady Optional callback fired as soon as each generated track is
 *   published, so playback can load it without waiting for the other track.
 * @returns Generated track paths plus native-track availability flags.
 * @throws Error when no track could be produced at all; the temp dir is kept
 *   for debugging in that case.
 */
export async function generateYoutubeSubtitles(
  target: string,
  args: Args,
  onReady?: (lang: 'primary' | 'secondary', pathToLoad: string) => Promise<void>,
): Promise<YoutubeSubgenOutputs> {
  const outDir = path.resolve(resolvePathMaybe(args.youtubeSubgenOutDir));
  fs.mkdirSync(outDir, { recursive: true });
  const primaryLangCodes = uniqueNormalizedLangCodes(args.youtubePrimarySubLangs);
  const secondaryLangCodes = uniqueNormalizedLangCodes(args.youtubeSecondarySubLangs);
  const primaryLabel = preferredLangLabel(primaryLangCodes, 'primary');
  const secondaryLabel = preferredLangLabel(secondaryLangCodes, 'secondary');
  // Whisper can only translate into English, so only an English secondary
  // language has a whisper-translate fallback.
  const secondaryCanUseWhisperTranslate =
    secondaryLangCodes.includes('en') || secondaryLangCodes.includes('eng');
  const manualLangs = toYtdlpLangPattern([...primaryLangCodes, ...secondaryLangCodes]);
  const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-yt-subgen-'));
  const knownFiles = new Set<string>();
  let keepTemp = args.youtubeSubgenKeepTemp;
  // Copy a selected subtitle into the output dir under both a source-tagged
  // name and a stable alias, then notify the caller that it is loadable.
  const publishTrack = async (
    lang: 'primary' | 'secondary',
    source: SubtitleCandidate['source'],
    selectedPath: string,
    basename: string,
  ): Promise<string> => {
    const langLabel = lang === 'primary' ? primaryLabel : secondaryLabel;
    const taggedPath = path.join(outDir, `${basename}.${langLabel}.${sourceTag(source)}.srt`);
    const aliasPath = path.join(outDir, `${basename}.${langLabel}.srt`);
    fs.copyFileSync(selectedPath, taggedPath);
    fs.copyFileSync(taggedPath, aliasPath);
    log('info', args.logLevel, `Generated subtitle (${langLabel}, ${source}) -> ${aliasPath}`);
    if (onReady) await onReady(lang, aliasPath);
    return aliasPath;
  };
  try {
    const meta = await runLoggedYoutubePhase(
      {
        startMessage: 'Starting YouTube metadata probe',
        finishMessage: 'Finished YouTube metadata probe',
        failureMessage: 'YouTube metadata probe failed',
        log: (level, message) => log(level, args.logLevel, message),
      },
      () =>
        runExternalCommand(
          'yt-dlp',
          ['--dump-single-json', '--no-warnings', target],
          {
            captureStdout: true,
            logLevel: args.logLevel,
            commandLabel: 'yt-dlp:meta',
          },
          state.youtubeSubgenChildren,
        ),
    );
    const metadata = JSON.parse(meta.stdout) as { id?: string };
    // Fall back to a timestamp when yt-dlp reports no video id.
    const videoId = metadata.id || `${Date.now()}`;
    const basename = normalizeBasename(videoId, videoId);
    await runLoggedYoutubePhase(
      {
        startMessage: `Starting manual subtitle probe (${manualLangs || 'requested langs'})`,
        finishMessage: 'Finished manual subtitle probe',
        failureMessage: 'Manual subtitle probe failed',
        log: (level, message) => log(level, args.logLevel, message),
      },
      () =>
        downloadManualSubtitles(
          target,
          tempDir,
          manualLangs,
          args.logLevel,
          state.youtubeSubgenChildren,
        ),
    );
    const manualSubs = scanSubtitleCandidates(
      tempDir,
      knownFiles,
      'manual',
      primaryLangCodes,
      secondaryLangCodes,
    );
    for (const sub of manualSubs) knownFiles.add(sub.path);
    const selectedPrimary = pickBestCandidate(
      manualSubs.filter((entry) => entry.lang === 'primary'),
    );
    const selectedSecondary = pickBestCandidate(
      manualSubs.filter((entry) => entry.lang === 'secondary'),
    );
    const plan = planYoutubeSubtitleGeneration({
      hasPrimaryManualSubtitle: Boolean(selectedPrimary),
      hasSecondaryManualSubtitle: Boolean(selectedSecondary),
      secondaryCanTranslate: secondaryCanUseWhisperTranslate,
    });
    let primaryAlias = '';
    let secondaryAlias = '';
    if (selectedPrimary) {
      log(
        'info',
        args.logLevel,
        `Using native YouTube subtitle track for primary (${primaryLabel}); skipping external subtitle copy.`,
      );
    }
    if (selectedSecondary) {
      log(
        'info',
        args.logLevel,
        `Using native YouTube subtitle track for secondary (${secondaryLabel}); skipping external subtitle copy.`,
      );
    }
    if (plan.generatePrimarySubtitle || plan.generateSecondarySubtitle) {
      const whisperBin = resolveWhisperBinary(args);
      const modelPath = args.whisperModel.trim()
        ? path.resolve(resolvePathMaybe(args.whisperModel.trim()))
        : '';
      const hasWhisperFallback = !!whisperBin && !!modelPath && fs.existsSync(modelPath);
      if (!hasWhisperFallback) {
        log(
          'warn',
          args.logLevel,
          'Whisper fallback is not configured; continuing with available subtitle tracks.',
        );
      } else {
        // Degrade gracefully: a failed audio download or ffmpeg prep must not
        // abort the whole run when native tracks were already found.
        try {
          const audioPath = await runLoggedYoutubePhase(
            {
              startMessage: 'Starting fallback audio extraction for subtitle generation',
              finishMessage: 'Finished fallback audio extraction',
              failureMessage: 'Fallback audio extraction failed',
              log: (level, message) => log(level, args.logLevel, message),
            },
            () =>
              downloadYoutubeAudio(target, args, tempDir, state.youtubeSubgenChildren),
          );
          const whisperAudioPath = await runLoggedYoutubePhase(
            {
              startMessage: 'Starting ffmpeg audio prep for whisper',
              finishMessage: 'Finished ffmpeg audio prep for whisper',
              failureMessage: 'ffmpeg audio prep for whisper failed',
              log: (level, message) => log(level, args.logLevel, message),
            },
            () => convertAudioForWhisper(audioPath, tempDir),
          );
          if (plan.generatePrimarySubtitle) {
            // Per-track failures are tolerated so the other track can still land.
            try {
              const primaryPrefix = path.join(tempDir, `${basename}.${primaryLabel}`);
              const primarySrt = await runLoggedYoutubePhase(
                {
                  startMessage: `Starting whisper primary subtitle generation (${primaryLabel})`,
                  finishMessage: `Finished whisper primary subtitle generation (${primaryLabel})`,
                  failureMessage: `Whisper primary subtitle generation failed (${primaryLabel})`,
                  log: (level, message) => log(level, args.logLevel, message),
                },
                () =>
                  runWhisper(whisperBin!, args, {
                    modelPath,
                    audioPath: whisperAudioPath,
                    language: args.youtubeWhisperSourceLanguage,
                    translate: false,
                    outputPrefix: primaryPrefix,
                  }),
              );
              const fixedPrimary = await maybeFixSubtitleWithAi(
                primarySrt,
                args,
                args.youtubeWhisperSourceLanguage,
              );
              primaryAlias = await publishTrack(
                'primary',
                fixedPrimary === primarySrt ? 'whisper' : 'whisper-fixed',
                fixedPrimary,
                basename,
              );
            } catch (error) {
              log(
                'warn',
                args.logLevel,
                `Failed to generate primary subtitle via whisper fallback: ${(error as Error).message}`,
              );
            }
          }
          if (plan.generateSecondarySubtitle) {
            try {
              const secondaryPrefix = path.join(tempDir, `${basename}.${secondaryLabel}`);
              const secondarySrt = await runLoggedYoutubePhase(
                {
                  startMessage: `Starting whisper secondary subtitle generation (${secondaryLabel})`,
                  finishMessage: `Finished whisper secondary subtitle generation (${secondaryLabel})`,
                  failureMessage: `Whisper secondary subtitle generation failed (${secondaryLabel})`,
                  log: (level, message) => log(level, args.logLevel, message),
                },
                () =>
                  runWhisper(whisperBin!, args, {
                    modelPath,
                    audioPath: whisperAudioPath,
                    language: args.youtubeWhisperSourceLanguage,
                    translate: true,
                    outputPrefix: secondaryPrefix,
                  }),
              );
              const fixedSecondary = await maybeFixSubtitleWithAi(secondarySrt, args);
              secondaryAlias = await publishTrack(
                'secondary',
                fixedSecondary === secondarySrt ? 'whisper-translate' : 'whisper-translate-fixed',
                fixedSecondary,
                basename,
              );
            } catch (error) {
              log(
                'warn',
                args.logLevel,
                `Failed to generate secondary subtitle via whisper fallback: ${(error as Error).message}`,
              );
            }
          }
        } catch (error) {
          log(
            'warn',
            args.logLevel,
            `Whisper fallback pipeline failed: ${(error as Error).message}`,
          );
        }
      }
    }
    if (!secondaryCanUseWhisperTranslate && !selectedSecondary) {
      log(
        'warn',
        args.logLevel,
        `Secondary subtitle language (${secondaryLabel}) has no whisper translate fallback; relying on manual subtitles only.`,
      );
    }
    // Native tracks count as success even when nothing was generated here.
    if (!primaryAlias && !secondaryAlias && !selectedPrimary && !selectedSecondary) {
      throw new Error('Failed to generate any subtitle tracks.');
    }
    if ((!primaryAlias && !selectedPrimary) || (!secondaryAlias && !selectedSecondary)) {
      log(
        'warn',
        args.logLevel,
        `Generated partial subtitle result: primary=${primaryAlias || selectedPrimary ? 'ok' : 'missing'}, secondary=${secondaryAlias || selectedSecondary ? 'ok' : 'missing'}`,
      );
    }
    return {
      basename,
      primaryPath: primaryAlias || undefined,
      secondaryPath: secondaryAlias || undefined,
      primaryNative: Boolean(selectedPrimary),
      secondaryNative: Boolean(selectedSecondary),
    };
  } catch (error) {
    // Preserve the temp dir so a failed run can be debugged.
    keepTemp = true;
    throw error;
  } finally {
    if (keepTemp) {
      log('warn', args.logLevel, `Keeping subtitle temp dir: ${tempDir}`);
    } else {
      try {
        fs.rmSync(tempDir, { recursive: true, force: true });
      } catch {
        // ignore cleanup failures
      }
    }
  }
}

View File

@@ -0,0 +1,55 @@
import test from 'node:test';
import assert from 'node:assert/strict';
import { runLoggedYoutubePhase } from './progress';
// Both tests inject a fake clock via `now` so elapsed-time formatting is
// deterministic; the phase body mutates `nowMs` to simulate time passing.
test('runLoggedYoutubePhase logs start and finish with elapsed time', async () => {
  const entries: Array<{ level: 'info' | 'warn'; message: string }> = [];
  let nowMs = 1_000;
  const result = await runLoggedYoutubePhase(
    {
      startMessage: 'Starting subtitle probe',
      finishMessage: 'Finished subtitle probe',
      log: (level, message) => entries.push({ level, message }),
      now: () => nowMs,
    },
    async () => {
      nowMs = 2_500; // 1.5s elapsed
      return 'ok';
    },
  );
  assert.equal(result, 'ok');
  assert.deepEqual(entries, [
    { level: 'info', message: 'Starting subtitle probe' },
    { level: 'info', message: 'Finished subtitle probe (1.5s)' },
  ]);
});

test('runLoggedYoutubePhase logs failure with elapsed time and rethrows', async () => {
  const entries: Array<{ level: 'info' | 'warn'; message: string }> = [];
  let nowMs = 5_000;
  await assert.rejects(
    runLoggedYoutubePhase(
      {
        startMessage: 'Starting whisper primary',
        finishMessage: 'Finished whisper primary',
        failureMessage: 'Failed whisper primary',
        log: (level, message) => entries.push({ level, message }),
        now: () => nowMs,
      },
      async () => {
        nowMs = 8_200; // 3.2s elapsed before the failure
        throw new Error('boom');
      },
    ),
    /boom/,
  );
  // Failure path uses failureMessage (not finishMessage) at warn level.
  assert.deepEqual(entries, [
    { level: 'info', message: 'Starting whisper primary' },
    { level: 'warn', message: 'Failed whisper primary after 3.2s: boom' },
  ]);
});

View File

@@ -0,0 +1,33 @@
// Log severities a phase can emit.
type PhaseLogLevel = 'info' | 'warn';
/**
 * Options for runLoggedYoutubePhase.
 * `failureMessage` falls back to `finishMessage` when omitted; `now` is an
 * injectable millisecond clock for tests, defaulting to Date.now.
 */
export interface RunLoggedYoutubePhaseOptions {
  startMessage: string;
  finishMessage: string;
  failureMessage?: string;
  log: (level: PhaseLogLevel, message: string) => void;
  now?: () => number;
}
// Render a millisecond duration as seconds with one decimal, clamped at zero.
function formatElapsedMs(elapsedMs: number): string {
  const clamped = elapsedMs < 0 ? 0 : elapsedMs;
  return `${(clamped / 1000).toFixed(1)}s`;
}
/**
 * Run an async phase bracketed by start/finish log lines that include the
 * elapsed time. On failure the error is logged at warn level (preferring
 * `failureMessage`) and rethrown unchanged.
 */
export async function runLoggedYoutubePhase<T>(
  options: RunLoggedYoutubePhaseOptions,
  run: () => Promise<T>,
): Promise<T> {
  const clock = options.now ?? Date.now;
  const startedAt = clock();
  options.log('info', options.startMessage);
  try {
    const result = await run();
    const elapsed = formatElapsedMs(clock() - startedAt);
    options.log('info', `${options.finishMessage} (${elapsed})`);
    return result;
  } catch (error) {
    const elapsed = formatElapsedMs(clock() - startedAt);
    const label = options.failureMessage ?? options.finishMessage;
    const detail = error instanceof Error ? error.message : String(error);
    options.log('warn', `${label} after ${elapsed}: ${detail}`);
    throw error;
  }
}

View File

@@ -0,0 +1,32 @@
import test from 'node:test';
import assert from 'node:assert/strict';
import { parseSrt, stringifySrt } from './srt';
// Fixtures use template literals at column 0 so the SRT payload (including
// the blank line separating cues) is preserved byte-for-byte.
test('parseSrt reads cue numbering timing and text', () => {
  const cues = parseSrt(`1
00:00:01,000 --> 00:00:02,000
こんにちは

2
00:00:02,500 --> 00:00:03,000
世界
`);
  assert.equal(cues.length, 2);
  assert.equal(cues[0]?.start, '00:00:01,000');
  assert.equal(cues[0]?.end, '00:00:02,000');
  assert.equal(cues[0]?.text, 'こんにちは');
  assert.equal(cues[1]?.text, '世界');
});

test('stringifySrt preserves parseable cue structure', () => {
  // Round-trip: parse then serialize should keep index, timing, and text.
  const roundTrip = stringifySrt(
    parseSrt(`1
00:00:01,000 --> 00:00:02,000
こんにちは
`),
  );
  assert.match(roundTrip, /1\n00:00:01,000 --> 00:00:02,000\nこんにちは/);
});

40
launcher/youtube/srt.ts Normal file
View File

@@ -0,0 +1,40 @@
/**
 * One parsed SRT cue: the original numeric index, start/end timestamps in
 * `HH:MM:SS,mmm` form, and the (possibly multi-line) cue text.
 */
export interface SrtCue {
  index: number;
  start: string;
  end: string;
  text: string;
}
// Strict SRT timing line: "HH:MM:SS,mmm --> HH:MM:SS,mmm".
const TIMING_LINE_PATTERN =
  /^(?<start>\d{2}:\d{2}:\d{2},\d{3}) --> (?<end>\d{2}:\d{2}:\d{2},\d{3})$/;
/**
 * Parse SRT content into cues.
 *
 * Accepts CRLF, lone CR, and LF line endings, and strips a leading UTF-8 BOM
 * — both variations are common in subtitle files found in the wild (the
 * previous version only normalized CRLF). Cues whose text is empty after
 * trimming are dropped.
 *
 * @throws Error when a cue block has a malformed index or timing line.
 */
export function parseSrt(content: string): SrtCue[] {
  // Normalize BOM and every line-ending style before splitting into blocks.
  const normalized = content
    .replace(/^\uFEFF/, '')
    .replace(/\r\n?/g, '\n')
    .trim();
  if (!normalized) return [];
  return normalized
    .split(/\n{2,}/)
    .map((block) => {
      const lines = block.split('\n');
      const index = Number.parseInt(lines[0] || '', 10);
      const timingMatch = TIMING_LINE_PATTERN.exec(lines[1] || '');
      if (!Number.isInteger(index) || !timingMatch?.groups) {
        throw new Error(`Invalid SRT cue block: ${block}`);
      }
      return {
        index,
        start: timingMatch.groups.start!,
        end: timingMatch.groups.end!,
        text: lines.slice(2).join('\n').trim(),
      } satisfies SrtCue;
    })
    .filter((cue) => cue.text.length > 0);
}
/**
 * Serialize cues back to SRT text. Cues are renumbered sequentially from 1
 * (the stored `index` field is ignored) and each cue's text is trimmed.
 */
export function stringifySrt(cues: SrtCue[]): string {
  const blocks: string[] = [];
  cues.forEach((cue, position) => {
    blocks.push(`${position + 1}\n${cue.start} --> ${cue.end}\n${cue.text.trim()}\n`);
  });
  return blocks.join('\n').trimEnd();
}

View File

@@ -0,0 +1,126 @@
import test from 'node:test';
import assert from 'node:assert/strict';
import { applyFixedCueBatch, parseAiSubtitleFixResponse } from './subtitle-fix-ai';
import { parseSrt } from './srt';
// Tests for the AI subtitle-fix guardrails: timing must be preserved, cue
// counts must match, markdown/text-only model replies are recovered, and
// Japanese subtitles must not be silently translated into another language.
test('applyFixedCueBatch accepts content-only fixes with identical timing', () => {
  const original = parseSrt(`1
00:00:01,000 --> 00:00:02,000
こんいちは

2
00:00:03,000 --> 00:00:04,000
世界
`);
  const fixed = parseSrt(`1
00:00:01,000 --> 00:00:02,000
こんにちは

2
00:00:03,000 --> 00:00:04,000
世界
`);
  const merged = applyFixedCueBatch(original, fixed);
  assert.equal(merged[0]?.text, 'こんにちは');
});

test('applyFixedCueBatch rejects changed timestamps', () => {
  // Same text fix, but the start timestamp was shifted by 100 ms.
  const original = parseSrt(`1
00:00:01,000 --> 00:00:02,000
こんいちは
`);
  const fixed = parseSrt(`1
00:00:01,100 --> 00:00:02,000
こんにちは
`);
  assert.throws(() => applyFixedCueBatch(original, fixed), /timestamps/i);
});

test('parseAiSubtitleFixResponse accepts valid SRT wrapped in markdown fences', () => {
  const original = parseSrt(`1
00:00:01,000 --> 00:00:02,000
こんいちは

2
00:00:03,000 --> 00:00:04,000
世界
`);
  // Models often wrap output in a ```srt fence; the parser must unwrap it.
  const parsed = parseAiSubtitleFixResponse(
    original,
    '```srt\n1\n00:00:01,000 --> 00:00:02,000\nこんにちは\n\n2\n00:00:03,000 --> 00:00:04,000\n世界\n```',
  );
  assert.equal(parsed[0]?.text, 'こんにちは');
  assert.equal(parsed[1]?.text, '世界');
});

test('parseAiSubtitleFixResponse accepts text-only one-block-per-cue output', () => {
  const original = parseSrt(`1
00:00:01,000 --> 00:00:02,000
こんいちは

2
00:00:03,000 --> 00:00:04,000
世界
`);
  // Text-only replies are mapped back onto the original cue timings.
  const parsed = parseAiSubtitleFixResponse(
    original,
    `こんにちは
世界`,
  );
  assert.equal(parsed[0]?.start, '00:00:01,000');
  assert.equal(parsed[0]?.text, 'こんにちは');
  assert.equal(parsed[1]?.end, '00:00:04,000');
  assert.equal(parsed[1]?.text, '世界');
});

test('parseAiSubtitleFixResponse rejects unrecoverable text-only output', () => {
  const original = parseSrt(`1
00:00:01,000 --> 00:00:02,000
こんいちは

2
00:00:03,000 --> 00:00:04,000
世界
`);
  // Three lines for two cues — no mapping is possible.
  assert.throws(
    () => parseAiSubtitleFixResponse(original, 'こんにちは\n世界\n余分です'),
    /cue block|cue count/i,
  );
});

test('parseAiSubtitleFixResponse rejects language drift for primary Japanese subtitles', () => {
  const original = parseSrt(`1
00:00:01,000 --> 00:00:02,000
こんにちは

2
00:00:03,000 --> 00:00:04,000
今日はいい天気ですね
`);
  // A reply translated to English must be rejected when 'ja' is expected.
  assert.throws(
    () =>
      parseAiSubtitleFixResponse(
        original,
        `1
00:00:01,000 --> 00:00:02,000
Hello

2
00:00:03,000 --> 00:00:04,000
The weather is nice today
`,
        'ja',
      ),
    /language/i,
  );
});
View File

@@ -0,0 +1,213 @@
import type { LauncherAiConfig } from '../types.js';
import { requestAiChatCompletion, resolveAiApiKey } from '../../src/ai/client.js';
import { parseSrt, stringifySrt, type SrtCue } from './srt.js';
// System prompt used when the launcher AI config does not supply one.
const DEFAULT_SUBTITLE_FIX_PROMPT =
  'Fix transcription mistakes only. Preserve cue numbering, timestamps, and valid SRT formatting exactly. Return only corrected SRT.';
// Captures a trailing bare SRT payload when the model prefixes commentary
// before cue 1 (anchored on "<digits>\n<timing line>" through end of text).
const SRT_BLOCK_PATTERN =
  /(?:^|\n)(\d+\n\d{2}:\d{2}:\d{2},\d{3} --> \d{2}:\d{2}:\d{2},\d{3}[\s\S]*)$/;
// Unwraps a whole-message markdown code fence (``` with optional language tag).
const CODE_FENCE_PATTERN = /^```(?:\w+)?\s*\n([\s\S]*?)\n```$/;
// Unicode script classes used for crude language-drift detection.
const JAPANESE_CHAR_PATTERN = /[\p{Script=Hiragana}\p{Script=Katakana}\p{Script=Han}]/gu;
const LATIN_LETTER_PATTERN = /\p{Script=Latin}/gu;
/**
 * Merge AI-fixed cue text back onto the original cues.
 *
 * The fixed batch must have the same cue count and identical start/end
 * timestamps per cue; only the text is taken from the fixed version.
 * @throws Error when the count or any timestamp pair diverges.
 */
export function applyFixedCueBatch(original: SrtCue[], fixed: SrtCue[]): SrtCue[] {
  if (fixed.length !== original.length) {
    throw new Error('Fixed subtitle batch must preserve cue count.');
  }
  const merged: SrtCue[] = [];
  for (let position = 0; position < original.length; position += 1) {
    const baseCue = original[position]!;
    const fixedCue = fixed[position];
    if (!fixedCue) {
      throw new Error('Missing fixed subtitle cue.');
    }
    if (baseCue.start !== fixedCue.start || baseCue.end !== fixedCue.end) {
      throw new Error('Fixed subtitle batch must preserve cue timestamps.');
    }
    merged.push({ ...baseCue, text: fixedCue.text });
  }
  return merged;
}
// Split cues into consecutive batches of at most `size` cues each.
function chunkCues(cues: SrtCue[], size: number): SrtCue[][] {
  const batches: SrtCue[][] = [];
  let cursor = 0;
  while (cursor < cues.length) {
    batches.push(cues.slice(cursor, cursor + size));
    cursor += size;
  }
  return batches;
}
/**
 * Produce candidate payloads to try parsing from a raw AI reply, in order:
 * the trimmed message itself, the contents of a wrapping code fence, and a
 * trailing bare SRT block — deduplicated, preserving that order.
 */
function normalizeAiSubtitleFixCandidates(content: string): string[] {
  const normalized = content.replace(/\r\n/g, '\n').trim();
  if (!normalized) return [];
  const candidates = new Set<string>([normalized]);
  const fencedPayload = CODE_FENCE_PATTERN.exec(normalized)?.[1]?.trim();
  if (fencedPayload) candidates.add(fencedPayload);
  const trailingSrt = SRT_BLOCK_PATTERN.exec(normalized)?.[1]?.trim();
  if (trailingSrt) candidates.add(trailingSrt);
  return Array.from(candidates);
}
/**
 * Recover a text-only AI reply by pairing each paragraph (or, failing that,
 * each non-empty line) with the corresponding original cue's timing.
 * @throws Error when neither split matches the original cue count.
 */
function parseTextOnlyCueBatch(original: SrtCue[], content: string): SrtCue[] {
  const byParagraph = content
    .split(/\n{2,}/)
    .map((part) => part.trim())
    .filter((part) => part.length > 0);
  const byLine = content
    .split('\n')
    .map((part) => part.trim())
    .filter((part) => part.length > 0);
  // Paragraph alignment is preferred; line alignment is the fallback.
  let replacement: string[] | null = null;
  if (byParagraph.length === original.length) {
    replacement = byParagraph;
  } else if (byLine.length === original.length) {
    replacement = byLine;
  }
  if (!replacement) {
    throw new Error('Fixed subtitle batch must preserve cue count.');
  }
  const texts = replacement;
  return original.map((cue, position) => ({ ...cue, text: texts[position]! }));
}
// Count matches of `pattern` in `content`; lastIndex is reset first so the
// shared module-level global patterns behave deterministically across calls.
function countPatternMatches(content: string, pattern: RegExp): number {
  pattern.lastIndex = 0;
  let total = 0;
  for (const _match of content.matchAll(pattern)) {
    total += 1;
  }
  return total;
}
// Recognize the Japanese language codes ja/jp/jpn, case-insensitively and
// ignoring surrounding whitespace; undefined/empty input is not Japanese.
function isJapaneseLanguageCode(language: string | undefined): boolean {
  const normalized = language?.trim().toLowerCase();
  return normalized === 'ja' || normalized === 'jp' || normalized === 'jpn';
}
/**
 * Guard against the model translating Japanese subtitles away from Japanese.
 *
 * Only applies when the expected language is Japanese and the original batch
 * contains enough Japanese characters (>= 4) to judge; throws when the fixed
 * text has lost all Japanese script while gaining meaningful Latin text.
 */
function validateExpectedLanguage(
  original: SrtCue[],
  fixed: SrtCue[],
  expectedLanguage: string | undefined,
): void {
  if (!isJapaneseLanguageCode(expectedLanguage)) return;
  const joinTexts = (cues: SrtCue[]): string => cues.map((cue) => cue.text).join('\n');
  const originalJapanese = countPatternMatches(joinTexts(original), JAPANESE_CHAR_PATTERN);
  // Too little Japanese in the source to make a reliable call.
  if (originalJapanese < 4) return;
  const fixedText = joinTexts(fixed);
  const fixedJapanese = countPatternMatches(fixedText, JAPANESE_CHAR_PATTERN);
  const fixedLatin = countPatternMatches(fixedText, LATIN_LETTER_PATTERN);
  if (fixedJapanese === 0 && fixedLatin >= 4) {
    throw new Error('Fixed subtitle batch changed language away from expected Japanese.');
  }
}
/**
 * Parses an AI chat response into fixed cues. Every extracted candidate is
 * first tried as full SRT; if none parses and validates, each candidate is
 * retried as a text-only batch mapped onto the original cue timings.
 * Throws the last failure (or a generic error for empty responses).
 */
export function parseAiSubtitleFixResponse(
original: SrtCue[],
content: string,
expectedLanguage?: string,
): SrtCue[] {
const candidates = normalizeAiSubtitleFixCandidates(content);
let lastError: Error | null = null;
// Strategy order matters: structured SRT wins over the text-only fallback.
const strategies: Array<(candidate: string) => SrtCue[]> = [
(candidate) => parseSrt(candidate),
(candidate) => parseTextOnlyCueBatch(original, candidate),
];
for (const strategy of strategies) {
for (const candidate of candidates) {
try {
const cues = strategy(candidate);
validateExpectedLanguage(original, cues, expectedLanguage);
return cues;
} catch (error) {
lastError = error as Error;
}
}
}
throw lastError ?? new Error('AI subtitle fix returned empty content.');
}
/**
 * Sends the subtitle file through the configured AI chat model in 25-cue
 * chunks and rebuilds the fixed SRT from the responses.
 *
 * @param subtitleContent Raw SRT text to fix.
 * @param aiConfig Launcher AI settings (enabled flag, base URL, model,
 *   optional system prompt, request timeout).
 * @param logWarning Sink for non-fatal diagnostics; a warning is logged and
 *   null returned when any chunk fails to parse or validate.
 * @param expectedLanguage Optional language code forwarded to response
 *   validation (used to reject responses that drift away from Japanese).
 * @returns The fully fixed SRT content, or null when the feature is disabled,
 *   no API key resolves, the input has no cues, or any chunk fails.
 */
export async function fixSubtitleWithAi(
subtitleContent: string,
aiConfig: LauncherAiConfig,
logWarning: (message: string) => void,
expectedLanguage?: string,
): Promise<string | null> {
// Feature gate: only an explicit `enabled: true` activates the fixer.
if (aiConfig.enabled !== true) {
return null;
}
const apiKey = await resolveAiApiKey(aiConfig);
if (!apiKey) {
return null;
}
const cues = parseSrt(subtitleContent);
if (cues.length === 0) {
return null;
}
const fixedChunks: SrtCue[] = [];
// Process in fixed-size batches of 25 cues to keep each request small.
for (const chunk of chunkCues(cues, 25)) {
const fixedContent = await requestAiChatCompletion(
{
apiKey,
baseUrl: aiConfig.baseUrl,
model: aiConfig.model,
timeoutMs: aiConfig.requestTimeoutMs,
messages: [
{
role: 'system',
// Empty/whitespace-only custom prompts fall back to the default.
content: aiConfig.systemPrompt?.trim() || DEFAULT_SUBTITLE_FIX_PROMPT,
},
{
role: 'user',
content: stringifySrt(chunk),
},
],
},
{
logWarning,
},
);
// Any failed chunk aborts the whole fix — partial output is never returned.
if (!fixedContent) {
return null;
}
let parsedFixed: SrtCue[];
try {
parsedFixed = parseAiSubtitleFixResponse(chunk, fixedContent, expectedLanguage);
} catch (error) {
logWarning(`AI subtitle fix returned invalid SRT: ${(error as Error).message}`);
return null;
}
try {
fixedChunks.push(...applyFixedCueBatch(chunk, parsedFixed));
} catch (error) {
logWarning(`AI subtitle fix validation failed: ${(error as Error).message}`);
return null;
}
}
return stringifySrt(fixedChunks);
}

View File

@@ -0,0 +1,47 @@
import test from 'node:test';
import assert from 'node:assert/strict';
import { buildWhisperArgs } from './whisper';
// Full-argument snapshot: threads plus the optional VAD model flags.
test('buildWhisperArgs includes threads and optional VAD flags', () => {
  const args = buildWhisperArgs({
    modelPath: '/models/ggml-large-v2.bin',
    audioPath: '/tmp/input.wav',
    outputPrefix: '/tmp/output',
    language: 'ja',
    translate: false,
    threads: 8,
    vadModelPath: '/models/vad.bin',
  });
  const expected = [
    '-m',
    '/models/ggml-large-v2.bin',
    '-f',
    '/tmp/input.wav',
    '--output-srt',
    '--output-file',
    '/tmp/output',
    '--language',
    'ja',
    '--threads',
    '8',
    '-vm',
    '/models/vad.bin',
    '--vad',
  ];
  assert.deepEqual(args, expected);
});
// The --translate flag must be emitted when translation is requested.
test('buildWhisperArgs includes translate flag when requested', () => {
  const args = buildWhisperArgs({
    modelPath: '/models/base.bin',
    audioPath: '/tmp/input.wav',
    outputPrefix: '/tmp/output',
    language: 'ja',
    translate: true,
    threads: 4,
  });
  assert.ok(args.includes('--translate'));
});

View File

@@ -0,0 +1,60 @@
import fs from 'node:fs';
import type { Args } from '../types.js';
import { runExternalCommand } from '../util.js';
/** Inputs used to assemble a whisper CLI argument vector. */
export interface BuildWhisperArgsOptions {
/** Path to the model file, passed via -m. */
modelPath: string;
/** Path to the input audio file, passed via -f. */
audioPath: string;
/** Output path prefix; whisper appends the format extension (e.g. ".srt"). */
outputPrefix: string;
/** Language code passed via --language. */
language: string;
/** When true, adds --translate to translate the transcript. */
translate: boolean;
/** Worker thread count, passed via --threads. */
threads: number;
/** Optional VAD model path; when set, adds -vm <path> --vad. */
vadModelPath?: string;
}
/**
 * Assembles the whisper CLI argument vector: model, input audio, SRT output,
 * language, and thread count, followed by the optional --translate flag and
 * the optional VAD model flags (-vm <path> --vad).
 */
export function buildWhisperArgs(options: BuildWhisperArgsOptions): string[] {
const args: string[] = [];
args.push('-m', options.modelPath);
args.push('-f', options.audioPath);
args.push('--output-srt');
args.push('--output-file', options.outputPrefix);
args.push('--language', options.language);
args.push('--threads', String(options.threads));
if (options.translate) {
args.push('--translate');
}
if (options.vadModelPath) {
args.push('-vm', options.vadModelPath, '--vad');
}
return args;
}
/**
 * Runs the whisper binary over the given audio and returns the path of the
 * produced SRT file.
 *
 * The VAD model is only forwarded when the configured path is non-empty and
 * exists on disk. Throws when whisper exits without producing the expected
 * `<outputPrefix>.srt` file.
 */
export async function runWhisper(
whisperBin: string,
args: Args,
options: Omit<BuildWhisperArgsOptions, 'threads' | 'vadModelPath'>,
): Promise<string> {
// Hoist the trimmed configured path so it is computed once.
const configuredVadModel = args.whisperVadModel.trim();
const vadModelPath =
configuredVadModel && fs.existsSync(configuredVadModel) ? configuredVadModel : undefined;
const whisperArgs = buildWhisperArgs({
...options,
threads: args.whisperThreads,
vadModelPath,
});
await runExternalCommand(whisperBin, whisperArgs, {
commandLabel: 'whisper',
streamOutput: true,
});
// whisper derives the actual file name from the prefix plus ".srt".
const outputPath = `${options.outputPrefix}.srt`;
if (!fs.existsSync(outputPath)) {
throw new Error(`whisper output not found: ${outputPath}`);
}
return outputPath;
}

View File

@@ -1,6 +1,6 @@
{
"name": "subminer",
"version": "0.4.0",
"version": "0.5.3",
"description": "All-in-one sentence mining overlay with AnkiConnect and dictionary integration",
"packageManager": "bun@1.3.5",
"main": "dist/main-entry.js",
@@ -11,9 +11,15 @@
"get-frequency:electron": "bun run build:yomitan && bun build scripts/get_frequency.ts --format=cjs --target=node --outfile dist/scripts/get_frequency.js --external electron && electron dist/scripts/get_frequency.js --pretty --color-top-x 10000 --yomitan-user-data ~/.config/SubMiner --colorized-line",
"test-yomitan-parser": "bun run scripts/test-yomitan-parser.ts",
"test-yomitan-parser:electron": "bun run build:yomitan && bun build scripts/test-yomitan-parser.ts --format=cjs --target=node --outfile dist/scripts/test-yomitan-parser.js --external electron && electron dist/scripts/test-yomitan-parser.js",
"build:yomitan": "node scripts/build-yomitan.mjs",
"build": "bun run build:yomitan && tsc -p tsconfig.json && bun run build:renderer && cp src/renderer/index.html src/renderer/style.css dist/renderer/ && cp -r src/renderer/fonts dist/renderer/ && bash scripts/build-macos-helper.sh",
"build:yomitan": "bun scripts/build-yomitan.mjs",
"build:assets": "bun scripts/prepare-build-assets.mjs",
"build": "bun run build:yomitan && tsc -p tsconfig.json && bun run build:renderer && bun run build:assets",
"build:renderer": "esbuild src/renderer/renderer.ts --bundle --platform=browser --format=esm --target=es2022 --outfile=dist/renderer/renderer.js --sourcemap",
"changelog:build": "bun run scripts/build-changelog.ts build",
"changelog:check": "bun run scripts/build-changelog.ts check",
"changelog:lint": "bun run scripts/build-changelog.ts lint",
"changelog:pr-check": "bun run scripts/build-changelog.ts pr-check",
"changelog:release-notes": "bun run scripts/build-changelog.ts release-notes",
"format": "prettier --write .",
"format:check": "prettier --check .",
"format:src": "bash scripts/prettier-scope.sh --write",
@@ -21,38 +27,41 @@
"test:config:src": "bun test src/config/config.test.ts src/config/path-resolution.test.ts src/config/resolve/anki-connect.test.ts src/config/resolve/subtitle-style.test.ts src/config/resolve/jellyfin.test.ts src/config/definitions/domain-registry.test.ts src/generate-config-example.test.ts",
"test:config:dist": "bun test dist/config/config.test.js dist/config/path-resolution.test.js dist/config/resolve/anki-connect.test.js dist/config/resolve/subtitle-style.test.js dist/config/resolve/jellyfin.test.js dist/config/definitions/domain-registry.test.js dist/generate-config-example.test.js",
"test:config:smoke:dist": "bun test dist/config/path-resolution.test.js",
"test:plugin:src": "lua scripts/test-plugin-start-gate.lua",
"test:plugin:src": "lua scripts/test-plugin-start-gate.lua && lua scripts/test-plugin-binary-windows.lua",
"test:launcher:smoke:src": "bun test launcher/smoke.e2e.test.ts",
"test:launcher:src": "bun test launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/smoke.e2e.test.ts && bun run test:plugin:src",
"test:core:src": "bun test src/cli/args.test.ts src/cli/help.test.ts src/shared/setup-state.test.ts src/core/services/cli-command.test.ts src/core/services/field-grouping-overlay.test.ts src/core/services/numeric-shortcut-session.test.ts src/core/services/secondary-subtitle.test.ts src/core/services/mpv-render-metrics.test.ts src/core/services/overlay-content-measurement.test.ts src/core/services/mpv-control.test.ts src/core/services/mpv.test.ts src/core/services/runtime-options-ipc.test.ts src/core/services/runtime-config.test.ts src/core/services/yomitan-extension-paths.test.ts src/core/services/config-hot-reload.test.ts src/core/services/discord-presence.test.ts src/core/services/tokenizer.test.ts src/core/services/tokenizer/annotation-stage.test.ts src/core/services/tokenizer/parser-selection-stage.test.ts src/core/services/tokenizer/parser-enrichment-stage.test.ts src/core/services/subsync.test.ts src/core/services/overlay-bridge.test.ts src/core/services/overlay-shortcut-handler.test.ts src/core/services/mining.test.ts src/core/services/anki-jimaku.test.ts src/core/services/jimaku-download-path.test.ts src/core/services/jellyfin.test.ts src/core/services/jellyfin-remote.test.ts src/core/services/immersion-tracker-service.test.ts src/core/services/overlay-runtime-init.test.ts src/core/services/app-ready.test.ts src/core/services/startup-bootstrap.test.ts src/core/services/subtitle-processing-controller.test.ts src/core/services/anilist/anilist-update-queue.test.ts src/core/utils/shortcut-config.test.ts src/main/runtime/first-run-setup-plugin.test.ts src/main/runtime/first-run-setup-service.test.ts src/main/runtime/first-run-setup-window.test.ts src/main/runtime/tray-runtime.test.ts src/main/runtime/tray-main-actions.test.ts src/main/runtime/tray-main-deps.test.ts src/main/runtime/tray-runtime-handlers.test.ts src/main/runtime/cli-command-context-main-deps.test.ts src/main/runtime/app-ready-main-deps.test.ts src/renderer/error-recovery.test.ts 
src/renderer/subtitle-render.test.ts src/renderer/handlers/mouse.test.ts src/renderer/handlers/keyboard.test.ts src/renderer/modals/jimaku.test.ts src/subsync/utils.test.ts src/main/anilist-url-guard.test.ts src/window-trackers/x11-tracker.test.ts launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/setup-gate.test.ts",
"test:core:dist": "bun test dist/cli/args.test.js dist/cli/help.test.js dist/core/services/cli-command.test.js dist/core/services/ipc.test.js dist/core/services/anki-jimaku-ipc.test.js dist/core/services/field-grouping-overlay.test.js dist/core/services/numeric-shortcut-session.test.js dist/core/services/secondary-subtitle.test.js dist/core/services/mpv-render-metrics.test.js dist/core/services/overlay-content-measurement.test.js dist/core/services/mpv-control.test.js dist/core/services/mpv.test.js dist/core/services/runtime-options-ipc.test.js dist/core/services/runtime-config.test.js dist/core/services/yomitan-extension-paths.test.js dist/core/services/config-hot-reload.test.js dist/core/services/discord-presence.test.js dist/core/services/tokenizer.test.js dist/core/services/tokenizer/annotation-stage.test.js dist/core/services/tokenizer/parser-selection-stage.test.js dist/core/services/tokenizer/parser-enrichment-stage.test.js dist/core/services/subsync.test.js dist/core/services/overlay-bridge.test.js dist/core/services/overlay-manager.test.js dist/core/services/overlay-shortcut-handler.test.js dist/core/services/mining.test.js dist/core/services/anki-jimaku.test.js dist/core/services/jimaku-download-path.test.js dist/core/services/jellyfin.test.js dist/core/services/jellyfin-remote.test.js dist/core/services/immersion-tracker-service.test.js dist/core/services/overlay-runtime-init.test.js dist/core/services/app-ready.test.js dist/core/services/startup-bootstrap.test.js dist/core/services/subtitle-processing-controller.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/anilist/anilist-update-queue.test.js dist/renderer/error-recovery.test.js dist/renderer/subtitle-render.test.js dist/renderer/handlers/mouse.test.js dist/renderer/handlers/keyboard.test.js dist/renderer/modals/jimaku.test.js dist/subsync/utils.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/x11-tracker.test.js",
"test:launcher:src": "bun test launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/mpv.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/smoke.e2e.test.ts && bun run test:plugin:src",
"test:core:src": "bun test src/cli/args.test.ts src/cli/help.test.ts src/shared/setup-state.test.ts src/core/services/cli-command.test.ts src/core/services/field-grouping-overlay.test.ts src/core/services/numeric-shortcut-session.test.ts src/core/services/secondary-subtitle.test.ts src/core/services/mpv-render-metrics.test.ts src/core/services/overlay-content-measurement.test.ts src/core/services/mpv-control.test.ts src/core/services/mpv.test.ts src/core/services/runtime-options-ipc.test.ts src/core/services/runtime-config.test.ts src/core/services/yomitan-extension-paths.test.ts src/core/services/config-hot-reload.test.ts src/core/services/discord-presence.test.ts src/core/services/tokenizer.test.ts src/core/services/tokenizer/annotation-stage.test.ts src/core/services/tokenizer/parser-selection-stage.test.ts src/core/services/tokenizer/parser-enrichment-stage.test.ts src/core/services/subsync.test.ts src/core/services/overlay-bridge.test.ts src/core/services/overlay-shortcut-handler.test.ts src/core/services/mining.test.ts src/core/services/anki-jimaku.test.ts src/core/services/jimaku-download-path.test.ts src/core/services/jellyfin.test.ts src/core/services/jellyfin-remote.test.ts src/core/services/immersion-tracker-service.test.ts src/core/services/overlay-runtime-init.test.ts src/core/services/app-ready.test.ts src/core/services/startup-bootstrap.test.ts src/core/services/subtitle-processing-controller.test.ts src/core/services/anilist/anilist-update-queue.test.ts src/core/utils/shortcut-config.test.ts src/main/runtime/first-run-setup-plugin.test.ts src/main/runtime/first-run-setup-service.test.ts src/main/runtime/first-run-setup-window.test.ts src/main/runtime/tray-runtime.test.ts src/main/runtime/tray-main-actions.test.ts src/main/runtime/tray-main-deps.test.ts src/main/runtime/tray-runtime-handlers.test.ts src/main/runtime/cli-command-context-main-deps.test.ts src/main/runtime/app-ready-main-deps.test.ts src/renderer/error-recovery.test.ts 
src/renderer/subtitle-render.test.ts src/renderer/handlers/mouse.test.ts src/renderer/handlers/keyboard.test.ts src/renderer/modals/jimaku.test.ts src/subsync/utils.test.ts src/main/anilist-url-guard.test.ts src/window-trackers/x11-tracker.test.ts src/window-trackers/windows-helper.test.ts src/window-trackers/windows-tracker.test.ts launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/setup-gate.test.ts",
"test:core:dist": "bun test dist/cli/args.test.js dist/cli/help.test.js dist/core/services/cli-command.test.js dist/core/services/ipc.test.js dist/core/services/anki-jimaku-ipc.test.js dist/core/services/field-grouping-overlay.test.js dist/core/services/numeric-shortcut-session.test.js dist/core/services/secondary-subtitle.test.js dist/core/services/mpv-render-metrics.test.js dist/core/services/overlay-content-measurement.test.js dist/core/services/mpv-control.test.js dist/core/services/mpv.test.js dist/core/services/runtime-options-ipc.test.js dist/core/services/runtime-config.test.js dist/core/services/yomitan-extension-paths.test.js dist/core/services/config-hot-reload.test.js dist/core/services/discord-presence.test.js dist/core/services/tokenizer.test.js dist/core/services/tokenizer/annotation-stage.test.js dist/core/services/tokenizer/parser-selection-stage.test.js dist/core/services/tokenizer/parser-enrichment-stage.test.js dist/core/services/subsync.test.js dist/core/services/overlay-bridge.test.js dist/core/services/overlay-manager.test.js dist/core/services/overlay-shortcut-handler.test.js dist/core/services/mining.test.js dist/core/services/anki-jimaku.test.js dist/core/services/jimaku-download-path.test.js dist/core/services/jellyfin.test.js dist/core/services/jellyfin-remote.test.js dist/core/services/immersion-tracker-service.test.js dist/core/services/overlay-runtime-init.test.js dist/core/services/app-ready.test.js dist/core/services/startup-bootstrap.test.js dist/core/services/subtitle-processing-controller.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/anilist/anilist-update-queue.test.js dist/renderer/error-recovery.test.js dist/renderer/subtitle-render.test.js dist/renderer/handlers/mouse.test.js dist/renderer/handlers/keyboard.test.js dist/renderer/modals/jimaku.test.js dist/subsync/utils.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/x11-tracker.test.js 
dist/window-trackers/windows-helper.test.js dist/window-trackers/windows-tracker.test.js",
"test:core:smoke:dist": "bun test dist/cli/help.test.js dist/core/services/runtime-config.test.js dist/core/services/ipc.test.js dist/core/services/overlay-manager.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/startup-bootstrap.test.js dist/renderer/error-recovery.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/x11-tracker.test.js",
"test:smoke:dist": "bun run test:config:smoke:dist && bun run test:core:smoke:dist",
"test:subtitle:src": "bun test src/core/services/subsync.test.ts src/subsync/utils.test.ts",
"test:immersion:sqlite:src": "bun test src/core/services/immersion-tracker-service.test.ts src/core/services/immersion-tracker/storage-session.test.ts",
"test:immersion:sqlite:dist": "node --experimental-sqlite --test dist/core/services/immersion-tracker-service.test.js dist/core/services/immersion-tracker/storage-session.test.js",
"test:immersion:sqlite:dist": "bun test dist/core/services/immersion-tracker-service.test.js dist/core/services/immersion-tracker/storage-session.test.js",
"test:immersion:sqlite": "bun run tsc && bun run test:immersion:sqlite:dist",
"test:src": "node scripts/run-test-lane.mjs bun-src-full",
"test:launcher:unit:src": "node scripts/run-test-lane.mjs bun-launcher-unit",
"test:src": "bun scripts/run-test-lane.mjs bun-src-full",
"test:launcher:unit:src": "bun scripts/run-test-lane.mjs bun-launcher-unit",
"test:launcher:env:src": "bun run test:launcher:smoke:src && bun run test:plugin:src",
"test:env": "bun run test:launcher:env:src && bun run test:immersion:sqlite:src",
"test:node:compat": "bun run tsc && node --experimental-sqlite --test dist/core/services/ipc.test.js dist/core/services/anki-jimaku-ipc.test.js dist/core/services/overlay-manager.test.js dist/main/config-validation.test.js dist/main/runtime/registry.test.js dist/main/runtime/startup-config.test.js",
"test:runtime:compat": "bun run tsc && bun test dist/core/services/ipc.test.js dist/core/services/anki-jimaku-ipc.test.js dist/core/services/overlay-manager.test.js dist/main/config-validation.test.js dist/main/runtime/registry.test.js dist/main/runtime/startup-config.test.js",
"test:node:compat": "bun run test:runtime:compat",
"test:full": "bun run test:src && bun run test:launcher:unit:src && bun run test:node:compat",
"test": "bun run test:fast",
"test:config": "bun run test:config:src",
"test:launcher": "bun run test:launcher:src",
"test:core": "bun run test:core:src",
"test:subtitle": "bun run test:subtitle:src",
"test:fast": "bun run test:config:src && bun run test:core:src && bun test src/main-entry-runtime.test.ts src/anki-integration/anki-connect-proxy.test.ts && bun run tsc && node --experimental-sqlite --test dist/main/runtime/registry.test.js",
"test:fast": "bun run test:config:src && bun run test:core:src && bun test src/main-entry-runtime.test.ts src/anki-integration/anki-connect-proxy.test.ts src/release-workflow.test.ts src/ci-workflow.test.ts scripts/build-changelog.test.ts && bun run tsc && bun test dist/main/runtime/registry.test.js",
"generate:config-example": "bun run build && bun dist/generate-config-example.js",
"start": "bun run build && electron . --start",
"dev": "bun run build && electron . --start --dev",
"stop": "electron . --stop",
"toggle": "electron . --toggle",
"build:appimage": "bun run build && electron-builder --linux AppImage",
"build:mac": "bun run build && electron-builder --mac dmg zip",
"build:mac:unsigned": "bun run build && env -u APPLE_ID -u APPLE_APP_SPECIFIC_PASSWORD -u APPLE_TEAM_ID -u CSC_LINK -u CSC_KEY_PASSWORD CSC_IDENTITY_AUTO_DISCOVERY=false electron-builder --mac dmg zip",
"build:mac:zip": "bun run build && electron-builder --mac zip"
"build:appimage": "bun run build && electron-builder --linux AppImage --publish never",
"build:mac": "bun run build && electron-builder --mac dmg zip --publish never",
"build:mac:unsigned": "bun run build && env -u APPLE_ID -u APPLE_APP_SPECIFIC_PASSWORD -u APPLE_TEAM_ID -u CSC_LINK -u CSC_KEY_PASSWORD CSC_IDENTITY_AUTO_DISCOVERY=false electron-builder --mac dmg zip --publish never",
"build:mac:zip": "bun run build && electron-builder --mac zip --publish never",
"build:win": "bun run build && electron-builder --win nsis zip --publish never",
"build:win:unsigned": "bun run build && node scripts/build-win-unsigned.mjs"
},
"keywords": [
"anki",
@@ -71,6 +80,7 @@
"commander": "^14.0.3",
"discord-rpc": "^4.0.1",
"jsonc-parser": "^3.3.1",
"libsql": "^0.5.22",
"ws": "^8.19.0"
},
"devDependencies": {
@@ -109,7 +119,26 @@
"icon": "assets/SubMiner.png",
"hardenedRuntime": true,
"entitlements": "build/entitlements.mac.plist",
"entitlementsInherit": "build/entitlements.mac.plist"
"entitlementsInherit": "build/entitlements.mac.plist",
"extraResources": [
{
"from": "dist/scripts/get-mpv-window-macos",
"to": "scripts/get-mpv-window-macos"
}
]
},
"win": {
"target": [
"nsis",
"zip"
],
"icon": "assets/SubMiner.png"
},
"nsis": {
"oneClick": false,
"perMachine": false,
"allowToChangeInstallationDirectory": true,
"include": "build/installer.nsh"
},
"files": [
"dist/**/*",
@@ -140,8 +169,8 @@
"to": "plugin/subminer.conf"
},
{
"from": "dist/scripts/get-mpv-window-macos",
"to": "scripts/get-mpv-window-macos"
"from": "dist/scripts/get-mpv-window-windows.ps1",
"to": "scripts/get-mpv-window-windows.ps1"
}
]
}

View File

@@ -4,10 +4,12 @@
# Path to SubMiner binary (leave empty for auto-detection)
# Auto-detection searches common locations, including:
# - macOS: /Applications/SubMiner.app/Contents/MacOS/SubMiner, ~/Applications/SubMiner.app/Contents/MacOS/SubMiner
# - Windows: %LOCALAPPDATA%\Programs\SubMiner\SubMiner.exe, %ProgramFiles%\SubMiner\SubMiner.exe
# - Linux: ~/.local/bin/SubMiner.AppImage, /opt/SubMiner/SubMiner.AppImage, /usr/local/bin/SubMiner, /usr/bin/SubMiner
binary_path=
# Path to mpv IPC socket (must match input-ipc-server in mpv.conf)
# Windows installs rewrite this to \\.\pipe\subminer-socket during installation.
socket_path=/tmp/subminer-socket
# Enable texthooker WebSocket server

View File

@@ -31,6 +31,18 @@ function M.create(ctx)
return encoded:gsub(" ", "%%20")
end
-- Returns true when mpv's current "path" property looks like a URL
-- (scheme followed by "://"), i.e. remote playback rather than a local file.
local function is_remote_media_path()
local media_path = mp.get_property("path")
if type(media_path) ~= "string" then
return false
end
-- Strip surrounding whitespace before matching.
local trimmed = media_path:match("^%s*(.-)%s*$") or ""
if trimmed == "" then
return false
end
-- Scheme pattern: a letter, then letters/digits/+/./-, then "://".
return trimmed:match("^%a[%w+.-]*://") ~= nil
end
local function parse_json_payload(text)
if type(text) ~= "string" then
return nil
@@ -523,6 +535,10 @@ function M.create(ctx)
end
local function should_fetch_aniskip_async(trigger_source, callback)
if is_remote_media_path() then
callback(false, "remote-url")
return
end
if trigger_source == "script-message" or trigger_source == "overlay-start" then
callback(true, trigger_source)
return

View File

@@ -1,6 +1,7 @@
local M = {}
function M.create(ctx)
local mp = ctx.mp
local utils = ctx.utils
local opts = ctx.opts
local state = ctx.state
@@ -26,6 +27,13 @@ function M.create(ctx)
end
local function binary_candidates_from_app_path(app_path)
if environment.is_windows() then
return {
utils.join_path(app_path, "SubMiner.exe"),
utils.join_path(app_path, "subminer.exe"),
}
end
return {
utils.join_path(app_path, "Contents", "MacOS", "SubMiner"),
utils.join_path(app_path, "Contents", "MacOS", "subminer"),
@@ -43,6 +51,11 @@ function M.create(ctx)
return true
end
-- Returns true when `path` exists and is a directory (per mp.utils.file_info).
local function directory_exists(path)
local info = utils.file_info(path)
return info ~= nil and info.is_dir == true
end
local function resolve_binary_candidate(candidate)
local normalized = normalize_binary_path_candidate(candidate)
if not normalized then
@@ -53,6 +66,25 @@ function M.create(ctx)
return normalized
end
if environment.is_windows() then
if not normalized:lower():match("%.exe$") then
local with_exe = normalized .. ".exe"
if file_exists(with_exe) then
return with_exe
end
end
if directory_exists(normalized) then
for _, path in ipairs(binary_candidates_from_app_path(normalized)) do
if file_exists(path) then
return path
end
end
end
return nil
end
if not normalized:lower():find("%.app") then
return nil
end
@@ -89,6 +121,109 @@ function M.create(ctx)
return nil
end
-- Appends `candidate` to `search_paths` when it is a non-empty string;
-- silently ignores nil/empty values from unset environment variables.
local function add_search_path(search_paths, candidate)
if type(candidate) == "string" and candidate ~= "" then
search_paths[#search_paths + 1] = candidate
end
end
-- Trims surrounding whitespace from subprocess stdout; returns nil for
-- non-string input or when nothing remains after trimming.
local function trim_subprocess_stdout(value)
if type(value) ~= "string" then
return nil
end
local trimmed = value:match("^%s*(.-)%s*$") or ""
if trimmed == "" then
return nil
end
return trimmed
end
-- Locates the SubMiner binary on Windows via a PowerShell probe that checks,
-- in order: a running SubMiner process, well-known install directories,
-- App Paths registry keys, and finally PATH lookup. Returns the resolved
-- binary path, or nil on non-Windows hosts, probe failure, or no match.
local function find_windows_binary_via_system_lookup()
if not environment.is_windows() then
return nil
end
-- Requires mp.command_native to spawn the subprocess.
if not mp or type(mp.command_native) ~= "function" then
return nil
end
-- The script prints the first existing candidate path and exits 0.
local script = [=[
function Emit-FirstExistingPath {
param([string[]]$Candidates)
foreach ($candidate in $Candidates) {
if ([string]::IsNullOrWhiteSpace($candidate)) {
continue
}
if (Test-Path -LiteralPath $candidate -PathType Leaf) {
Write-Output $candidate
exit 0
}
}
}
$runningProcess = Get-CimInstance Win32_Process |
Where-Object { $_.Name -ieq 'SubMiner.exe' -or $_.Name -ieq 'subminer.exe' } |
Select-Object -First 1 -Property ExecutablePath, CommandLine
if ($null -ne $runningProcess) {
Emit-FirstExistingPath @($runningProcess.ExecutablePath)
}
$localAppData = [Environment]::GetFolderPath('LocalApplicationData')
$programFiles = [Environment]::GetFolderPath('ProgramFiles')
$programFilesX86 = ${env:ProgramFiles(x86)}
Emit-FirstExistingPath @(
$(if (-not [string]::IsNullOrWhiteSpace($localAppData)) { Join-Path $localAppData 'Programs\SubMiner\SubMiner.exe' } else { $null }),
$(if (-not [string]::IsNullOrWhiteSpace($programFiles)) { Join-Path $programFiles 'SubMiner\SubMiner.exe' } else { $null }),
$(if (-not [string]::IsNullOrWhiteSpace($programFilesX86)) { Join-Path $programFilesX86 'SubMiner\SubMiner.exe' } else { $null }),
'C:\SubMiner\SubMiner.exe'
)
foreach ($registryPath in @(
'HKCU:\Software\Microsoft\Windows\CurrentVersion\App Paths\SubMiner.exe',
'HKLM:\Software\Microsoft\Windows\CurrentVersion\App Paths\SubMiner.exe',
'HKLM:\Software\WOW6432Node\Microsoft\Windows\CurrentVersion\App Paths\SubMiner.exe'
)) {
try {
$appPath = (Get-ItemProperty -Path $registryPath -ErrorAction Stop).'(default)'
Emit-FirstExistingPath @($appPath)
} catch {
}
}
try {
$commandPath = Get-Command SubMiner.exe -ErrorAction Stop | Select-Object -First 1 -ExpandProperty Source
Emit-FirstExistingPath @($commandPath)
} catch {
}
]=]
-- playback_only=false keeps the probe runnable while playback is idle.
local result = mp.command_native({
name = "subprocess",
args = {
"powershell.exe",
"-NoProfile",
"-ExecutionPolicy",
"Bypass",
"-Command",
script,
},
playback_only = false,
capture_stdout = true,
capture_stderr = false,
})
if not result or result.status ~= 0 then
return nil
end
local candidate = trim_subprocess_stdout(result.stdout)
if not candidate then
return nil
end
-- Re-validate through the shared candidate resolver (adds .exe, dirs, etc.).
return resolve_binary_candidate(candidate)
end
local function find_binary()
local override = find_binary_override()
if override then
@@ -100,17 +235,34 @@ function M.create(ctx)
return configured
end
local search_paths = {
"/Applications/SubMiner.app/Contents/MacOS/SubMiner",
utils.join_path(os.getenv("HOME") or "", "Applications/SubMiner.app/Contents/MacOS/SubMiner"),
"C:\\Program Files\\SubMiner\\SubMiner.exe",
"C:\\Program Files (x86)\\SubMiner\\SubMiner.exe",
"C:\\SubMiner\\SubMiner.exe",
utils.join_path(os.getenv("HOME") or "", ".local/bin/SubMiner.AppImage"),
"/opt/SubMiner/SubMiner.AppImage",
"/usr/local/bin/SubMiner",
"/usr/bin/SubMiner",
}
local system_lookup_binary = find_windows_binary_via_system_lookup()
if system_lookup_binary then
subminer_log("info", "binary", "Found Windows binary via system lookup at: " .. system_lookup_binary)
return system_lookup_binary
end
local home = os.getenv("HOME") or os.getenv("USERPROFILE") or ""
local app_data = os.getenv("APPDATA") or ""
local app_data_local = app_data ~= "" and app_data:gsub("[/\\][Rr][Oo][Aa][Mm][Ii][Nn][Gg]$", "\\Local") or ""
local local_app_data = os.getenv("LOCALAPPDATA") or utils.join_path(home, "AppData", "Local")
local program_files = os.getenv("ProgramFiles") or "C:\\Program Files"
local program_files_x86 = os.getenv("ProgramFiles(x86)") or "C:\\Program Files (x86)"
local search_paths = {}
if environment.is_windows() then
add_search_path(search_paths, utils.join_path(app_data_local, "Programs", "SubMiner", "SubMiner.exe"))
add_search_path(search_paths, utils.join_path(local_app_data, "Programs", "SubMiner", "SubMiner.exe"))
add_search_path(search_paths, utils.join_path(program_files, "SubMiner", "SubMiner.exe"))
add_search_path(search_paths, utils.join_path(program_files_x86, "SubMiner", "SubMiner.exe"))
add_search_path(search_paths, "C:\\SubMiner\\SubMiner.exe")
else
add_search_path(search_paths, "/Applications/SubMiner.app/Contents/MacOS/SubMiner")
add_search_path(search_paths, utils.join_path(home, "Applications", "SubMiner.app", "Contents", "MacOS", "SubMiner"))
add_search_path(search_paths, utils.join_path(home, ".local", "bin", "SubMiner.AppImage"))
add_search_path(search_paths, "/opt/SubMiner/SubMiner.AppImage")
add_search_path(search_paths, "/usr/local/bin/SubMiner")
add_search_path(search_paths, "/usr/bin/SubMiner")
end
for _, path in ipairs(search_paths) do
if file_exists(path) then

View File

@@ -1,6 +1,12 @@
local M = {}
local BOOTSTRAP_GUARD_KEY = "__subminer_plugin_bootstrapped"
function M.init()
if rawget(_G, BOOTSTRAP_GUARD_KEY) == true then
return
end
rawset(_G, BOOTSTRAP_GUARD_KEY, true)
local input = require("mp.input")
local mp = require("mp")
local msg = require("mp.msg")

View File

@@ -61,10 +61,9 @@ function M.create(ctx)
aniskip.clear_aniskip_state()
hover.clear_hover_overlay()
process.disarm_auto_play_ready_gate()
if state.overlay_running or state.texthooker_running then
subminer_log("info", "lifecycle", "mpv shutting down, stopping SubMiner process")
show_osd("Shutting down...")
process.stop_overlay()
if state.overlay_running then
subminer_log("info", "lifecycle", "mpv shutting down, hiding SubMiner overlay")
process.hide_visible_overlay()
end
end
@@ -77,6 +76,9 @@ function M.create(ctx)
mp.register_event("end-file", function()
process.disarm_auto_play_ready_gate()
hover.clear_hover_overlay()
if state.overlay_running then
process.hide_visible_overlay()
end
end)
mp.register_event("shutdown", function()
hover.clear_hover_overlay()

View File

@@ -22,4 +22,9 @@ if not package.path:find(module_patterns, 1, true) then
package.path = module_patterns .. package.path
end
require("init").init()
local init_module = assert(loadfile(script_dir .. "/init.lua"))()
if type(init_module) == "table" and type(init_module.init) == "function" then
init_module.init()
elseif type(init_module) == "function" then
init_module()
end

View File

@@ -1,5 +1,27 @@
local M = {}
-- Normalizes the user-configured socket path. The migration only applies when
-- the platform default is the Windows named pipe: blank or stale Unix-era
-- values are replaced with the pipe path, anything else is kept (trimmed).
local function normalize_socket_path_option(socket_path, default_socket_path)
if type(default_socket_path) ~= "string" then
return socket_path
end
local function strip(value)
return value:match("^%s*(.-)%s*$")
end
local default_trimmed = strip(default_socket_path)
local socket_trimmed = socket_path
if type(socket_path) == "string" then
socket_trimmed = strip(socket_path)
end
-- Non-Windows default: nothing to migrate, just return the trimmed value.
if default_trimmed ~= "\\\\.\\pipe\\subminer-socket" then
return socket_trimmed
end
-- Legacy values written before Windows support pointed at /tmp paths.
local stale_values = {
["/tmp/subminer-socket"] = true,
["\\tmp\\subminer-socket"] = true,
["\\\\.\\pipe\\tmp\\subminer-socket"] = true,
}
if type(socket_trimmed) ~= "string" or socket_trimmed == "" or stale_values[socket_trimmed] then
return default_trimmed
end
return socket_trimmed
end
function M.load(options_lib, default_socket_path)
local opts = {
binary_path = "",
@@ -25,6 +47,7 @@ function M.load(options_lib, default_socket_path)
}
options_lib.read_options(opts, "subminer")
opts.socket_path = normalize_socket_path_option(opts.socket_path, default_socket_path)
return opts
end

View File

@@ -411,6 +411,28 @@ function M.create(ctx)
show_osd("Stopped")
end
-- Asks the running SubMiner process to hide the currently visible overlay
-- window (control command "hide-visible-overlay") without stopping the
-- process, so the overlay can be shown again quickly later.
-- Logs an error and returns early when the SubMiner binary cannot be found.
local function hide_visible_overlay()
if not binary.ensure_binary_available() then
subminer_log("error", "binary", "SubMiner binary not found")
return
end
-- Fire-and-forget dispatch; the callback only logs the outcome.
run_control_command_async("hide-visible-overlay", nil, function(ok, result)
if ok then
subminer_log("info", "process", "Visible overlay hidden")
else
subminer_log(
"warn",
"process",
"Hide-visible-overlay command returned non-zero status: "
.. tostring(result and result.status or "unknown")
)
end
end)
-- Runs immediately after dispatch (not in the callback): a pending auto-play
-- gate must not fire once the overlay has been hidden.
disarm_auto_play_ready_gate()
end
local function toggle_overlay()
if not binary.ensure_binary_available() then
subminer_log("error", "binary", "SubMiner binary not found")
@@ -511,6 +533,7 @@ function M.create(ctx)
start_overlay = start_overlay,
start_overlay_from_script_message = start_overlay_from_script_message,
stop_overlay = stop_overlay,
hide_visible_overlay = hide_visible_overlay,
toggle_overlay = toggle_overlay,
open_options = open_options,
restart_overlay = restart_overlay,

16
release/release-notes.md Normal file
View File

@@ -0,0 +1,16 @@
## Highlights
### Changed
- Release: Publish unsigned Windows `.exe` and `.zip` artifacts directly from release CI instead of routing them through SignPath.
- Release: Added `bun run build:win:unsigned` for explicit local unsigned Windows packaging.
## Installation
See the README and docs/installation guide for full setup steps.
## Assets
- Linux: `SubMiner.AppImage`
- macOS: `SubMiner-*.dmg` and `SubMiner-*.zip`
- Optional extras: `subminer-assets.tar.gz` and the `subminer` launcher
Note: the `subminer` wrapper script uses Bun (`#!/usr/bin/env bun`), so `bun` must be installed and on `PATH`.

View File

@@ -0,0 +1,184 @@
import assert from 'node:assert/strict';
import fs from 'node:fs';
import path from 'node:path';
import test from 'node:test';
// Dynamically imports the module under test; Node caches the namespace, so
// every test call resolves to the same module instance.
async function loadModule() {
  const mod = await import('./build-changelog');
  return mod;
}
// Creates a unique scratch directory for one test under
// <cwd>/.tmp/build-changelog-test and returns its absolute path.
function createWorkspace(name: string): string {
  const root = path.join(process.cwd(), '.tmp', 'build-changelog-test');
  fs.mkdirSync(root, { recursive: true });
  const prefix = path.join(root, `${name}-`);
  return fs.mkdtempSync(prefix);
}
// The changelog must only ever be written to the repo-local CHANGELOG.md;
// a docs/changelog.md path must never appear in the output list.
test('resolveChangelogOutputPaths stays repo-local and never writes docs paths', async () => {
  const { resolveChangelogOutputPaths } = await loadModule();
  const workspace = createWorkspace('with-docs-repo');
  const projectRoot = path.join(workspace, 'SubMiner');
  fs.mkdirSync(projectRoot, { recursive: true });
  try {
    const outputPaths = resolveChangelogOutputPaths({ cwd: projectRoot });
    assert.deepEqual(outputPaths, [path.join(projectRoot, 'CHANGELOG.md')]);
    assert.equal(outputPaths.includes(path.join(projectRoot, 'docs', 'changelog.md')), false);
  } finally {
    fs.rmSync(workspace, { recursive: true, force: true });
  }
});
// End-to-end build: fragments are grouped by type into the new release
// section, changes/README.md is preserved, only fragment files are deleted,
// and release notes are regenerated alongside the changelog.
test('writeChangelogArtifacts ignores README, groups fragments by type, writes release notes, and deletes only fragment files', async () => {
  const { writeChangelogArtifacts } = await loadModule();
  const workspace = createWorkspace('write-artifacts');
  const projectRoot = path.join(workspace, 'SubMiner');
  const existingChangelog = ['# Changelog', '', '## v0.4.0 (2026-03-01)', '- Existing fix', ''].join('\n');
  fs.mkdirSync(projectRoot, { recursive: true });
  fs.mkdirSync(path.join(projectRoot, 'changes'), { recursive: true });
  fs.writeFileSync(path.join(projectRoot, 'CHANGELOG.md'), existingChangelog, 'utf8');
  fs.writeFileSync(path.join(projectRoot, 'changes', 'README.md'), '# Changelog Fragments\n\nIgnored helper text.\n', 'utf8');
  fs.writeFileSync(
    path.join(projectRoot, 'changes', '001.md'),
    ['type: added', 'area: overlay', '', '- Added release fragments.'].join('\n'),
    'utf8',
  );
  fs.writeFileSync(
    path.join(projectRoot, 'changes', '002.md'),
    ['type: fixed', 'area: release', '', 'Fixed release notes generation.'].join('\n'),
    'utf8',
  );
  try {
    const result = writeChangelogArtifacts({
      cwd: projectRoot,
      version: '0.4.1',
      date: '2026-03-07',
    });
    assert.deepEqual(result.outputPaths, [path.join(projectRoot, 'CHANGELOG.md')]);
    assert.deepEqual(
      result.deletedFragmentPaths,
      [
        path.join(projectRoot, 'changes', '001.md'),
        path.join(projectRoot, 'changes', '002.md'),
      ],
    );
    assert.equal(fs.existsSync(path.join(projectRoot, 'changes', '001.md')), false);
    assert.equal(fs.existsSync(path.join(projectRoot, 'changes', '002.md')), false);
    assert.equal(fs.existsSync(path.join(projectRoot, 'changes', 'README.md')), true);
    const changelog = fs.readFileSync(path.join(projectRoot, 'CHANGELOG.md'), 'utf8');
    // New section must be prepended above the existing v0.4.0 section.
    assert.match(
      changelog,
      /^# Changelog\n\n## v0\.4\.1 \(2026-03-07\)\n\n### Added\n- Overlay: Added release fragments\.\n\n### Fixed\n- Release: Fixed release notes generation\.\n\n## v0\.4\.0 \(2026-03-01\)\n- Existing fix\n$/m,
    );
    const releaseNotes = fs.readFileSync(path.join(projectRoot, 'release', 'release-notes.md'), 'utf8');
    assert.match(releaseNotes, /## Highlights\n### Added\n- Overlay: Added release fragments\./);
    assert.match(releaseNotes, /### Fixed\n- Release: Fixed release notes generation\./);
    assert.match(releaseNotes, /## Installation\n\nSee the README and docs\/installation guide/);
  } finally {
    fs.rmSync(workspace, { recursive: true, force: true });
  }
});
// The release gate: pending fragments block a release, and once they are
// gone the changelog must contain a section for the exact version.
test('verifyChangelogReadyForRelease ignores README but rejects pending fragments and missing version sections', async () => {
  const { verifyChangelogReadyForRelease } = await loadModule();
  const workspace = createWorkspace('verify-release');
  const projectRoot = path.join(workspace, 'SubMiner');
  fs.mkdirSync(path.join(projectRoot, 'changes'), { recursive: true });
  fs.writeFileSync(path.join(projectRoot, 'CHANGELOG.md'), '# Changelog\n', 'utf8');
  fs.writeFileSync(path.join(projectRoot, 'changes', 'README.md'), '# Changelog Fragments\n', 'utf8');
  fs.writeFileSync(path.join(projectRoot, 'changes', '001.md'), '- Pending fragment.\n', 'utf8');
  try {
    assert.throws(
      () => verifyChangelogReadyForRelease({ cwd: projectRoot, version: '0.4.1' }),
      /Pending changelog fragments/,
    );
    fs.rmSync(path.join(projectRoot, 'changes', '001.md'));
    assert.throws(
      () => verifyChangelogReadyForRelease({ cwd: projectRoot, version: '0.4.1' }),
      /Missing CHANGELOG section for v0\.4\.1/,
    );
  } finally {
    fs.rmSync(workspace, { recursive: true, force: true });
  }
});
// Fragment linting: an unknown `type:` value must be rejected with a message
// listing the allowed types.
test('verifyChangelogFragments rejects invalid metadata', async () => {
  const { verifyChangelogFragments } = await loadModule();
  const workspace = createWorkspace('lint-invalid');
  const projectRoot = path.join(workspace, 'SubMiner');
  fs.mkdirSync(path.join(projectRoot, 'changes'), { recursive: true });
  fs.writeFileSync(
    path.join(projectRoot, 'changes', '001.md'),
    ['type: nope', 'area: overlay', '', '- Invalid type.'].join('\n'),
    'utf8',
  );
  try {
    assert.throws(
      () => verifyChangelogFragments({ cwd: projectRoot }),
      /must declare type as one of/,
    );
  } finally {
    fs.rmSync(workspace, { recursive: true, force: true });
  }
});
// PR gate behavior matrix: source changes require a fragment; docs-only diffs
// and the skip-changelog label are exempt; a DELETED fragment does not count,
// but an added one does.
test('verifyPullRequestChangelog requires fragments for user-facing changes and skips docs-only changes', async () => {
  const { verifyPullRequestChangelog } = await loadModule();
  assert.throws(
    () =>
      verifyPullRequestChangelog({
        changedEntries: [{ path: 'src/main-entry.ts', status: 'M' }],
        changedLabels: [],
      }),
    /requires a changelog fragment/,
  );
  assert.doesNotThrow(() =>
    verifyPullRequestChangelog({
      changedEntries: [{ path: 'docs/RELEASING.md', status: 'M' }],
      changedLabels: [],
    }),
  );
  assert.doesNotThrow(() =>
    verifyPullRequestChangelog({
      changedEntries: [{ path: 'src/main-entry.ts', status: 'M' }],
      changedLabels: ['skip-changelog'],
    }),
  );
  assert.throws(
    () =>
      verifyPullRequestChangelog({
        changedEntries: [
          { path: 'src/main-entry.ts', status: 'M' },
          { path: 'changes/001.md', status: 'D' },
        ],
        changedLabels: [],
      }),
    /requires a changelog fragment/,
  );
  assert.doesNotThrow(() =>
    verifyPullRequestChangelog({
      changedEntries: [
        { path: 'src/main-entry.ts', status: 'M' },
        { path: 'changes/001.md', status: 'A' },
      ],
      changedLabels: [],
    }),
  );
});

566
scripts/build-changelog.ts Normal file
View File

@@ -0,0 +1,566 @@
import * as fs from 'node:fs';
import * as path from 'node:path';
import { execFileSync } from 'node:child_process';
// Injectable filesystem/log hooks; production falls back to node:fs and
// console.log, tests substitute sandboxed implementations.
type ChangelogFsDeps = {
  existsSync?: (candidate: string) => boolean;
  mkdirSync?: (candidate: string, options: { recursive: true }) => void;
  readFileSync?: (candidate: string, encoding: BufferEncoding) => string;
  readdirSync?: (candidate: string, options: { withFileTypes: true }) => fs.Dirent[];
  rmSync?: (candidate: string) => void;
  writeFileSync?: (candidate: string, content: string, encoding: BufferEncoding) => void;
  log?: (message: string) => void;
};
// Common options accepted by the build/check/lint/release-notes entry points.
type ChangelogOptions = {
  cwd?: string;
  date?: string;
  version?: string;
  deps?: ChangelogFsDeps;
};
// The allowed fragment categories.
type FragmentType = 'added' | 'changed' | 'fixed' | 'docs' | 'internal';
// One parsed fragment file from changes/.
type ChangeFragment = {
  area: string;
  bullets: string[];
  path: string;
  type: FragmentType;
};
// Input for the pull-request gate: git name-status entries plus PR labels.
type PullRequestChangelogOptions = {
  changedEntries: Array<{
    path: string;
    status: string;
  }>;
  changedLabels?: string[];
};
// Repo-relative location of the generated release notes file.
const RELEASE_NOTES_PATH = path.join('release', 'release-notes.md');
const CHANGELOG_HEADER = '# Changelog';
// Also defines the rendering order of the grouped sections.
const CHANGE_TYPES: FragmentType[] = ['added', 'changed', 'fixed', 'docs', 'internal'];
const CHANGE_TYPE_HEADINGS: Record<FragmentType, string> = {
  added: 'Added',
  changed: 'Changed',
  fixed: 'Fixed',
  docs: 'Docs',
  internal: 'Internal',
};
// PR label that bypasses the fragment requirement entirely.
const SKIP_CHANGELOG_LABEL = 'skip-changelog';
/** Strips a single leading `v` so `v1.2.3` and `1.2.3` compare equal. */
function normalizeVersion(version: string): string {
  return version.startsWith('v') ? version.slice(1) : version;
}
/** Returns the explicit date when provided, else today's UTC date (YYYY-MM-DD). */
function resolveDate(date?: string): string {
  const today = new Date().toISOString().slice(0, 10);
  return date ?? today;
}
/**
 * Reads the `version` field from `<cwd>/package.json` via the injected reader
 * and returns it without a leading `v`. Throws when the field is absent.
 */
function resolvePackageVersion(cwd: string, readFileSync: (candidate: string, encoding: BufferEncoding) => string): string {
  const manifestPath = path.join(cwd, 'package.json');
  const manifest = JSON.parse(readFileSync(manifestPath, 'utf8')) as { version?: string };
  const rawVersion = manifest.version;
  if (!rawVersion) {
    throw new Error(`Missing package.json version at ${manifestPath}`);
  }
  return normalizeVersion(rawVersion);
}
// Resolves the release version: an explicit `version` option wins, otherwise
// it is read from package.json; a leading "v" is stripped in both cases.
function resolveVersion(
  options: Pick<ChangelogOptions, 'cwd' | 'version' | 'deps'>,
): string {
  const cwd = options.cwd ?? process.cwd();
  const readFileSync = options.deps?.readFileSync ?? fs.readFileSync;
  return normalizeVersion(options.version ?? resolvePackageVersion(cwd, readFileSync));
}
/** Changelog fragments live under `<cwd>/changes`. */
function resolveChangesDir(cwd: string): string {
  const changesDirName = 'changes';
  return path.join(cwd, changesDirName);
}
/**
 * Lists the pending fragment files in changes/ as sorted absolute paths.
 * Only regular `.md` files count; README.md (any case) is excluded so a
 * helper document can live alongside the fragments.
 */
function resolveFragmentPaths(
  cwd: string,
  deps?: ChangelogFsDeps,
): string[] {
  const existsSync = deps?.existsSync ?? fs.existsSync;
  const readdirSync = deps?.readdirSync ?? fs.readdirSync;
  const changesDir = resolveChangesDir(cwd);
  if (!existsSync(changesDir)) {
    return [];
  }
  const fragmentNames = readdirSync(changesDir, { withFileTypes: true })
    .filter((entry) => entry.isFile())
    .map((entry) => entry.name)
    .filter((name) => name.endsWith('.md') && name.toLowerCase() !== 'readme.md');
  return fragmentNames.map((name) => path.join(changesDir, name)).sort();
}
/**
 * Normalizes a fragment body into `- …` bullet lines: blank lines are
 * dropped, `-`/`*` bullet markers are unified, and plain lines become
 * bullets. Throws when no content remains.
 */
function normalizeFragmentBullets(content: string): string[] {
  const bullets: string[] = [];
  for (const rawLine of content.split(/\r?\n/)) {
    const line = rawLine.trim();
    if (!line) {
      continue;
    }
    const match = /^[-*]\s+(.*)$/.exec(line);
    const text = match ? (match[1] ?? '').trim() : line;
    bullets.push(`- ${text}`);
  }
  if (bullets.length === 0) {
    throw new Error('Changelog fragment cannot be empty.');
  }
  return bullets;
}
// Parses a fragment's leading `key: value` metadata block and returns the
// declared type, area, and the remaining markdown body.
// Throws when the type is missing/unknown, the area is missing, or the body
// is empty.
function parseFragmentMetadata(content: string, fragmentPath: string): {
  area: string;
  body: string;
  type: FragmentType;
} {
  const lines = content.split(/\r?\n/);
  let index = 0;
  // Skip blank lines before the metadata block.
  while (index < lines.length && !(lines[index] ?? '').trim()) {
    index += 1;
  }
  const metadata = new Map<string, string>();
  // Collect consecutive `key: value` lines. A blank line consumes the
  // separator and ends the block; a non-matching line ends the block but is
  // intentionally left in place so it becomes the first body line.
  while (index < lines.length) {
    const trimmed = (lines[index] ?? '').trim();
    if (!trimmed) {
      index += 1;
      break;
    }
    const match = /^([a-z]+):\s*(.+)$/.exec(trimmed);
    if (!match) {
      break;
    }
    const [, rawKey = '', rawValue = ''] = match;
    metadata.set(rawKey, rawValue.trim());
    index += 1;
  }
  const type = metadata.get('type');
  if (!type || !CHANGE_TYPES.includes(type as FragmentType)) {
    throw new Error(
      `${fragmentPath} must declare type as one of: ${CHANGE_TYPES.join(', ')}.`,
    );
  }
  const area = metadata.get('area');
  if (!area) {
    throw new Error(`${fragmentPath} must declare area.`);
  }
  const body = lines.slice(index).join('\n').trim();
  if (!body) {
    throw new Error(`${fragmentPath} must include at least one changelog bullet.`);
  }
  return {
    area,
    body,
    // Safe: membership in CHANGE_TYPES was verified above.
    type: type as FragmentType,
  };
}
/**
 * Reads and parses every pending fragment file into a ChangeFragment.
 * Throws (via parsing/normalization) on the first invalid fragment.
 */
function readChangeFragments(
  cwd: string,
  deps?: ChangelogFsDeps,
): ChangeFragment[] {
  const readFileSync = deps?.readFileSync ?? fs.readFileSync;
  const fragments: ChangeFragment[] = [];
  for (const fragmentPath of resolveFragmentPaths(cwd, deps)) {
    const parsed = parseFragmentMetadata(readFileSync(fragmentPath, 'utf8'), fragmentPath);
    fragments.push({
      area: parsed.area,
      bullets: normalizeFragmentBullets(parsed.body),
      path: fragmentPath,
      type: parsed.type,
    });
  }
  return fragments;
}
/** Turns an area slug like `release-notes` into a label like `Release Notes`. */
function formatAreaLabel(area: string): string {
  const segments = area.split(/[-_\s]+/).filter((segment) => segment.length > 0);
  const capitalized = segments.map((segment) =>
    segment.replace(/^./, (first) => first.toUpperCase()),
  );
  return capitalized.join(' ');
}
/** Prefixes a normalized `- …` bullet with the fragment's formatted area. */
function renderFragmentBullet(fragment: ChangeFragment, bullet: string): string {
  const text = bullet.replace(/^- /, '');
  return `- ${formatAreaLabel(fragment.area)}: ${text}`;
}
// Renders fragments grouped by type into "### <Heading>" sections, in the
// fixed CHANGE_TYPES order; types with no fragments are omitted entirely.
function renderGroupedChanges(fragments: ChangeFragment[]): string {
  const sections = CHANGE_TYPES.flatMap((type) => {
    const typeFragments = fragments.filter((fragment) => fragment.type === type);
    if (typeFragments.length === 0) {
      // flatMap drops empty arrays, so the section disappears.
      return [];
    }
    const bullets = typeFragments
      .flatMap((fragment) => fragment.bullets.map((bullet) => renderFragmentBullet(fragment, bullet)))
      .join('\n');
    return [`### ${CHANGE_TYPE_HEADINGS[type]}\n${bullets}`];
  });
  return sections.join('\n\n');
}
// Builds the "## v<version> (<date>)" changelog section from the pending
// fragments. Throws when there are none so a release can never ship an
// empty section.
function buildReleaseSection(version: string, date: string, fragments: ChangeFragment[]): string {
  if (fragments.length === 0) {
    throw new Error('No changelog fragments found in changes/.');
  }
  // Trailing '' yields a newline after the section body.
  return [`## v${version} (${date})`, '', renderGroupedChanges(fragments), ''].join(
    '\n',
  );
}
/**
 * Normalizes an existing changelog so it starts with the `# Changelog`
 * header: empty input becomes just the header, already-headered content is
 * kept (with a single trailing newline), and anything else gets the header
 * prepended.
 */
function ensureChangelogHeader(existingChangelog: string): string {
  const trimmed = existingChangelog.trim();
  if (!trimmed) {
    return `${CHANGELOG_HEADER}\n`;
  }
  // Only treat the content as headered when the first LINE is exactly the
  // header; a bare startsWith() would also accept e.g. "# Changelog Fragments"
  // and leave such a file without a real header.
  if (trimmed === CHANGELOG_HEADER || trimmed.startsWith(`${CHANGELOG_HEADER}\n`)) {
    return `${trimmed}\n`;
  }
  return `${CHANGELOG_HEADER}\n\n${trimmed}\n`;
}
// Inserts the new release section directly under the "# Changelog" header,
// ahead of all previous releases. Throws when the version already has a
// section so a re-run cannot duplicate entries.
function prependReleaseSection(existingChangelog: string, releaseSection: string, version: string): string {
  const normalizedExisting = ensureChangelogHeader(existingChangelog);
  if (extractReleaseSectionBody(normalizedExisting, version) !== null) {
    throw new Error(`CHANGELOG already contains a section for v${version}.`);
  }
  // Strip the header, splice the new section before the old body, then
  // re-attach the header with normalized blank-line spacing.
  const withoutHeader = normalizedExisting.replace(/^# Changelog\s*/, '').trimStart();
  const body = [releaseSection.trimEnd(), withoutHeader.trimEnd()].filter(Boolean).join('\n\n');
  return `${CHANGELOG_HEADER}\n\n${body}\n`;
}
/** Escapes regex metacharacters so `value` matches literally inside a RegExp. */
function escapeRegExp(value: string): string {
  const specials = '.*+?^${}()|[]\\';
  let escaped = '';
  for (const ch of value) {
    escaped += specials.includes(ch) ? `\\${ch}` : ch;
  }
  return escaped;
}
// Returns the trimmed body of the "## v<version> (<date>)" section, or null
// when the changelog contains no such heading. The body extends to the next
// "## " heading or end-of-file.
function extractReleaseSectionBody(changelog: string, version: string): string | null {
  const headingPattern = new RegExp(
    `^## v${escapeRegExp(normalizeVersion(version))} \\([^\\n]+\\)$`,
    'm',
  );
  const headingMatch = headingPattern.exec(changelog);
  if (!headingMatch) {
    return null;
  }
  // +1 skips the newline that terminates the heading line.
  const bodyStart = headingMatch.index + headingMatch[0].length + 1;
  const remaining = changelog.slice(bodyStart);
  const nextHeadingMatch = /^## /m.exec(remaining);
  const body = nextHeadingMatch ? remaining.slice(0, nextHeadingMatch.index) : remaining;
  return body.trim();
}
/**
 * The changelog is written only to the repo-local CHANGELOG.md; copies under
 * docs/ are intentionally never produced.
 */
export function resolveChangelogOutputPaths(options?: {
  cwd?: string;
}): string[] {
  const root = options?.cwd ?? process.cwd();
  return [path.join(root, 'CHANGELOG.md')];
}
/**
 * Wraps a rendered changes section in the GitHub release-notes template
 * (Highlights, Installation, Assets). Note: no blank line between the
 * Highlights heading and the changes body — tests rely on that exact shape.
 */
function renderReleaseNotes(changes: string): string {
  const highlights = ['## Highlights', changes];
  const installation = [
    '## Installation',
    '',
    'See the README and docs/installation guide for full setup steps.',
  ];
  const assets = [
    '## Assets',
    '',
    '- Linux: `SubMiner.AppImage`',
    '- macOS: `SubMiner-*.dmg` and `SubMiner-*.zip`',
    '- Optional extras: `subminer-assets.tar.gz` and the `subminer` launcher',
    '',
    'Note: the `subminer` wrapper script uses Bun (`#!/usr/bin/env bun`), so `bun` must be installed and on `PATH`.',
  ];
  return [...highlights, '', ...installation, '', ...assets, ''].join('\n');
}
// Renders the release-notes template for `changes` and writes it to
// release/release-notes.md under cwd, creating release/ when absent.
// Returns the path that was written.
function writeReleaseNotesFile(
  cwd: string,
  changes: string,
  deps?: ChangelogFsDeps,
): string {
  const mkdirSync = deps?.mkdirSync ?? fs.mkdirSync;
  const writeFileSync = deps?.writeFileSync ?? fs.writeFileSync;
  const releaseNotesPath = path.join(cwd, RELEASE_NOTES_PATH);
  mkdirSync(path.dirname(releaseNotesPath), { recursive: true });
  writeFileSync(releaseNotesPath, renderReleaseNotes(changes), 'utf8');
  return releaseNotesPath;
}
// "build" entry point: consumes all pending fragments under changes/,
// prepends a new release section to CHANGELOG.md, regenerates
// release/release-notes.md, then deletes the consumed fragment files
// (changes/README.md survives because resolveFragmentPaths filters it out).
export function writeChangelogArtifacts(options?: ChangelogOptions): {
  deletedFragmentPaths: string[];
  outputPaths: string[];
  releaseNotesPath: string;
} {
  const cwd = options?.cwd ?? process.cwd();
  const existsSync = options?.deps?.existsSync ?? fs.existsSync;
  const mkdirSync = options?.deps?.mkdirSync ?? fs.mkdirSync;
  const readFileSync = options?.deps?.readFileSync ?? fs.readFileSync;
  const rmSync = options?.deps?.rmSync ?? fs.rmSync;
  const writeFileSync = options?.deps?.writeFileSync ?? fs.writeFileSync;
  const log = options?.deps?.log ?? console.log;
  const version = resolveVersion(options ?? {});
  const date = resolveDate(options?.date);
  // Parse all fragments first: invalid metadata or an empty changes/ throws
  // before anything is written.
  const fragments = readChangeFragments(cwd, options?.deps);
  const releaseSection = buildReleaseSection(version, date, fragments);
  const existingChangelogPath = path.join(cwd, 'CHANGELOG.md');
  const existingChangelog = existsSync(existingChangelogPath)
    ? readFileSync(existingChangelogPath, 'utf8')
    : '';
  const outputPaths = resolveChangelogOutputPaths({ cwd });
  const nextChangelog = prependReleaseSection(existingChangelog, releaseSection, version);
  for (const outputPath of outputPaths) {
    mkdirSync(path.dirname(outputPath), { recursive: true });
    writeFileSync(outputPath, nextChangelog, 'utf8');
    log(`Updated ${outputPath}`);
  }
  // Release notes reuse the section as written into the changelog; the
  // fallback to releaseSection is defensive and should not normally trigger.
  const releaseNotesPath = writeReleaseNotesFile(
    cwd,
    extractReleaseSectionBody(nextChangelog, version) ?? releaseSection,
    options?.deps,
  );
  log(`Generated ${releaseNotesPath}`);
  // Delete only the consumed fragment files, after all writes succeeded.
  for (const fragment of fragments) {
    rmSync(fragment.path);
    log(`Removed ${fragment.path}`);
  }
  return {
    deletedFragmentPaths: fragments.map((fragment) => fragment.path),
    outputPaths,
    releaseNotesPath,
  };
}
/**
 * Lints all pending fragments by parsing them; throws on the first invalid
 * fragment and returns nothing on success.
 */
export function verifyChangelogFragments(options?: ChangelogOptions): void {
  const cwd = options?.cwd ?? process.cwd();
  // The parsed fragments are discarded; only the validation side effect matters.
  void readChangeFragments(cwd, options?.deps);
}
// Release gate: fails when fragments are still pending in changes/, when
// CHANGELOG.md is missing, or when it lacks a section for the version being
// released.
export function verifyChangelogReadyForRelease(options?: ChangelogOptions): void {
  const cwd = options?.cwd ?? process.cwd();
  const readFileSync = options?.deps?.readFileSync ?? fs.readFileSync;
  const version = resolveVersion(options ?? {});
  const pendingFragments = resolveFragmentPaths(cwd, options?.deps);
  if (pendingFragments.length > 0) {
    throw new Error(`Pending changelog fragments must be released first: ${pendingFragments.join(', ')}`);
  }
  const changelogPath = path.join(cwd, 'CHANGELOG.md');
  if (!(options?.deps?.existsSync ?? fs.existsSync)(changelogPath)) {
    throw new Error(`Missing ${changelogPath}`);
  }
  const changelog = readFileSync(changelogPath, 'utf8');
  if (extractReleaseSectionBody(changelog, version) === null) {
    throw new Error(`Missing CHANGELOG section for v${version}.`);
  }
}
/**
 * True when `candidate` (a git path, always forward-slashed) is a changelog
 * fragment: any `.md` under changes/ except a README.md.
 * The README exclusion is anchored at a path-segment boundary so files whose
 * names merely END in "README.md" (e.g. changes/myREADME.md) still count as
 * fragments; the previous `\/?README\.md$` pattern wrongly excluded them.
 */
function isFragmentPath(candidate: string): boolean {
  return /^changes\/.+\.md$/u.test(candidate) && !/(^|\/)README\.md$/iu.test(candidate);
}
/**
 * Paths that never require a changelog fragment on their own: generated
 * changelog outputs, top-level docs, CI config, backlog records, and the
 * changes/ tree itself.
 */
function isIgnoredPullRequestPath(candidate: string): boolean {
  const exactIgnores = new Set([
    'CHANGELOG.md',
    'release/release-notes.md',
    'AGENTS.md',
    'README.md',
  ]);
  if (exactIgnores.has(candidate)) {
    return true;
  }
  const prefixIgnores = ['changes/', 'docs/', '.github/', 'backlog/'];
  return prefixIgnores.some((prefix) => candidate.startsWith(prefix));
}
// PR gate: when the diff touches release-relevant files, at least one
// non-deleted fragment under changes/ must be present. The skip-changelog
// label or an ignored-paths-only diff bypasses the check.
export function verifyPullRequestChangelog(options: PullRequestChangelogOptions): void {
  const labels = (options.changedLabels ?? []).map((label) => label.trim()).filter(Boolean);
  if (labels.includes(SKIP_CHANGELOG_LABEL)) {
    return;
  }
  const normalizedEntries = options.changedEntries
    .map((entry) => ({
      path: entry.path.trim(),
      status: entry.status.trim().toUpperCase(),
    }))
    .filter((entry) => entry.path);
  if (normalizedEntries.length === 0) {
    return;
  }
  // A deleted fragment (status D) does not count as providing one.
  const hasFragment = normalizedEntries.some(
    (entry) => entry.status !== 'D' && isFragmentPath(entry.path),
  );
  const requiresFragment = normalizedEntries.some(
    (entry) => !isIgnoredPullRequestPath(entry.path),
  );
  if (requiresFragment && !hasFragment) {
    throw new Error(
      `This pull request changes release-relevant files and requires a changelog fragment under changes/ or the ${SKIP_CHANGELOG_LABEL} label.`,
    );
  }
}
/**
 * Lists files changed between `baseRef...headRef` (merge-base diff) as
 * `{ path, status }` entries from `git diff --name-status`.
 *
 * Columns in --name-status output are TAB-separated; the previous
 * `split(/\s+/)` corrupted paths containing spaces. Renames/copies emit
 * "R<score>\told\tnew", so the last column is always the current path.
 */
function resolveChangedPathsFromGit(
  cwd: string,
  baseRef: string,
  headRef: string,
): Array<{ path: string; status: string }> {
  const output = execFileSync('git', ['diff', '--name-status', `${baseRef}...${headRef}`], {
    cwd,
    encoding: 'utf8',
  });
  return output
    .split(/\r?\n/)
    .map((line) => line.trim())
    .filter(Boolean)
    .map((line) => {
      const [status = '', ...paths] = line.split('\t');
      return {
        path: (paths[paths.length - 1] ?? '').trim(),
        status: status.trim(),
      };
    })
    .filter((entry) => entry.path);
}
// Rebuilds release/release-notes.md from the already-released version's
// section in CHANGELOG.md; throws when that section is missing.
// Returns the path that was written.
export function writeReleaseNotesForVersion(options?: ChangelogOptions): string {
  const cwd = options?.cwd ?? process.cwd();
  const readFileSync = options?.deps?.readFileSync ?? fs.readFileSync;
  const version = resolveVersion(options ?? {});
  const changelogPath = path.join(cwd, 'CHANGELOG.md');
  const changelog = readFileSync(changelogPath, 'utf8');
  const changes = extractReleaseSectionBody(changelog, version);
  if (changes === null) {
    throw new Error(`Missing CHANGELOG section for v${version}.`);
  }
  return writeReleaseNotesFile(cwd, changes, options?.deps);
}
/**
 * Parses CLI flags of the form `--flag value` into an options object.
 * Unknown flags, bare positionals, and flags missing a value are silently
 * ignored, matching the previous behavior. The six copy-pasted if-blocks are
 * collapsed into one flag -> option-key table.
 */
function parseCliArgs(argv: string[]): {
  baseRef?: string;
  cwd?: string;
  date?: string;
  headRef?: string;
  labels?: string;
  version?: string;
} {
  const flagToKey = {
    '--base-ref': 'baseRef',
    '--cwd': 'cwd',
    '--date': 'date',
    '--head-ref': 'headRef',
    '--labels': 'labels',
    '--version': 'version',
  } as const;
  const parsed: {
    baseRef?: string;
    cwd?: string;
    date?: string;
    headRef?: string;
    labels?: string;
    version?: string;
  } = {};
  for (let index = 0; index < argv.length; index += 1) {
    const current = argv[index] ?? '';
    const next = argv[index + 1];
    if (current in flagToKey && next) {
      // Safe: membership was just checked with `in`.
      parsed[flagToKey[current as keyof typeof flagToKey]] = next;
      index += 1; // consume the value
    }
  }
  return parsed;
}
// CLI dispatcher: `build` (default) regenerates changelog artifacts, `check`
// gates a release, `lint` validates fragments, `pr-check` enforces PR
// fragment rules against a git diff, `release-notes` rebuilds the notes file.
function main(): void {
  const [command = 'build', ...argv] = process.argv.slice(2);
  const options = parseCliArgs(argv);
  switch (command) {
    case 'build':
      writeChangelogArtifacts(options);
      return;
    case 'check':
      verifyChangelogReadyForRelease(options);
      return;
    case 'lint':
      verifyChangelogFragments(options);
      return;
    case 'pr-check':
      verifyChangelogFragments(options);
      verifyPullRequestChangelog({
        changedLabels: options.labels?.split(',') ?? [],
        changedEntries: resolveChangedPathsFromGit(
          options.cwd ?? process.cwd(),
          options.baseRef ?? 'origin/main',
          options.headRef ?? 'HEAD',
        ),
      });
      return;
    case 'release-notes':
      writeReleaseNotesForVersion(options);
      return;
    default:
      throw new Error(`Unknown changelog command: ${command}`);
  }
}
// Run the CLI only when this file is executed directly (not when imported by
// tests). NOTE(review): relies on CommonJS `require.main` while the file uses
// ESM import syntax — presumably the TS build emits CommonJS; confirm the
// tsconfig `module` setting.
if (require.main === module) {
main();
}

View File

@@ -0,0 +1,30 @@
import { spawnSync } from 'node:child_process';
import { fileURLToPath } from 'node:url';

// Environment variables that would trigger code signing in electron-builder.
// All of them are removed so this build is explicitly unsigned.
const SIGNING_ENV_KEYS = [
  'CSC_LINK',
  'CSC_KEY_PASSWORD',
  'WIN_CSC_LINK',
  'WIN_CSC_KEY_PASSWORD',
  'CSC_NAME',
  'WIN_CSC_NAME',
];

const env = { ...process.env };
for (const key of SIGNING_ENV_KEYS) {
  delete env[key];
}
// Also stop electron-builder from auto-discovering signing identities.
env.CSC_IDENTITY_AUTO_DISCOVERY = 'false';

// Invoke the locally installed electron-builder CLI with the current Node.
const electronBuilderCli = fileURLToPath(
  new URL('../node_modules/electron-builder/out/cli/cli.js', import.meta.url),
);
const result = spawnSync(
  process.execPath,
  [electronBuilderCli, '--win', 'nsis', 'zip', '--publish', 'never'],
  { stdio: 'inherit', env },
);
if (result.error) {
  throw result.error;
}
// Propagate electron-builder's exit code; a null status (killed by a signal)
// maps to failure.
process.exit(result.status ?? 1);

View File

@@ -13,13 +13,17 @@ const submodulePackageLockPath = path.join(submoduleDir, 'package-lock.json');
const buildOutputDir = path.join(repoRoot, 'build', 'yomitan');
const stampPath = path.join(buildOutputDir, '.subminer-build.json');
const zipPath = path.join(submoduleDir, 'builds', 'yomitan-chrome.zip');
const npmCommand = process.platform === 'win32' ? 'npm.cmd' : 'npm';
const bunCommand = process.versions.bun ? process.execPath : 'bun';
const dependencyStampPath = path.join(submoduleDir, 'node_modules', '.subminer-package-lock-hash');
// Runs a command in `cwd`, streaming its output to this process; throws on
// non-zero exit.
function run(command, args, cwd) {
  const options = { cwd, stdio: 'inherit' };
  execFileSync(command, args, options);
}
// Doubles single quotes so the value is safe inside a PowerShell
// single-quoted string literal.
function escapePowerShellString(value) {
  return value.split("'").join("''");
}
// Runs a command in `cwd` and returns its trimmed stdout; throws on
// non-zero exit.
function readCommand(command, args, cwd) {
  const output = execFileSync(command, args, { cwd, encoding: 'utf8' });
  return output.trim();
}
@@ -78,7 +82,7 @@ function ensureDependenciesInstalled() {
} catch {}
if (!fs.existsSync(nodeModulesDir) || installedLockHash !== currentLockHash) {
run(npmCommand, ['ci'], submoduleDir);
run(bunCommand, ['install', '--no-save'], submoduleDir);
fs.mkdirSync(nodeModulesDir, { recursive: true });
fs.writeFileSync(dependencyStampPath, `${currentLockHash}\n`, 'utf8');
}
@@ -86,7 +90,7 @@ function ensureDependenciesInstalled() {
function installAndBuild() {
ensureDependenciesInstalled();
run(npmCommand, ['run', 'build', '--', '--target', 'chrome'], submoduleDir);
run(bunCommand, ['./dev/bin/build.js', '--target', 'chrome'], submoduleDir);
}
function extractBuild() {
@@ -96,7 +100,22 @@ function extractBuild() {
const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-yomitan-'));
try {
run('unzip', ['-qo', zipPath, '-d', tempDir], repoRoot);
if (process.platform === 'win32') {
run(
'powershell.exe',
[
'-NoProfile',
'-NonInteractive',
'-ExecutionPolicy',
'Bypass',
'-Command',
`Expand-Archive -LiteralPath '${escapePowerShellString(zipPath)}' -DestinationPath '${escapePowerShellString(tempDir)}' -Force`,
],
repoRoot,
);
} else {
run('unzip', ['-qo', zipPath, '-d', tempDir], repoRoot);
}
fs.rmSync(buildOutputDir, { recursive: true, force: true });
fs.mkdirSync(path.dirname(buildOutputDir), { recursive: true });
fs.cpSync(tempDir, buildOutputDir, { recursive: true });

View File

@@ -0,0 +1,101 @@
import fs from 'node:fs';
import path from 'node:path';
// Coerces an optional CLI/env value to a trimmed string; anything that is
// not a non-blank string becomes ''.
function normalizeCandidate(candidate) {
  if (typeof candidate !== 'string') {
    return '';
  }
  const trimmed = candidate.trim();
  if (trimmed.length === 0) {
    return '';
  }
  return trimmed;
}
// True only for an existing regular file; directories and missing/unreadable
// paths yield false instead of throwing.
function fileExists(candidate) {
  let stats;
  try {
    stats = fs.statSync(candidate);
  } catch {
    return false;
  }
  return stats.isFile();
}
// Removes empty strings and duplicates while preserving first-seen order.
function unique(values) {
  const seen = new Set();
  const result = [];
  for (const value of values) {
    if (value.length > 0 && !seen.has(value)) {
      seen.add(value);
      result.push(value);
    }
  }
  return result;
}
// Probes well-known Windows locations for SubMiner.exe in priority order:
// explicit env overrides, the per-user install, machine-wide installs, a
// fixed C:\SubMiner fallback, then local build outputs under release/.
// Returns '' when no candidate exists on disk.
function findWindowsBinary(repoRoot) {
  const homeDir = process.env.HOME?.trim() || process.env.USERPROFILE?.trim() || '';
  const appDataDir = process.env.APPDATA?.trim() || '';
  // Derive %LOCALAPPDATA% from %APPDATA% (...\Roaming -> ...\Local) when
  // only APPDATA is set.
  const derivedLocalAppData =
    appDataDir && /[\\/]Roaming$/i.test(appDataDir)
      ? appDataDir.replace(/[\\/]Roaming$/i, `${path.sep}Local`)
      : '';
  const localAppData =
    process.env.LOCALAPPDATA?.trim() ||
    derivedLocalAppData ||
    (homeDir ? path.join(homeDir, 'AppData', 'Local') : '');
  const programFiles = process.env.ProgramFiles?.trim() || 'C:\\Program Files';
  const programFilesX86 = process.env['ProgramFiles(x86)']?.trim() || 'C:\\Program Files (x86)';
  // unique() drops the '' placeholders produced when an env base is missing.
  const candidates = unique([
    normalizeCandidate(process.env.SUBMINER_BINARY_PATH),
    normalizeCandidate(process.env.SUBMINER_APPIMAGE_PATH),
    localAppData ? path.join(localAppData, 'Programs', 'SubMiner', 'SubMiner.exe') : '',
    path.join(programFiles, 'SubMiner', 'SubMiner.exe'),
    path.join(programFilesX86, 'SubMiner', 'SubMiner.exe'),
    'C:\\SubMiner\\SubMiner.exe',
    path.join(repoRoot, 'release', 'win-unpacked', 'SubMiner.exe'),
    path.join(repoRoot, 'release', 'SubMiner', 'SubMiner.exe'),
    path.join(repoRoot, 'release', 'SubMiner.exe'),
  ]);
  return candidates.find((candidate) => fileExists(candidate)) || '';
}
// Rewrites the `binary_path=` line of the plugin config in place, flattening
// any newlines in the value; the file is only written when the line changed.
function rewriteBinaryPath(configPath, binaryPath) {
  const original = fs.readFileSync(configPath, 'utf8');
  const sanitized = binaryPath.replace(/\r?\n/g, ' ').trim();
  const rewritten = original.replace(/^binary_path=.*$/m, `binary_path=${sanitized}`);
  if (rewritten === original) {
    return;
  }
  fs.writeFileSync(configPath, rewritten, 'utf8');
}
// Rewrites the `socket_path=` line of the plugin config in place, flattening
// any newlines in the value; the file is only written when the line changed.
function rewriteSocketPath(configPath, socketPath) {
  const original = fs.readFileSync(configPath, 'utf8');
  const sanitized = socketPath.replace(/\r?\n/g, ' ').trim();
  const rewritten = original.replace(/^socket_path=.*$/m, `socket_path=${sanitized}`);
  if (rewritten === original) {
    return;
  }
  fs.writeFileSync(configPath, rewritten, 'utf8');
}
// CLI entry: node <script> <configPath> [repoRoot] [platform]
// On Windows, points the mpv plugin config at the named-pipe socket and
// (when detectable) the installed SubMiner.exe; other platforms are a no-op.
const [, , configPathArg, repoRootArg, platformArg] = process.argv;
const configPath = normalizeCandidate(configPathArg);
const repoRoot = normalizeCandidate(repoRootArg) || process.cwd();
const platform = normalizeCandidate(platformArg) || process.platform;
if (!configPath) {
  console.error('[ERROR] Missing plugin config path');
  process.exit(1);
}
if (!fileExists(configPath)) {
  console.error(`[ERROR] Plugin config not found: ${configPath}`);
  process.exit(1);
}
if (platform !== 'win32') {
  console.log('[INFO] Skipping binary_path rewrite for non-Windows platform');
  process.exit(0);
}
// Rewrite the socket first so the plugin can connect even when the binary
// cannot be located automatically below.
const windowsSocketPath = '\\\\.\\pipe\\subminer-socket';
rewriteSocketPath(configPath, windowsSocketPath);
const binaryPath = findWindowsBinary(repoRoot);
if (!binaryPath) {
  // Still exit 0: a missing binary is a warning, not a setup failure.
  console.warn(
    `[WARN] Configured plugin socket_path=${windowsSocketPath} but could not detect SubMiner.exe; set binary_path manually or provide SUBMINER_BINARY_PATH`,
  );
  process.exit(0);
}
rewriteBinaryPath(configPath, binaryPath);
console.log(`[INFO] Configured plugin socket_path=${windowsSocketPath} binary_path=${binaryPath}`);

View File

@@ -20,6 +20,11 @@ private struct WindowGeometry {
let height: Int
}
private struct WindowState {
let geometry: WindowGeometry
let focused: Bool
}
private let targetMpvSocketPath: String? = {
guard CommandLine.arguments.count > 1 else {
return nil
@@ -136,7 +141,11 @@ private func geometryFromAXWindow(_ axWindow: AXUIElement) -> WindowGeometry? {
return geometry
}
private func geometryFromAccessibilityAPI() -> WindowGeometry? {
private func frontmostApplicationPid() -> pid_t? {
NSWorkspace.shared.frontmostApplication?.processIdentifier
}
private func windowStateFromAccessibilityAPI() -> WindowState? {
let runningApps = NSWorkspace.shared.runningApplications.filter { app in
guard let name = app.localizedName else {
return false
@@ -144,6 +153,8 @@ private func geometryFromAccessibilityAPI() -> WindowGeometry? {
return normalizedMpvName(name)
}
let frontmostPid = frontmostApplicationPid()
for app in runningApps {
let appElement = AXUIElementCreateApplication(app.processIdentifier)
if !windowHasTargetSocket(app.processIdentifier) {
@@ -173,7 +184,10 @@ private func geometryFromAccessibilityAPI() -> WindowGeometry? {
}
if let geometry = geometryFromAXWindow(window) {
return geometry
return WindowState(
geometry: geometry,
focused: frontmostPid == windowPid
)
}
}
}
@@ -181,11 +195,12 @@ private func geometryFromAccessibilityAPI() -> WindowGeometry? {
return nil
}
private func geometryFromCoreGraphics() -> WindowGeometry? {
private func windowStateFromCoreGraphics() -> WindowState? {
// Keep the CG fallback for environments without Accessibility permissions.
// Use on-screen layer-0 windows to avoid off-screen helpers/shadows.
let options: CGWindowListOption = [.optionOnScreenOnly, .excludeDesktopElements]
let windowList = CGWindowListCopyWindowInfo(options, kCGNullWindowID) as? [[String: Any]] ?? []
let frontmostPid = frontmostApplicationPid()
for window in windowList {
guard let ownerName = window[kCGWindowOwnerName as String] as? String,
@@ -226,14 +241,19 @@ private func geometryFromCoreGraphics() -> WindowGeometry? {
continue
}
return geometry
return WindowState(
geometry: geometry,
focused: frontmostPid == ownerPid
)
}
return nil
}
if let window = geometryFromAccessibilityAPI() ?? geometryFromCoreGraphics() {
print("\(window.x),\(window.y),\(window.width),\(window.height)")
if let window = windowStateFromAccessibilityAPI() ?? windowStateFromCoreGraphics() {
print(
"\(window.geometry.x),\(window.geometry.y),\(window.geometry.width),\(window.geometry.height),\(window.focused ? 1 : 0)"
)
} else {
print("not-found")
}

View File

@@ -0,0 +1,175 @@
# Locates a visible mpv window on Windows and prints its geometry to stdout as
# "x,y,width,height" (or "not-found"). Focus state is reported separately on
# stderr as "focus=focused" / "focus=not-focused" so stdout stays a pure
# geometry payload.
param(
    # Only 'geometry' is accepted; the parameter reserves room for future modes.
    # NOTE(review): $Mode is validated but never read below — confirm intent.
    [ValidateSet('geometry')]
    [string]$Mode = 'geometry',
    # Optional mpv IPC socket path; when set, only mpv processes launched with
    # --input-ipc-server=<SocketPath> (or the space-separated form) match.
    [string]$SocketPath
)
$ErrorActionPreference = 'Stop'
try {
    # Win32 interop surface: window enumeration, visibility/minimized checks,
    # foreground-window query, and frame-bounds lookup via DWM.
    Add-Type -TypeDefinition @"
using System;
using System.Runtime.InteropServices;
public static class SubMinerWindowsHelper {
    public delegate bool EnumWindowsProc(IntPtr hWnd, IntPtr lParam);
    [StructLayout(LayoutKind.Sequential)]
    public struct RECT {
        public int Left;
        public int Top;
        public int Right;
        public int Bottom;
    }
    [DllImport("user32.dll")]
    public static extern bool EnumWindows(EnumWindowsProc lpEnumFunc, IntPtr lParam);
    [DllImport("user32.dll")]
    [return: MarshalAs(UnmanagedType.Bool)]
    public static extern bool IsWindowVisible(IntPtr hWnd);
    [DllImport("user32.dll")]
    public static extern bool IsIconic(IntPtr hWnd);
    [DllImport("user32.dll")]
    public static extern IntPtr GetForegroundWindow();
    [DllImport("user32.dll", SetLastError = true)]
    public static extern uint GetWindowThreadProcessId(IntPtr hWnd, out uint processId);
    [DllImport("user32.dll", SetLastError = true)]
    [return: MarshalAs(UnmanagedType.Bool)]
    public static extern bool GetWindowRect(IntPtr hWnd, out RECT rect);
    [DllImport("dwmapi.dll")]
    public static extern int DwmGetWindowAttribute(IntPtr hwnd, int dwAttribute, out RECT pvAttribute, int cbAttribute);
}
"@
    # DWMWA_EXTENDED_FRAME_BOUNDS: the visible frame rectangle, which excludes
    # the invisible resize border/shadow that GetWindowRect includes.
    $DWMWA_EXTENDED_FRAME_BOUNDS = 9
    # Returns X/Y/Width/Height/Area for a window handle, or $null when the
    # rectangle is degenerate. Prefers DWM extended bounds and falls back to
    # GetWindowRect when the DWM call returns a non-zero HRESULT.
    function Get-WindowBounds {
        param([IntPtr]$hWnd)
        $rect = New-Object SubMinerWindowsHelper+RECT
        $size = [System.Runtime.InteropServices.Marshal]::SizeOf($rect)
        $dwmResult = [SubMinerWindowsHelper]::DwmGetWindowAttribute(
            $hWnd,
            $DWMWA_EXTENDED_FRAME_BOUNDS,
            [ref]$rect,
            $size
        )
        if ($dwmResult -ne 0) {
            if (-not [SubMinerWindowsHelper]::GetWindowRect($hWnd, [ref]$rect)) {
                return $null
            }
        }
        $width = $rect.Right - $rect.Left
        $height = $rect.Bottom - $rect.Top
        if ($width -le 0 -or $height -le 0) {
            return $null
        }
        return [PSCustomObject]@{
            X = $rect.Left
            Y = $rect.Top
            Width = $width
            Height = $height
            Area = $width * $height
        }
    }
    # Snapshot all process command lines once up front (only needed when
    # filtering by socket path) instead of issuing one CIM query per window.
    $commandLineByPid = @{}
    if (-not [string]::IsNullOrWhiteSpace($SocketPath)) {
        foreach ($process in Get-CimInstance Win32_Process) {
            $commandLineByPid[[uint32]$process.ProcessId] = $process.CommandLine
        }
    }
    $mpvMatches = New-Object System.Collections.Generic.List[object]
    $foregroundWindow = [SubMinerWindowsHelper]::GetForegroundWindow()
    # EnumWindows callback: collects every visible, non-minimized window owned
    # by an 'mpv' process (optionally restricted to the socket-bound instance).
    # Always returns $true so enumeration continues to completion. Relies on
    # PowerShell dynamic scoping to reach $mpvMatches and friends.
    $callback = [SubMinerWindowsHelper+EnumWindowsProc]{
        param([IntPtr]$hWnd, [IntPtr]$lParam)
        if (-not [SubMinerWindowsHelper]::IsWindowVisible($hWnd)) {
            return $true
        }
        if ([SubMinerWindowsHelper]::IsIconic($hWnd)) {
            return $true
        }
        [uint32]$windowProcessId = 0
        [void][SubMinerWindowsHelper]::GetWindowThreadProcessId($hWnd, [ref]$windowProcessId)
        if ($windowProcessId -eq 0) {
            return $true
        }
        try {
            $process = Get-Process -Id $windowProcessId -ErrorAction Stop
        } catch {
            # Process exited between enumeration and lookup; skip the window.
            return $true
        }
        if ($process.ProcessName -ine 'mpv') {
            return $true
        }
        if (-not [string]::IsNullOrWhiteSpace($SocketPath)) {
            $commandLine = $commandLineByPid[[uint32]$windowProcessId]
            if ([string]::IsNullOrWhiteSpace($commandLine)) {
                return $true
            }
            # Accept both "--input-ipc-server=PATH" and "--input-ipc-server PATH".
            if (
                ($commandLine -notlike "*--input-ipc-server=$SocketPath*") -and
                ($commandLine -notlike "*--input-ipc-server $SocketPath*")
            ) {
                return $true
            }
        }
        $bounds = Get-WindowBounds -hWnd $hWnd
        if ($null -eq $bounds) {
            return $true
        }
        $mpvMatches.Add([PSCustomObject]@{
            HWnd = $hWnd
            X = $bounds.X
            Y = $bounds.Y
            Width = $bounds.Width
            Height = $bounds.Height
            Area = $bounds.Area
            IsForeground = ($foregroundWindow -ne [IntPtr]::Zero -and $hWnd -eq $foregroundWindow)
        })
        return $true
    }
    [void][SubMinerWindowsHelper]::EnumWindows($callback, [IntPtr]::Zero)
    # Focus state is emitted on stderr regardless of whether a window was found.
    $focusedMatch = $mpvMatches | Where-Object { $_.IsForeground } | Select-Object -First 1
    if ($null -ne $focusedMatch) {
        [Console]::Error.WriteLine('focus=focused')
    } else {
        [Console]::Error.WriteLine('focus=not-focused')
    }
    if ($mpvMatches.Count -eq 0) {
        Write-Output 'not-found'
        exit 0
    }
    # Prefer the focused mpv window; otherwise pick the largest candidate.
    $bestMatch = if ($null -ne $focusedMatch) {
        $focusedMatch
    } else {
        $mpvMatches | Sort-Object -Property Area, Width, Height -Descending | Select-Object -First 1
    }
    Write-Output "$($bestMatch.X),$($bestMatch.Y),$($bestMatch.Width),$($bestMatch.Height)"
} catch {
    [Console]::Error.WriteLine($_.Exception.Message)
    exit 1
}

View File

@@ -0,0 +1,84 @@
import fs from 'node:fs';
import path from 'node:path';
import { execFileSync } from 'node:child_process';
import { fileURLToPath } from 'node:url';
// This script lives in scripts/, so the repository root is one level up.
const scriptDir = path.dirname(fileURLToPath(import.meta.url));
const repoRoot = path.resolve(scriptDir, '..');
// Renderer assets are copied verbatim from src/renderer into dist/renderer.
const rendererSourceDir = path.join(repoRoot, 'src', 'renderer');
const rendererOutputDir = path.join(repoRoot, 'dist', 'renderer');
// Platform window-helper scripts/binaries are staged under dist/scripts.
const scriptsOutputDir = path.join(repoRoot, 'dist', 'scripts');
const windowsHelperSourcePath = path.join(scriptDir, 'get-mpv-window-windows.ps1');
const windowsHelperOutputPath = path.join(scriptsOutputDir, 'get-mpv-window-windows.ps1');
const macosHelperSourcePath = path.join(scriptDir, 'get-mpv-window-macos.swift');
// Compiled Swift helper output; when compilation is skipped or fails, the
// .swift source is staged at macosHelperSourceCopyPath instead.
const macosHelperBinaryPath = path.join(scriptsOutputDir, 'get-mpv-window-macos');
const macosHelperSourceCopyPath = path.join(scriptsOutputDir, 'get-mpv-window-macos.swift');
// Create dirPath (and any missing parents); a no-op when it already exists.
function ensureDir(dirPath) {
  const options = { recursive: true };
  fs.mkdirSync(dirPath, options);
}
// Copy a single file, creating the destination directory first if needed.
function copyFile(sourcePath, outputPath) {
  const destinationDir = path.dirname(outputPath);
  ensureDir(destinationDir);
  fs.copyFileSync(sourcePath, outputPath);
}
// Stage the renderer's static assets (HTML, CSS, bundled fonts) into dist.
function copyRendererAssets() {
  for (const assetName of ['index.html', 'style.css']) {
    copyFile(path.join(rendererSourceDir, assetName), path.join(rendererOutputDir, assetName));
  }
  fs.cpSync(path.join(rendererSourceDir, 'fonts'), path.join(rendererOutputDir, 'fonts'), {
    recursive: true,
    force: true,
  });
  process.stdout.write(`Staged renderer assets in ${rendererOutputDir}\n`);
}
// Stage the PowerShell mpv-window helper into dist/scripts.
function stageWindowsHelper() {
  const stagedPath = windowsHelperOutputPath;
  copyFile(windowsHelperSourcePath, stagedPath);
  process.stdout.write(`Staged Windows helper: ${stagedPath}\n`);
}
// Stage the raw Swift source when a compiled helper binary is unavailable.
function fallbackToMacosSource() {
  const stagedPath = macosHelperSourceCopyPath;
  copyFile(macosHelperSourcePath, stagedPath);
  process.stdout.write(`Staged macOS helper source fallback: ${stagedPath}\n`);
}
// Opt-out switch for CI/dev machines without a Swift toolchain.
function shouldSkipMacosHelperBuild() {
  const skipFlag = process.env.SUBMINER_SKIP_MACOS_HELPER_BUILD;
  return skipFlag === '1';
}
// Compile the Swift helper when possible; otherwise stage the source so the
// runtime can compile (or interpret) it on the target machine.
function buildMacosHelper() {
  if (shouldSkipMacosHelperBuild()) {
    process.stdout.write('Skipping macOS helper build (SUBMINER_SKIP_MACOS_HELPER_BUILD=1)\n');
    fallbackToMacosSource();
    return;
  }
  if (process.platform !== 'darwin') {
    process.stdout.write('Skipping macOS helper build (not on macOS)\n');
    fallbackToMacosSource();
    return;
  }
  try {
    const compileArgs = ['-O', macosHelperSourcePath, '-o', macosHelperBinaryPath];
    execFileSync('swiftc', compileArgs, { stdio: 'inherit' });
    // swiftc output is not guaranteed executable for all users; make it so.
    fs.chmodSync(macosHelperBinaryPath, 0o755);
    process.stdout.write(`Built macOS helper: ${macosHelperBinaryPath}\n`);
  } catch (compileError) {
    process.stdout.write('Failed to compile macOS helper; using source fallback.\n');
    fallbackToMacosSource();
    if (compileError instanceof Error) {
      process.stderr.write(`${compileError.message}\n`);
    }
  }
}
// Run each staging step in sequence.
function main() {
  const stages = [copyRendererAssets, stageWindowsHelper, buildMacosHelper];
  for (const stage of stages) {
    stage();
  }
}
main();

View File

@@ -17,4 +17,5 @@ paths=(
"src"
)
exec bunx prettier "$@" "${paths[@]}"
BUN_BIN="$(command -v bun.exe || command -v bun)"
exec "$BUN_BIN" x prettier "$@" "${paths[@]}"

View File

@@ -1,8 +1,9 @@
import { readdirSync } from 'node:fs';
import { fileURLToPath } from 'node:url';
import { relative, resolve } from 'node:path';
import { spawnSync } from 'node:child_process';
const repoRoot = resolve(new URL('..', import.meta.url).pathname);
const repoRoot = resolve(fileURLToPath(new URL('..', import.meta.url)));
const lanes = {
'bun-src-full': {

View File

@@ -0,0 +1,223 @@
-- Fail with a descriptive message unless actual and expected compare equal.
local function assert_equal(actual, expected, message)
  if actual ~= expected then
    local header = message or "assert_equal failed"
    error(header .. "\nexpected: " .. tostring(expected) .. "\nactual: " .. tostring(actual))
  end
end
-- Fail unless the condition is truthy.
local function assert_true(condition, message)
  if not condition then
    error(message or "assert_true failed")
  end
end
-- Run callback with os.getenv overridden by the given table; names missing
-- from the table fall through to the real environment. The original getenv
-- is always restored, and callback errors are re-raised afterwards.
local function with_env(env, callback)
  local saved_getenv = os.getenv
  os.getenv = function(name)
    local override = env[name]
    if override == nil then
      return saved_getenv(name)
    end
    return override
  end
  local ok, result = pcall(callback)
  os.getenv = saved_getenv
  if not ok then
    error(result)
  end
  return result
end
-- Build a binary-resolver instance wired to stubbed mpv utils. config.entries
-- maps absolute paths to "file"/"dir" to simulate the filesystem; config.mp
-- optionally stubs mp.command_native; config.is_windows toggles the platform.
local function create_binary_module(config)
  local binary_module = dofile("plugin/subminer/binary.lua")
  local entries = config.entries or {}
  -- Simulated mp.utils.file_info backed by the entries table.
  local function file_info(path)
    local kind = entries[path]
    if kind == "file" then
      return { is_dir = false }
    elseif kind == "dir" then
      return { is_dir = true }
    end
    return nil
  end
  return binary_module.create({
    mp = config.mp,
    utils = {
      file_info = file_info,
      join_path = function(...)
        -- Windows-style separator to match the paths used in the fixtures.
        return table.concat({ ... }, "\\")
      end,
    },
    opts = {
      binary_path = config.binary_path or "",
    },
    state = {},
    environment = {
      is_windows = function()
        return config.is_windows == true
      end,
    },
    log = {
      subminer_log = function() end,
    },
  })
end
-- Scenario: explicit binary_path configured without the .exe suffix.
do
  local binary = create_binary_module({
    is_windows = true,
    binary_path = "C:\\Users\\tester\\AppData\\Local\\Programs\\SubMiner\\SubMiner",
    entries = {
      ["C:\\Users\\tester\\AppData\\Local\\Programs\\SubMiner\\SubMiner.exe"] = "file",
    },
  })
  assert_equal(
    binary.find_binary(),
    "C:\\Users\\tester\\AppData\\Local\\Programs\\SubMiner\\SubMiner.exe",
    "windows resolver should append .exe for configured binary_path"
  )
end
-- Scenario: no binary_path configured; the resolver queries a running
-- SubMiner process via powershell.exe (stubbed through mp.command_native)
-- and recovers the executable path from its output.
do
  local binary = create_binary_module({
    is_windows = true,
    mp = {
      command_native = function(command)
        local args = command.args or {}
        if args[1] == "powershell.exe" then
          return {
            status = 0,
            stdout = "C:\\Users\\tester\\AppData\\Local\\Programs\\SubMiner\\SubMiner.exe\n",
            stderr = "",
          }
        end
        return {
          status = 1,
          stdout = "",
          stderr = "unexpected command",
        }
      end,
    },
    entries = {
      ["C:\\Users\\tester\\AppData\\Local\\Programs\\SubMiner\\SubMiner.exe"] = "file",
    },
  })
  assert_equal(
    binary.find_binary(),
    "C:\\Users\\tester\\AppData\\Local\\Programs\\SubMiner\\SubMiner.exe",
    "windows resolver should recover binary from running SubMiner process"
  )
end
-- Scenario: binary_path points at the install directory rather than the exe.
do
  local binary = create_binary_module({
    is_windows = true,
    binary_path = "C:\\Users\\tester\\AppData\\Local\\Programs\\SubMiner",
    entries = {
      ["C:\\Users\\tester\\AppData\\Local\\Programs\\SubMiner"] = "dir",
      ["C:\\Users\\tester\\AppData\\Local\\Programs\\SubMiner\\SubMiner.exe"] = "file",
    },
  })
  assert_equal(
    binary.find_binary(),
    "C:\\Users\\tester\\AppData\\Local\\Programs\\SubMiner\\SubMiner.exe",
    "windows resolver should accept install directory binary_path"
  )
end
-- Scenario: auto-detection via the LOCALAPPDATA environment variable.
do
  local resolved = with_env({
    LOCALAPPDATA = "C:\\Users\\tester\\AppData\\Local",
    HOME = "",
    USERPROFILE = "C:\\Users\\tester",
    ProgramFiles = "C:\\Program Files",
    ["ProgramFiles(x86)"] = "C:\\Program Files (x86)",
  }, function()
    local binary = create_binary_module({
      is_windows = true,
      entries = {
        ["C:\\Users\\tester\\AppData\\Local\\Programs\\SubMiner\\SubMiner.exe"] = "file",
      },
    })
    return binary.find_binary()
  end)
  assert_equal(
    resolved,
    "C:\\Users\\tester\\AppData\\Local\\Programs\\SubMiner\\SubMiner.exe",
    "windows auto-detection should probe LOCALAPPDATA install path"
  )
end
-- Scenario: LOCALAPPDATA is empty; the Local path must be derived from the
-- sibling APPDATA (Roaming) directory.
do
  local resolved = with_env({
    APPDATA = "C:\\Users\\tester\\AppData\\Roaming",
    LOCALAPPDATA = "",
    HOME = "",
    USERPROFILE = "C:\\Users\\tester",
    ProgramFiles = "C:\\Program Files",
    ["ProgramFiles(x86)"] = "C:\\Program Files (x86)",
  }, function()
    local binary = create_binary_module({
      is_windows = true,
      entries = {
        ["C:\\Users\\tester\\AppData\\Local\\Programs\\SubMiner\\SubMiner.exe"] = "file",
      },
    })
    return binary.find_binary()
  end)
  assert_equal(
    resolved,
    "C:\\Users\\tester\\AppData\\Local\\Programs\\SubMiner\\SubMiner.exe",
    "windows auto-detection should derive Local install path from APPDATA"
  )
end
-- Scenario: SUBMINER_BINARY_PATH override set without the .exe suffix.
do
  local resolved = with_env({
    SUBMINER_BINARY_PATH = "C:\\Portable\\SubMiner\\SubMiner",
  }, function()
    local binary = create_binary_module({
      is_windows = true,
      entries = {
        ["C:\\Portable\\SubMiner\\SubMiner.exe"] = "file",
      },
    })
    return binary.find_binary()
  end)
  assert_equal(
    resolved,
    "C:\\Portable\\SubMiner\\SubMiner.exe",
    "windows env override should resolve .exe suffix"
  )
end
-- Scenario: ensure_binary_available caches its discovery and does not
-- interfere with a subsequent find_binary call.
do
  local binary = create_binary_module({
    is_windows = true,
    binary_path = "C:\\Users\\tester\\AppData\\Local\\Programs\\SubMiner",
    entries = {
      ["C:\\Users\\tester\\AppData\\Local\\Programs\\SubMiner"] = "dir",
      ["C:\\Users\\tester\\AppData\\Local\\Programs\\SubMiner\\SubMiner.exe"] = "file",
    },
  })
  assert_true(binary.ensure_binary_available() == true, "ensure_binary_available should cache discovered windows binary")
  assert_equal(
    binary.find_binary(),
    "C:\\Users\\tester\\AppData\\Local\\Programs\\SubMiner\\SubMiner.exe",
    "ensure_binary_available should not break follow-up lookup"
  )
end
print("plugin windows binary resolver tests: OK")

View File

@@ -461,6 +461,36 @@ do
)
end
do
local recorded, err = run_plugin_scenario({
process_list = "",
option_overrides = {
binary_path = binary_path,
auto_start = "yes",
auto_start_visible_overlay = "yes",
auto_start_pause_until_ready = "no",
socket_path = "/tmp/subminer-socket",
},
input_ipc_server = "/tmp/subminer-socket",
path = "https://www.youtube.com/watch?v=lJI7uL4JDkE",
media_title = "【文字起こし】マジで役立つ恋愛術【告radio】",
files = {
[binary_path] = true,
},
})
assert_true(recorded ~= nil, "plugin failed to load for URL overlay-start AniSkip scenario: " .. tostring(err))
fire_event(recorded, "file-loaded")
assert_true(find_start_call(recorded.async_calls) ~= nil, "URL auto-start should still invoke --start command")
assert_true(
not has_async_curl_for(recorded.async_calls, "myanimelist.net/search/prefix.json"),
"URL playback should skip AniSkip MAL lookup even after overlay-start"
)
assert_true(
not has_async_curl_for(recorded.async_calls, "api.aniskip.com"),
"URL playback should skip AniSkip API lookup even after overlay-start"
)
end
do
local recorded, err = run_plugin_scenario({
process_list = "",
@@ -687,6 +717,30 @@ do
)
end
do
local recorded, err = run_plugin_scenario({
process_list = "",
option_overrides = {
binary_path = binary_path,
auto_start = "yes",
auto_start_visible_overlay = "yes",
socket_path = "/tmp/subminer-socket",
},
input_ipc_server = "/tmp/subminer-socket",
media_title = "Random Movie",
files = {
[binary_path] = true,
},
})
assert_true(recorded ~= nil, "plugin failed to load for shutdown-preserve-background scenario: " .. tostring(err))
fire_event(recorded, "file-loaded")
fire_event(recorded, "shutdown")
assert_true(
find_control_call(recorded.async_calls, "--stop") == nil,
"mpv shutdown should not stop the background SubMiner process"
)
end
do
local recorded, err = run_plugin_scenario({
process_list = "",
@@ -748,4 +802,29 @@ do
)
end
do
local recorded, err = run_plugin_scenario({
platform = "windows",
process_list = "",
option_overrides = {
binary_path = "C:/Users/test/AppData/Local/Programs/SubMiner/SubMiner.exe",
auto_start = "yes",
auto_start_visible_overlay = "yes",
socket_path = "/tmp/subminer-socket",
},
input_ipc_server = "\\\\.\\pipe\\subminer-socket",
media_title = "Random Movie",
files = {
["C:/Users/test/AppData/Local/Programs/SubMiner/SubMiner.exe"] = true,
},
})
assert_true(recorded ~= nil, "plugin failed to load for Windows legacy socket config scenario: " .. tostring(err))
fire_event(recorded, "file-loaded")
local start_call = find_start_call(recorded.async_calls)
assert_true(
start_call ~= nil,
"Windows plugin should normalize legacy /tmp socket_path values to the named pipe default"
)
end
print("plugin start gate regression tests: OK")

21
src/ai/client.test.ts Normal file
View File

@@ -0,0 +1,21 @@
import test from 'node:test';
import assert from 'node:assert/strict';
import { extractAiText, normalizeOpenAiBaseUrl } from './client';
// Table-driven coverage of the base-URL normalizer.
test('normalizeOpenAiBaseUrl appends v1 when missing', () => {
  const cases: Array<[string, string]> = [
    ['https://openrouter.ai/api', 'https://openrouter.ai/api/v1'],
    ['https://example.test/v1', 'https://example.test/v1'],
  ];
  for (const [input, expected] of cases) {
    assert.equal(normalizeOpenAiBaseUrl(input), expected);
  }
});
// Structured content parts: text parts concatenate, non-text parts are
// dropped, and bare strings are trimmed.
test('extractAiText joins OpenAI structured text parts', () => {
  const structuredContent = [
    { type: 'text', text: 'hello ' },
    { type: 'text', text: 'world' },
    { type: 'image', text: 'ignored' },
  ];
  assert.equal(extractAiText(structuredContent), 'hello world');
  assert.equal(extractAiText(' plain text '), 'plain text');
});

118
src/ai/client.ts Normal file
View File

@@ -0,0 +1,118 @@
import { exec as execCallback } from 'node:child_process';
import { promisify } from 'node:util';
import axios from 'axios';
import type { AiConfig } from '../types';
// Fallback endpoint and model used when the request leaves them unset.
const DEFAULT_AI_BASE_URL = 'https://openrouter.ai/api';
const DEFAULT_AI_MODEL = 'openai/gpt-4o-mini';
// Hard cap for a single chat-completion HTTP request.
const DEFAULT_AI_TIMEOUT_MS = 15_000;
// Promise-returning exec; used by resolveAiApiKey to run apiKeyCommand.
const exec = promisify(execCallback);
/**
 * Normalizes an OpenAI-style message `content` value to plain text.
 * Bare strings are trimmed; arrays of structured parts contribute only the
 * parts shaped `{ type: 'text', text: string }`, concatenated in order and
 * trimmed. Anything else yields the empty string.
 */
export function extractAiText(content: unknown): string {
  if (typeof content === 'string') {
    return content.trim();
  }
  if (!Array.isArray(content)) {
    return '';
  }
  const isTextPart = (value: unknown): value is { type: 'text'; text: string } => {
    if (value === null || typeof value !== 'object') {
      return false;
    }
    const candidate = value as { type?: unknown; text?: unknown };
    return candidate.type === 'text' && typeof candidate.text === 'string';
  };
  return content
    .filter(isTextPart)
    .map((part) => part.text)
    .join('')
    .trim();
}
/**
 * Trims whitespace and trailing slashes, then guarantees the URL ends in
 * `/v1` (case-insensitive check; existing casing is preserved).
 */
export function normalizeOpenAiBaseUrl(baseUrl: string): string {
  let normalized = baseUrl.trim();
  while (normalized.endsWith('/')) {
    normalized = normalized.slice(0, -1);
  }
  return normalized.toLowerCase().endsWith('/v1') ? normalized : `${normalized}/v1`;
}
/**
 * Resolves the AI API key: a non-blank inline `apiKey` wins; otherwise
 * `apiKeyCommand` is executed (10s timeout) and its trimmed stdout is used.
 * Returns null when neither source yields a key or the command fails.
 */
export async function resolveAiApiKey(
  config: Pick<AiConfig, 'apiKey' | 'apiKeyCommand'>,
): Promise<string | null> {
  const directKey = config.apiKey?.trim();
  if (directKey) {
    return directKey;
  }
  const command = config.apiKeyCommand;
  if (!command || !command.trim()) {
    return null;
  }
  try {
    const { stdout } = await exec(command, { timeout: 10_000 });
    const resolvedKey = stdout.trim();
    return resolvedKey.length > 0 ? resolvedKey : null;
  } catch {
    // Best effort: a failing key command means "no key", not a crash.
    return null;
  }
}
/** One message in an OpenAI-style chat-completion conversation. */
export interface AiChatMessage {
  role: 'system' | 'user' | 'assistant';
  content: string;
}
/**
 * Parameters for a single chat-completion call. Optional fields fall back to
 * the module defaults (DEFAULT_AI_BASE_URL / DEFAULT_AI_MODEL /
 * DEFAULT_AI_TIMEOUT_MS).
 */
export interface AiChatCompletionRequest {
  apiKey: string;
  baseUrl?: string;
  model?: string;
  timeoutMs?: number;
  messages: AiChatMessage[];
}
/** Caller hooks; logWarning receives a message when the HTTP request fails. */
export interface AiChatCompletionCallbacks {
  logWarning: (message: string) => void;
}
/**
 * Sends a chat-completion request (OpenAI-compatible API, temperature 0)
 * and returns the assistant's text, or null on a blank key, an HTTP/transport
 * failure (logged via callbacks.logWarning), or an empty response.
 */
export async function requestAiChatCompletion(
  request: AiChatCompletionRequest,
  callbacks: AiChatCompletionCallbacks,
): Promise<string | null> {
  if (!request.apiKey.trim()) {
    return null;
  }
  const baseUrl = normalizeOpenAiBaseUrl(request.baseUrl || DEFAULT_AI_BASE_URL);
  const payload = {
    model: request.model || DEFAULT_AI_MODEL,
    temperature: 0,
    messages: request.messages,
  };
  const requestConfig = {
    headers: {
      Authorization: `Bearer ${request.apiKey}`,
      'Content-Type': 'application/json',
    },
    timeout: request.timeoutMs ?? DEFAULT_AI_TIMEOUT_MS,
  };
  try {
    const response = await axios.post(`${baseUrl}/chat/completions`, payload, requestConfig);
    // Only the first choice is consumed; shape is validated loosely since the
    // provider response is untyped.
    const firstChoice = (response.data as { choices?: unknown[] })?.choices?.[0] as
      | { message?: { content?: unknown } }
      | undefined;
    return extractAiText(firstChoice?.message?.content) || null;
  } catch (error) {
    const message = error instanceof Error ? error.message : 'Unknown AI request error';
    callbacks.logWarning(`AI request failed: ${message}`);
    return null;
  }
}

27
src/ai/config.ts Normal file
View File

@@ -0,0 +1,27 @@
import type { AiConfig, AiFeatureConfig } from '../types';
// Returns the trimmed string when non-empty, otherwise undefined so callers
// can fall back with `??`.
function trimToOverride(value: string | undefined): string | undefined {
  const trimmed = typeof value === 'string' ? value.trim() : '';
  return trimmed || undefined;
}
/**
 * Combines the shared AI config with a per-feature override. Only `model`
 * and `systemPrompt` may be overridden (non-blank values win); every other
 * field always comes from the shared config. A boolean/null featureConfig
 * carries no overrides.
 */
export function mergeAiConfig(
  sharedConfig: AiConfig | undefined,
  featureConfig?: AiFeatureConfig | boolean | null,
): AiConfig {
  const overrides =
    typeof featureConfig === 'object' && featureConfig !== null ? featureConfig : undefined;
  return {
    enabled: sharedConfig?.enabled,
    apiKey: sharedConfig?.apiKey,
    apiKeyCommand: sharedConfig?.apiKeyCommand,
    baseUrl: sharedConfig?.baseUrl,
    model: trimToOverride(overrides?.model) ?? sharedConfig?.model,
    systemPrompt: trimToOverride(overrides?.systemPrompt) ?? sharedConfig?.systemPrompt,
    requestTimeoutMs: sharedConfig?.requestTimeoutMs,
  };
}

View File

@@ -21,6 +21,7 @@ import { SubtitleTimingTracker } from './subtitle-timing-tracker';
import { MediaGenerator } from './media-generator';
import path from 'path';
import {
AiConfig,
AnkiConnectConfig,
KikuDuplicateCardInfo,
KikuFieldGroupingChoice,
@@ -135,6 +136,7 @@ export class AnkiIntegration {
private noteUpdateWorkflow: NoteUpdateWorkflow;
private fieldGroupingWorkflow: FieldGroupingWorkflow;
private runtime: AnkiIntegrationRuntime;
private aiConfig: AiConfig;
constructor(
config: AnkiConnectConfig,
@@ -147,8 +149,10 @@ export class AnkiIntegration {
duplicate: KikuDuplicateCardInfo;
}) => Promise<KikuFieldGroupingChoice>,
knownWordCacheStatePath?: string,
aiConfig: AiConfig = {},
) {
this.config = normalizeAnkiIntegrationConfig(config);
this.aiConfig = { ...aiConfig };
this.client = new AnkiConnectClient(this.config.url!);
this.mediaGenerator = new MediaGenerator();
this.timingTracker = timingTracker;
@@ -253,6 +257,7 @@ export class AnkiIntegration {
private createCardCreationService(): CardCreationService {
return new CardCreationService({
getConfig: () => this.config,
getAiConfig: () => this.aiConfig,
getTimingTracker: () => this.timingTracker,
getMpvClient: () => this.mpvClient,
getDeck: () => this.config.deck,
@@ -1096,7 +1101,10 @@ export class AnkiIntegration {
return requiredFields.every((fieldName) => this.hasFieldValue(noteInfo, fieldName));
}
applyRuntimeConfigPatch(patch: Partial<AnkiConnectConfig>): void {
applyRuntimeConfigPatch(patch: Partial<AnkiConnectConfig>, aiConfig?: AiConfig): void {
if (aiConfig) {
this.aiConfig = { ...aiConfig };
}
this.runtime.applyRuntimeConfigPatch(patch);
}

View File

@@ -0,0 +1,64 @@
import test from 'node:test';
import assert from 'node:assert/strict';
import { resolveSentenceBackText } from './ai';
// Shared no-op warning sink for all scenarios.
const noopWarn = () => undefined;
// Disabled AI: the secondary subtitle passes through untouched.
test('resolveSentenceBackText returns secondary subtitle when ai is disabled', async () => {
  const request = {
    sentence: '日本語',
    secondarySubText: 'existing translation',
    aiEnabled: false,
    aiConfig: {},
  };
  const result = await resolveSentenceBackText(request, { logWarning: noopWarn });
  assert.equal(result, 'existing translation');
});
// Enabled AI: the shared config's key/model reach the translator, and its
// output becomes the back text.
test('resolveSentenceBackText uses shared ai config when enabled', async () => {
  const result = await resolveSentenceBackText(
    {
      sentence: '日本語',
      secondarySubText: '',
      aiEnabled: true,
      aiConfig: {
        enabled: true,
        apiKey: 'abc',
        model: 'openai/gpt-4o-mini',
      },
    },
    {
      logWarning: noopWarn,
      translateSentence: async (translationRequest) => {
        assert.equal(translationRequest.apiKey, 'abc');
        assert.equal(translationRequest.model, 'openai/gpt-4o-mini');
        return 'translated';
      },
    },
  );
  assert.equal(result, 'translated');
});
// Translation failure with no secondary subtitle: fall back to the sentence.
test('resolveSentenceBackText falls back to sentence when ai translation fails with no secondary subtitle', async () => {
  const result = await resolveSentenceBackText(
    {
      sentence: '日本語',
      aiEnabled: true,
      aiConfig: { enabled: true, apiKey: 'abc' },
    },
    {
      logWarning: noopWarn,
      translateSentence: async () => null,
    },
  );
  assert.equal(result, '日本語');
});

Some files were not shown because too many files have changed in this diff Show More