mirror of
https://github.com/ksyasuda/SubMiner.git
synced 2026-03-20 12:11:28 -07:00
Compare commits
31 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
a34a7489db
|
|||
|
e59192bbe1
|
|||
|
e0f82d28f0
|
|||
|
a0521aeeaf
|
|||
|
2127f759ca
|
|||
|
5e787183d0
|
|||
|
81ca31b899
|
|||
|
e2a7597b4f
|
|||
|
2e59c21078
|
|||
|
7b5ab3294d
|
|||
|
2bbf38f987
|
|||
|
f09c91494d
|
|||
|
58ec9b76e0
|
|||
|
7a196f69d6
|
|||
| c799a8de3c | |||
|
34d2dce8dc
|
|||
|
3a22a97761
|
|||
|
962243e959
|
|||
|
021010a338
|
|||
|
4c0575afe0
|
|||
|
9e46176519
|
|||
|
f10e905dbd
|
|||
| e4aa8ff907 | |||
|
a6ece5388a
|
|||
|
6a44b54b51
|
|||
|
93cd688625
|
|||
|
8e319a417d
|
|||
|
38034db1e4
|
|||
|
f775f90360
|
|||
|
55dff6ced7
|
|||
|
d0c11d347b
|
3
.github/pull_request_template.md
vendored
Normal file
3
.github/pull_request_template.md
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
## Checklist
|
||||||
|
|
||||||
|
- [ ] Added a changelog fragment in `changes/`, or this PR is labeled `skip-changelog`
|
||||||
13
.github/workflows/ci.yml
vendored
13
.github/workflows/ci.yml
vendored
@@ -13,6 +13,7 @@ jobs:
|
|||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
submodules: true
|
submodules: true
|
||||||
|
|
||||||
- name: Setup Bun
|
- name: Setup Bun
|
||||||
@@ -20,11 +21,6 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
bun-version: 1.3.5
|
bun-version: 1.3.5
|
||||||
|
|
||||||
- name: Setup Node
|
|
||||||
uses: actions/setup-node@v4
|
|
||||||
with:
|
|
||||||
node-version: 22.12.0
|
|
||||||
|
|
||||||
- name: Cache dependencies
|
- name: Cache dependencies
|
||||||
uses: actions/cache@v4
|
uses: actions/cache@v4
|
||||||
with:
|
with:
|
||||||
@@ -39,6 +35,13 @@ jobs:
|
|||||||
- name: Install dependencies
|
- name: Install dependencies
|
||||||
run: bun install --frozen-lockfile
|
run: bun install --frozen-lockfile
|
||||||
|
|
||||||
|
- name: Lint changelog fragments
|
||||||
|
run: bun run changelog:lint
|
||||||
|
|
||||||
|
- name: Enforce pull request changelog fragments (`skip-changelog` label bypass)
|
||||||
|
if: github.event_name == 'pull_request'
|
||||||
|
run: bun run changelog:pr-check --base-ref "origin/${{ github.base_ref }}" --head-ref "HEAD" --labels "${{ join(github.event.pull_request.labels.*.name, ',') }}"
|
||||||
|
|
||||||
- name: Build (TypeScript check)
|
- name: Build (TypeScript check)
|
||||||
# Keep explicit typecheck for fast fail before full build/bundle.
|
# Keep explicit typecheck for fast fail before full build/bundle.
|
||||||
run: bun run typecheck
|
run: bun run typecheck
|
||||||
|
|||||||
135
.github/workflows/release.yml
vendored
135
.github/workflows/release.yml
vendored
@@ -26,11 +26,6 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
bun-version: 1.3.5
|
bun-version: 1.3.5
|
||||||
|
|
||||||
- name: Setup Node
|
|
||||||
uses: actions/setup-node@v4
|
|
||||||
with:
|
|
||||||
node-version: 22.12.0
|
|
||||||
|
|
||||||
- name: Cache dependencies
|
- name: Cache dependencies
|
||||||
uses: actions/cache@v4
|
uses: actions/cache@v4
|
||||||
with:
|
with:
|
||||||
@@ -85,11 +80,6 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
bun-version: 1.3.5
|
bun-version: 1.3.5
|
||||||
|
|
||||||
- name: Setup Node
|
|
||||||
uses: actions/setup-node@v4
|
|
||||||
with:
|
|
||||||
node-version: 22.12.0
|
|
||||||
|
|
||||||
- name: Cache dependencies
|
- name: Cache dependencies
|
||||||
uses: actions/cache@v4
|
uses: actions/cache@v4
|
||||||
with:
|
with:
|
||||||
@@ -113,8 +103,6 @@ jobs:
|
|||||||
|
|
||||||
- name: Build AppImage
|
- name: Build AppImage
|
||||||
run: bun run build:appimage
|
run: bun run build:appimage
|
||||||
env:
|
|
||||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Build unversioned AppImage
|
- name: Build unversioned AppImage
|
||||||
run: |
|
run: |
|
||||||
@@ -147,11 +135,6 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
bun-version: 1.3.5
|
bun-version: 1.3.5
|
||||||
|
|
||||||
- name: Setup Node
|
|
||||||
uses: actions/setup-node@v4
|
|
||||||
with:
|
|
||||||
node-version: 22.12.0
|
|
||||||
|
|
||||||
- name: Cache dependencies
|
- name: Cache dependencies
|
||||||
uses: actions/cache@v4
|
uses: actions/cache@v4
|
||||||
with:
|
with:
|
||||||
@@ -196,7 +179,6 @@ jobs:
|
|||||||
- name: Build signed + notarized macOS artifacts
|
- name: Build signed + notarized macOS artifacts
|
||||||
run: bun run build:mac
|
run: bun run build:mac
|
||||||
env:
|
env:
|
||||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
CSC_LINK: ${{ secrets.CSC_LINK }}
|
CSC_LINK: ${{ secrets.CSC_LINK }}
|
||||||
CSC_KEY_PASSWORD: ${{ secrets.CSC_KEY_PASSWORD }}
|
CSC_KEY_PASSWORD: ${{ secrets.CSC_KEY_PASSWORD }}
|
||||||
APPLE_ID: ${{ secrets.APPLE_ID }}
|
APPLE_ID: ${{ secrets.APPLE_ID }}
|
||||||
@@ -211,8 +193,56 @@ jobs:
|
|||||||
release/*.dmg
|
release/*.dmg
|
||||||
release/*.zip
|
release/*.zip
|
||||||
|
|
||||||
|
build-windows:
|
||||||
|
needs: [quality-gate]
|
||||||
|
runs-on: windows-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
submodules: true
|
||||||
|
|
||||||
|
- name: Setup Bun
|
||||||
|
uses: oven-sh/setup-bun@v2
|
||||||
|
with:
|
||||||
|
bun-version: 1.3.5
|
||||||
|
|
||||||
|
- name: Cache dependencies
|
||||||
|
uses: actions/cache@v4
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
~/.bun/install/cache
|
||||||
|
node_modules
|
||||||
|
vendor/texthooker-ui/node_modules
|
||||||
|
vendor/subminer-yomitan/node_modules
|
||||||
|
key: ${{ runner.os }}-bun-${{ hashFiles('bun.lock', 'vendor/texthooker-ui/package.json', 'vendor/subminer-yomitan/package-lock.json') }}
|
||||||
|
restore-keys: |
|
||||||
|
${{ runner.os }}-bun-
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: bun install --frozen-lockfile
|
||||||
|
|
||||||
|
- name: Build texthooker-ui
|
||||||
|
shell: powershell
|
||||||
|
run: |
|
||||||
|
Set-Location vendor/texthooker-ui
|
||||||
|
bun install
|
||||||
|
bun run build
|
||||||
|
|
||||||
|
- name: Build unsigned Windows artifacts
|
||||||
|
run: bun run build:win:unsigned
|
||||||
|
|
||||||
|
- name: Upload Windows artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: windows
|
||||||
|
path: |
|
||||||
|
release/*.exe
|
||||||
|
release/*.zip
|
||||||
|
if-no-files-found: error
|
||||||
|
|
||||||
release:
|
release:
|
||||||
needs: [build-linux, build-macos]
|
needs: [build-linux, build-macos, build-windows]
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
@@ -232,6 +262,12 @@ jobs:
|
|||||||
name: macos
|
name: macos
|
||||||
path: release
|
path: release
|
||||||
|
|
||||||
|
- name: Download Windows artifacts
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: windows
|
||||||
|
path: release
|
||||||
|
|
||||||
- name: Setup Bun
|
- name: Setup Bun
|
||||||
uses: oven-sh/setup-bun@v2
|
uses: oven-sh/setup-bun@v2
|
||||||
with:
|
with:
|
||||||
@@ -270,7 +306,7 @@ jobs:
|
|||||||
- name: Generate checksums
|
- name: Generate checksums
|
||||||
run: |
|
run: |
|
||||||
shopt -s nullglob
|
shopt -s nullglob
|
||||||
files=(release/*.AppImage release/*.dmg release/*.zip release/*.tar.gz dist/launcher/subminer)
|
files=(release/*.AppImage release/*.dmg release/*.exe release/*.zip release/*.tar.gz dist/launcher/subminer)
|
||||||
if [ "${#files[@]}" -eq 0 ]; then
|
if [ "${#files[@]}" -eq 0 ]; then
|
||||||
echo "No release artifacts found for checksum generation."
|
echo "No release artifacts found for checksum generation."
|
||||||
exit 1
|
exit 1
|
||||||
@@ -281,23 +317,11 @@ jobs:
|
|||||||
id: version
|
id: version
|
||||||
run: echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
|
run: echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
- name: Generate changelog
|
- name: Verify changelog is ready for tagged release
|
||||||
id: changelog
|
run: bun run changelog:check --version "${{ steps.version.outputs.VERSION }}"
|
||||||
run: |
|
|
||||||
PREV_TAG=$(git describe --tags --abbrev=0 HEAD^ 2>/dev/null || echo "")
|
- name: Generate release notes from changelog
|
||||||
if [ -n "$PREV_TAG" ]; then
|
run: bun run changelog:release-notes --version "${{ steps.version.outputs.VERSION }}"
|
||||||
CHANGES=$(git log --pretty=format:"- %s" ${PREV_TAG}..HEAD)
|
|
||||||
else
|
|
||||||
COMMIT_COUNT=$(git rev-list --count HEAD)
|
|
||||||
if [ "$COMMIT_COUNT" -gt 10 ]; then
|
|
||||||
CHANGES=$(git log --pretty=format:"- %s" HEAD~10..HEAD)
|
|
||||||
else
|
|
||||||
CHANGES=$(git log --pretty=format:"- %s")
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
echo "CHANGES<<EOF" >> $GITHUB_OUTPUT
|
|
||||||
echo "$CHANGES" >> $GITHUB_OUTPUT
|
|
||||||
echo "EOF" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- name: Publish Release
|
- name: Publish Release
|
||||||
env:
|
env:
|
||||||
@@ -305,52 +329,23 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
cat > release-body.md <<'EOF'
|
|
||||||
## Changes
|
|
||||||
${{ steps.changelog.outputs.CHANGES }}
|
|
||||||
|
|
||||||
## Installation
|
|
||||||
|
|
||||||
### AppImage (Recommended)
|
|
||||||
1. Download the AppImage below
|
|
||||||
2. Make it executable: `chmod +x SubMiner.AppImage`
|
|
||||||
3. Run: `./SubMiner.AppImage`
|
|
||||||
|
|
||||||
### macOS
|
|
||||||
1. Download `subminer-*.dmg`
|
|
||||||
2. Open the DMG and drag `SubMiner.app` into `/Applications`
|
|
||||||
3. If needed, use the ZIP artifact as an alternative
|
|
||||||
|
|
||||||
### Manual Installation
|
|
||||||
See the [README](https://github.com/${{ github.repository }}#installation) for manual installation instructions.
|
|
||||||
|
|
||||||
### Optional Assets (config example + mpv plugin + rofi theme)
|
|
||||||
1. Download `subminer-assets.tar.gz`
|
|
||||||
2. Extract and copy `config.example.jsonc` to `~/.config/SubMiner/config.jsonc`
|
|
||||||
3. Copy `plugin/subminer/` directory contents to `~/.config/mpv/scripts/`
|
|
||||||
4. Copy `plugin/subminer.conf` to `~/.config/mpv/script-opts/`
|
|
||||||
5. Copy `assets/themes/subminer.rasi` to:
|
|
||||||
- Linux: `~/.local/share/SubMiner/themes/subminer.rasi`
|
|
||||||
- macOS: `~/Library/Application Support/SubMiner/themes/subminer.rasi`
|
|
||||||
|
|
||||||
Note: the `subminer` wrapper script uses Bun (`#!/usr/bin/env bun`), so `bun` must be installed and on `PATH`.
|
|
||||||
EOF
|
|
||||||
|
|
||||||
if gh release view "${{ steps.version.outputs.VERSION }}" >/dev/null 2>&1; then
|
if gh release view "${{ steps.version.outputs.VERSION }}" >/dev/null 2>&1; then
|
||||||
# Do not pass the prerelease flag here; gh defaults to a normal release.
|
# Do not pass the prerelease flag here; gh defaults to a normal release.
|
||||||
gh release edit "${{ steps.version.outputs.VERSION }}" \
|
gh release edit "${{ steps.version.outputs.VERSION }}" \
|
||||||
|
--draft=false \
|
||||||
--title "${{ steps.version.outputs.VERSION }}" \
|
--title "${{ steps.version.outputs.VERSION }}" \
|
||||||
--notes-file release-body.md
|
--notes-file release/release-notes.md
|
||||||
else
|
else
|
||||||
gh release create "${{ steps.version.outputs.VERSION }}" \
|
gh release create "${{ steps.version.outputs.VERSION }}" \
|
||||||
--title "${{ steps.version.outputs.VERSION }}" \
|
--title "${{ steps.version.outputs.VERSION }}" \
|
||||||
--notes-file release-body.md
|
--notes-file release/release-notes.md
|
||||||
fi
|
fi
|
||||||
|
|
||||||
shopt -s nullglob
|
shopt -s nullglob
|
||||||
artifacts=(
|
artifacts=(
|
||||||
release/*.AppImage
|
release/*.AppImage
|
||||||
release/*.dmg
|
release/*.dmg
|
||||||
|
release/*.exe
|
||||||
release/*.zip
|
release/*.zip
|
||||||
release/*.tar.gz
|
release/*.tar.gz
|
||||||
release/SHA256SUMS.txt
|
release/SHA256SUMS.txt
|
||||||
|
|||||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -37,4 +37,4 @@ tests/*
|
|||||||
.worktrees/
|
.worktrees/
|
||||||
.codex/*
|
.codex/*
|
||||||
.agents/*
|
.agents/*
|
||||||
docs/*
|
favicon.png
|
||||||
|
|||||||
58
AGENTS.md
58
AGENTS.md
@@ -1,3 +1,60 @@
|
|||||||
|
# AGENTS.MD
|
||||||
|
|
||||||
|
## PR Feedback
|
||||||
|
|
||||||
|
- Active PR: `gh pr view --json number,title,url --jq '"PR #\\(.number): \\(.title)\\n\\(.url)"'`.
|
||||||
|
- PR comments: `gh pr view …` + `gh api …/comments --paginate`.
|
||||||
|
- Replies: cite fix + file/line; resolve threads only after fix lands.
|
||||||
|
- When merging a PR: thank the contributor in `CHANGELOG.md`.
|
||||||
|
|
||||||
|
## Changelog
|
||||||
|
|
||||||
|
- User-visible PRs: add one fragment in `changes/*.md`.
|
||||||
|
- Fragment format:
|
||||||
|
`type: added|changed|fixed|docs|internal`
|
||||||
|
`area: <short-area>`
|
||||||
|
blank line
|
||||||
|
`- bullet`
|
||||||
|
- `changes/README.md`: instructions only; generator ignores it.
|
||||||
|
- No release-note entry wanted: use PR label `skip-changelog`.
|
||||||
|
- CI runs `bun run changelog:lint` + `bun run changelog:pr-check` on PRs.
|
||||||
|
- Release prep: `bun run changelog:build`, review `CHANGELOG.md` + `release/release-notes.md`, commit generated changelog + fragment deletions, then tag.
|
||||||
|
- Release CI expects committed changelog entry already present; do not rely on tag job to invent notes.
|
||||||
|
|
||||||
|
## Flow & Runtime
|
||||||
|
|
||||||
|
- Use repo’s package manager/runtime; no swaps w/o approval.
|
||||||
|
- Use Codex background for long jobs; tmux only for interactive/persistent (debugger/server).
|
||||||
|
|
||||||
|
## Build / Test
|
||||||
|
|
||||||
|
- Before handoff: run full gate (lint/typecheck/tests/docs).
|
||||||
|
- CI red: `gh run list/view`, rerun, fix, push, repeat til green.
|
||||||
|
- Keep it observable (logs, panes, tails, MCP/browser tools).
|
||||||
|
- Release: read `docs/RELEASING.md`
|
||||||
|
|
||||||
|
## Git
|
||||||
|
|
||||||
|
- Safe by default: `git status/diff/log`. Push only when user asks.
|
||||||
|
- `git checkout` ok for PR review / explicit request.
|
||||||
|
- Branch changes require user consent.
|
||||||
|
- Destructive ops forbidden unless explicit (`reset --hard`, `clean`, `restore`, `rm`, …).
|
||||||
|
- Don’t delete/rename unexpected stuff; stop + ask.
|
||||||
|
- No repo-wide S/R scripts; keep edits small/reviewable.
|
||||||
|
- Avoid manual `git stash`; if Git auto-stashes during pull/rebase, that’s fine (hint, not hard guardrail).
|
||||||
|
- If user types a command (“pull and push”), that’s consent for that command.
|
||||||
|
- No amend unless asked.
|
||||||
|
- Big review: `git --no-pager diff --color=never`.
|
||||||
|
- Multi-agent: check `git status/diff` before edits; ship small commits.
|
||||||
|
|
||||||
|
## Language/Stack Notes
|
||||||
|
|
||||||
|
- Swift: use workspace helper/daemon; validate `swift build` + tests; keep concurrency attrs right.
|
||||||
|
- TypeScript: use repo PM; keep files small; follow existing patterns.
|
||||||
|
|
||||||
|
## macOS Permissions / Signing (TCC)
|
||||||
|
|
||||||
|
- Never re-sign / ad-hoc sign / change bundle ID as “debug” without explicit ok (can mess TCC).
|
||||||
|
|
||||||
<!-- BACKLOG.MD MCP GUIDELINES START -->
|
<!-- BACKLOG.MD MCP GUIDELINES START -->
|
||||||
|
|
||||||
@@ -17,6 +74,7 @@ This project uses Backlog.md MCP for all task and project management activities.
|
|||||||
- **When to read it**: BEFORE creating tasks, or when you're unsure whether to track work
|
- **When to read it**: BEFORE creating tasks, or when you're unsure whether to track work
|
||||||
|
|
||||||
These guides cover:
|
These guides cover:
|
||||||
|
|
||||||
- Decision framework for when to create tasks
|
- Decision framework for when to create tasks
|
||||||
- Search-first workflow to avoid duplicates
|
- Search-first workflow to avoid duplicates
|
||||||
- Links to detailed guides for task creation, execution, and finalization
|
- Links to detailed guides for task creation, execution, and finalization
|
||||||
|
|||||||
102
CHANGELOG.md
Normal file
102
CHANGELOG.md
Normal file
@@ -0,0 +1,102 @@
|
|||||||
|
# Changelog
|
||||||
|
|
||||||
|
## v0.5.5 (2026-03-09)
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Overlay: Added `f` as the default overlay fullscreen toggle and changed the default AniSkip intro-jump key to `Tab`.
|
||||||
|
- Dictionary: Aligned AniList character dictionary generation more closely with the upstream reference by preserving duplicate shared names across characters, skipping characters without native Japanese names, restoring richer character info fields, and using upstream-style role mapping plus hint-aware kanji readings.
|
||||||
|
- Startup: Ordered startup OSD messages so tokenization loads first, annotation loading appears next if still pending, and character dictionary sync progress waits until annotation loading finishes.
|
||||||
|
- Dictionary: Added a visible startup OSD step for merged character-dictionary building so long rebuilds show progress before the later import/upload phase.
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- Dictionary: Fixed AniList media guessing for character dictionary auto-sync by using filename-only `guessit` input and preserving multi-part guessit titles instead of truncating them to the first segment.
|
||||||
|
- Dictionary: Refresh the current subtitle after character dictionary auto-sync completes so newly imported character names highlight on the active line instead of waiting for the next subtitle change.
|
||||||
|
- Dictionary: Show character dictionary auto-sync progress on the mpv OSD without sending desktop notifications.
|
||||||
|
- Dictionary: Keep character dictionary auto-sync non-blocking during startup by letting snapshot/build work run in parallel and delaying only the Yomitan import/settings phase until current-media tokenization is already ready.
|
||||||
|
- Overlay: Fixed visible overlay keyboard handling so pressing `Tab` still reaches mpv and triggers the default AniSkip skip-intro binding while the overlay has focus.
|
||||||
|
- Plugin: Fix Windows mpv plugin binary override lookup so `SUBMINER_BINARY_PATH` still resolves to `SubMiner.exe` when no AppImage override is set.
|
||||||
|
|
||||||
|
## v0.5.3 (2026-03-09)
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Release: Publish unsigned Windows `.exe` and `.zip` artifacts directly from release CI instead of routing them through SignPath.
|
||||||
|
- Release: Added `bun run build:win:unsigned` for explicit local unsigned Windows packaging.
|
||||||
|
|
||||||
|
## v0.5.2 (2026-03-09)
|
||||||
|
|
||||||
|
### Internal
|
||||||
|
|
||||||
|
- Release: Pinned the Windows SignPath submission workflow to an explicit artifact-configuration slug instead of relying on the SignPath project's default configuration.
|
||||||
|
|
||||||
|
## v0.5.1 (2026-03-09)
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Launcher: Removed the YouTube subtitle generation mode switch so YouTube playback always preloads subtitles before mpv starts.
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- Launcher: Hardened YouTube AI subtitle fixing so fenced SRT output and text-only one-cue-per-block responses can still be applied without losing original cue timing.
|
||||||
|
- Launcher: Skipped AniSkip lookup during URL playback and YouTube subtitle-preload playback, limiting AniSkip to local file targets where it can actually resolve anime metadata.
|
||||||
|
- Launcher: Keep the background SubMiner process running after a launcher-managed mpv session exits so the next mpv instance can reconnect without restarting the app.
|
||||||
|
- Launcher: Reuse prior tokenization readiness after the background app is already warm so reopening a video does not pause again waiting for duplicate warmup completion.
|
||||||
|
- Windows: Acquire the app single-instance lock earlier so Windows overlay/video launches reuse the running background SubMiner process instead of booting a second full app and repeating startup warmups.
|
||||||
|
|
||||||
|
## v0.3.0 (2026-03-05)
|
||||||
|
|
||||||
|
- Added keyboard-driven Yomitan navigation and popup controls, including optional auto-pause.
|
||||||
|
- Added subtitle/jump keyboard handling fixes for smoother subtitle playback control.
|
||||||
|
- Improved Anki/Yomitan reliability with stronger Yomitan proxy syncing and safer extension refresh logic.
|
||||||
|
- Added Subsync `replace` option and deterministic retime naming for subtitle workflows.
|
||||||
|
- Moved aniskip resolution to launcher-script options for better control.
|
||||||
|
- Tuned tokenizer frequency highlighting filters for improved term visibility.
|
||||||
|
- Added release build quality-of-life for CLI publish (`gh`-based clobber upload).
|
||||||
|
- Removed docs Plausible integration and cleaned associated tracker settings.
|
||||||
|
|
||||||
|
## v0.2.3 (2026-03-02)
|
||||||
|
|
||||||
|
- Added performance and tokenization optimizations (faster warmup, persistent MeCab usage, reduced enrichment lookups).
|
||||||
|
- Added subtitle controls for no-jump delay shifts.
|
||||||
|
- Improved subtitle highlight logic with priority and reliability fixes.
|
||||||
|
- Fixed plugin loading behavior to keep OSD visible during startup.
|
||||||
|
- Fixed Jellyfin remote resume behavior and improved autoplay/tokenization interaction.
|
||||||
|
- Updated startup flow to load dictionaries asynchronously and unblock first tokenization sooner.
|
||||||
|
|
||||||
|
## v0.2.2 (2026-03-01)
|
||||||
|
|
||||||
|
- Improved subtitle highlighting reliability for frequency modes.
|
||||||
|
- Fixed Jellyfin misc info formatting cleanup.
|
||||||
|
- Version bump maintenance for 0.2.2.
|
||||||
|
|
||||||
|
## v0.2.1 (2026-03-01)
|
||||||
|
|
||||||
|
- Delivered Jellyfin and Subsync fixes from release patch cycle.
|
||||||
|
- Version bump maintenance for 0.2.1.
|
||||||
|
|
||||||
|
## v0.2.0 (2026-03-01)
|
||||||
|
|
||||||
|
- Added task-related release work for the overlay 2.0 cycle.
|
||||||
|
- Introduced Overlay 2.0.
|
||||||
|
- Improved release automation reliability.
|
||||||
|
|
||||||
|
## v0.1.2 (2026-02-24)
|
||||||
|
|
||||||
|
- Added encrypted AniList token handling and default GNOME keyring support.
|
||||||
|
- Added launcher passthrough for password-store flows (Jellyfin path).
|
||||||
|
- Updated docs for auth and integration behavior.
|
||||||
|
- Version bump maintenance for 0.1.2.
|
||||||
|
|
||||||
|
## v0.1.1 (2026-02-23)
|
||||||
|
|
||||||
|
- Fixed overlay modal focus handling (`grab input`) behavior.
|
||||||
|
- Version bump maintenance for 0.1.1.
|
||||||
|
|
||||||
|
## v0.1.0 (2026-02-23)
|
||||||
|
|
||||||
|
- Bootstrapped Electron runtime, services, and composition model.
|
||||||
|
- Added runtime asset packaging and dependency vendoring.
|
||||||
|
- Added project docs baseline, setup guides, architecture notes, and submodule/runtime assets.
|
||||||
|
- Added CI release job dependency ordering fixes before launcher build.
|
||||||
52
Makefile
52
Makefile
@@ -1,4 +1,4 @@
|
|||||||
.PHONY: help deps build build-launcher install build-linux build-macos build-macos-unsigned clean install-linux install-macos install-plugin uninstall uninstall-linux uninstall-macos print-dirs pretty ensure-bun generate-config generate-example-config dev-start dev-start-macos dev-watch dev-watch-macos dev-toggle dev-stop
|
.PHONY: help deps build build-launcher install build-linux build-macos build-macos-unsigned clean install-linux install-macos install-windows install-plugin uninstall uninstall-linux uninstall-macos uninstall-windows print-dirs pretty ensure-bun generate-config generate-example-config dev-start dev-start-macos dev-watch dev-watch-macos dev-toggle dev-stop
|
||||||
|
|
||||||
APP_NAME := subminer
|
APP_NAME := subminer
|
||||||
THEME_SOURCE := assets/themes/subminer.rasi
|
THEME_SOURCE := assets/themes/subminer.rasi
|
||||||
@@ -20,11 +20,6 @@ MACOS_DATA_DIR ?= $(HOME)/Library/Application Support/SubMiner
|
|||||||
MACOS_APP_DIR ?= $(HOME)/Applications
|
MACOS_APP_DIR ?= $(HOME)/Applications
|
||||||
MACOS_APP_DEST ?= $(MACOS_APP_DIR)/SubMiner.app
|
MACOS_APP_DEST ?= $(MACOS_APP_DIR)/SubMiner.app
|
||||||
|
|
||||||
# mpv plugin install directories.
|
|
||||||
MPV_CONFIG_DIR ?= $(HOME)/.config/mpv
|
|
||||||
MPV_SCRIPTS_DIR ?= $(MPV_CONFIG_DIR)/scripts
|
|
||||||
MPV_SCRIPT_OPTS_DIR ?= $(MPV_CONFIG_DIR)/script-opts
|
|
||||||
|
|
||||||
# If building from source, the AppImage will typically land in release/.
|
# If building from source, the AppImage will typically land in release/.
|
||||||
APPIMAGE_SRC := $(firstword $(wildcard release/SubMiner-*.AppImage))
|
APPIMAGE_SRC := $(firstword $(wildcard release/SubMiner-*.AppImage))
|
||||||
MACOS_APP_SRC := $(firstword $(wildcard release/*.app release/*/*.app))
|
MACOS_APP_SRC := $(firstword $(wildcard release/*.app release/*/*.app))
|
||||||
@@ -41,6 +36,17 @@ else
|
|||||||
PLATFORM := unknown
|
PLATFORM := unknown
|
||||||
endif
|
endif
|
||||||
|
|
||||||
|
WINDOWS_APPDATA ?= $(if $(APPDATA),$(subst \,/,$(APPDATA)),$(HOME)/AppData/Roaming)
|
||||||
|
|
||||||
|
# mpv plugin install directories.
|
||||||
|
ifeq ($(PLATFORM),windows)
|
||||||
|
MPV_CONFIG_DIR ?= $(WINDOWS_APPDATA)/mpv
|
||||||
|
else
|
||||||
|
MPV_CONFIG_DIR ?= $(HOME)/.config/mpv
|
||||||
|
endif
|
||||||
|
MPV_SCRIPTS_DIR ?= $(MPV_CONFIG_DIR)/scripts
|
||||||
|
MPV_SCRIPT_OPTS_DIR ?= $(MPV_CONFIG_DIR)/script-opts
|
||||||
|
|
||||||
help:
|
help:
|
||||||
@printf '%s\n' \
|
@printf '%s\n' \
|
||||||
"Targets:" \
|
"Targets:" \
|
||||||
@@ -58,6 +64,7 @@ help:
|
|||||||
" dev-stop Stop a running local Electron app" \
|
" dev-stop Stop a running local Electron app" \
|
||||||
" install-linux Install Linux wrapper/theme/app artifacts" \
|
" install-linux Install Linux wrapper/theme/app artifacts" \
|
||||||
" install-macos Install macOS wrapper/theme/app artifacts" \
|
" install-macos Install macOS wrapper/theme/app artifacts" \
|
||||||
|
" install-windows Install Windows mpv plugin artifacts" \
|
||||||
" install-plugin Install mpv Lua plugin and plugin config" \
|
" install-plugin Install mpv Lua plugin and plugin config" \
|
||||||
" generate-config Generate ~/.config/SubMiner/config.jsonc from centralized defaults" \
|
" generate-config Generate ~/.config/SubMiner/config.jsonc from centralized defaults" \
|
||||||
"" \
|
"" \
|
||||||
@@ -65,6 +72,7 @@ help:
|
|||||||
" deps Install JS dependencies (root + texthooker-ui)" \
|
" deps Install JS dependencies (root + texthooker-ui)" \
|
||||||
" uninstall-linux Remove Linux install artifacts" \
|
" uninstall-linux Remove Linux install artifacts" \
|
||||||
" uninstall-macos Remove macOS install artifacts" \
|
" uninstall-macos Remove macOS install artifacts" \
|
||||||
|
" uninstall-windows Remove Windows mpv plugin artifacts" \
|
||||||
" print-dirs Show resolved install locations" \
|
" print-dirs Show resolved install locations" \
|
||||||
"" \
|
"" \
|
||||||
"Variables:" \
|
"Variables:" \
|
||||||
@@ -74,7 +82,7 @@ help:
|
|||||||
" LINUX_DATA_DIR=... Override Linux app data dir" \
|
" LINUX_DATA_DIR=... Override Linux app data dir" \
|
||||||
" MACOS_DATA_DIR=... Override macOS app data dir" \
|
" MACOS_DATA_DIR=... Override macOS app data dir" \
|
||||||
" MACOS_APP_DIR=... Override macOS app install dir (default: $$HOME/Applications)" \
|
" MACOS_APP_DIR=... Override macOS app install dir (default: $$HOME/Applications)" \
|
||||||
" MPV_CONFIG_DIR=... Override mpv config dir (default: $$HOME/.config/mpv)"
|
" MPV_CONFIG_DIR=... Override mpv config dir (default: $$HOME/.config/mpv or %APPDATA%/mpv on Windows)"
|
||||||
|
|
||||||
print-dirs:
|
print-dirs:
|
||||||
@printf '%s\n' \
|
@printf '%s\n' \
|
||||||
@@ -85,6 +93,10 @@ print-dirs:
|
|||||||
"MACOS_DATA_DIR=$(MACOS_DATA_DIR)" \
|
"MACOS_DATA_DIR=$(MACOS_DATA_DIR)" \
|
||||||
"MACOS_APP_DIR=$(MACOS_APP_DIR)" \
|
"MACOS_APP_DIR=$(MACOS_APP_DIR)" \
|
||||||
"MACOS_APP_DEST=$(MACOS_APP_DEST)" \
|
"MACOS_APP_DEST=$(MACOS_APP_DEST)" \
|
||||||
|
"WINDOWS_APPDATA=$(WINDOWS_APPDATA)" \
|
||||||
|
"MPV_CONFIG_DIR=$(MPV_CONFIG_DIR)" \
|
||||||
|
"MPV_SCRIPTS_DIR=$(MPV_SCRIPTS_DIR)" \
|
||||||
|
"MPV_SCRIPT_OPTS_DIR=$(MPV_SCRIPT_OPTS_DIR)" \
|
||||||
"APPIMAGE_SRC=$(APPIMAGE_SRC)" \
|
"APPIMAGE_SRC=$(APPIMAGE_SRC)" \
|
||||||
"MACOS_APP_SRC=$(MACOS_APP_SRC)" \
|
"MACOS_APP_SRC=$(MACOS_APP_SRC)" \
|
||||||
"MACOS_ZIP_SRC=$(MACOS_ZIP_SRC)"
|
"MACOS_ZIP_SRC=$(MACOS_ZIP_SRC)"
|
||||||
@@ -105,6 +117,7 @@ build:
|
|||||||
@case "$(PLATFORM)" in \
|
@case "$(PLATFORM)" in \
|
||||||
linux) $(MAKE) --no-print-directory build-linux ;; \
|
linux) $(MAKE) --no-print-directory build-linux ;; \
|
||||||
macos) $(MAKE) --no-print-directory build-macos ;; \
|
macos) $(MAKE) --no-print-directory build-macos ;; \
|
||||||
|
windows) printf '%s\n' "[INFO] Windows builds run via: bun run build:win" ;; \
|
||||||
*) printf '%s\n' "[ERROR] Unsupported OS for this Makefile target: $(PLATFORM)"; exit 1 ;; \
|
*) printf '%s\n' "[ERROR] Unsupported OS for this Makefile target: $(PLATFORM)"; exit 1 ;; \
|
||||||
esac
|
esac
|
||||||
|
|
||||||
@@ -113,6 +126,7 @@ install:
|
|||||||
@case "$(PLATFORM)" in \
|
@case "$(PLATFORM)" in \
|
||||||
linux) $(MAKE) --no-print-directory install-linux ;; \
|
linux) $(MAKE) --no-print-directory install-linux ;; \
|
||||||
macos) $(MAKE) --no-print-directory install-macos ;; \
|
macos) $(MAKE) --no-print-directory install-macos ;; \
|
||||||
|
windows) $(MAKE) --no-print-directory install-windows ;; \
|
||||||
*) printf '%s\n' "[ERROR] Unsupported OS for this Makefile target: $(PLATFORM)"; exit 1 ;; \
|
*) printf '%s\n' "[ERROR] Unsupported OS for this Makefile target: $(PLATFORM)"; exit 1 ;; \
|
||||||
esac
|
esac
|
||||||
|
|
||||||
@@ -210,18 +224,31 @@ install-macos: build-launcher
|
|||||||
fi
|
fi
|
||||||
@printf '%s\n' "Installed to:" " $(BINDIR)/subminer" " $(MACOS_DATA_DIR)/themes/$(THEME_FILE)" " $(MACOS_APP_DEST)"
|
@printf '%s\n' "Installed to:" " $(BINDIR)/subminer" " $(MACOS_DATA_DIR)/themes/$(THEME_FILE)" " $(MACOS_APP_DEST)"
|
||||||
|
|
||||||
|
install-windows:
|
||||||
|
@printf '%s\n' "[INFO] Installing Windows mpv plugin artifacts"
|
||||||
|
@$(MAKE) --no-print-directory install-plugin
|
||||||
|
|
||||||
install-plugin:
|
install-plugin:
|
||||||
@printf '%s\n' "[INFO] Installing mpv plugin artifacts"
|
@printf '%s\n' "[INFO] Installing mpv plugin artifacts"
|
||||||
@install -d "$(MPV_SCRIPTS_DIR)"
|
@install -d "$(MPV_SCRIPTS_DIR)"
|
||||||
@rm -f "$(MPV_SCRIPTS_DIR)/subminer.lua"
|
@rm -f "$(MPV_SCRIPTS_DIR)/subminer.lua" "$(MPV_SCRIPTS_DIR)/subminer-loader.lua"
|
||||||
@install -d "$(MPV_SCRIPTS_DIR)/subminer"
|
@install -d "$(MPV_SCRIPTS_DIR)/subminer"
|
||||||
@install -d "$(MPV_SCRIPT_OPTS_DIR)"
|
@install -d "$(MPV_SCRIPT_OPTS_DIR)"
|
||||||
@cp -R ./plugin/subminer/. "$(MPV_SCRIPTS_DIR)/subminer/"
|
@cp -R ./plugin/subminer/. "$(MPV_SCRIPTS_DIR)/subminer/"
|
||||||
@install -m 0644 "./$(PLUGIN_CONF)" "$(MPV_SCRIPT_OPTS_DIR)/subminer.conf"
|
@install -m 0644 "./$(PLUGIN_CONF)" "$(MPV_SCRIPT_OPTS_DIR)/subminer.conf"
|
||||||
|
@if [ "$(PLATFORM)" = "windows" ]; then \
|
||||||
|
bun ./scripts/configure-plugin-binary-path.mjs "$(MPV_SCRIPT_OPTS_DIR)/subminer.conf" "$(CURDIR)" win32; \
|
||||||
|
fi
|
||||||
@printf '%s\n' "Installed to:" " $(MPV_SCRIPTS_DIR)/subminer/main.lua" " $(MPV_SCRIPTS_DIR)/subminer/" " $(MPV_SCRIPT_OPTS_DIR)/subminer.conf"
|
@printf '%s\n' "Installed to:" " $(MPV_SCRIPTS_DIR)/subminer/main.lua" " $(MPV_SCRIPTS_DIR)/subminer/" " $(MPV_SCRIPT_OPTS_DIR)/subminer.conf"
|
||||||
|
|
||||||
# Uninstall behavior kept unchanged by default.
|
uninstall:
|
||||||
uninstall: uninstall-linux
|
@printf '%s\n' "[INFO] Detected platform: $(PLATFORM)"
|
||||||
|
@case "$(PLATFORM)" in \
|
||||||
|
linux) $(MAKE) --no-print-directory uninstall-linux ;; \
|
||||||
|
macos) $(MAKE) --no-print-directory uninstall-macos ;; \
|
||||||
|
windows) $(MAKE) --no-print-directory uninstall-windows ;; \
|
||||||
|
*) printf '%s\n' "[ERROR] Unsupported OS for this Makefile target: $(PLATFORM)"; exit 1 ;; \
|
||||||
|
esac
|
||||||
|
|
||||||
uninstall-linux:
|
uninstall-linux:
|
||||||
@rm -f "$(BINDIR)/subminer" "$(BINDIR)/SubMiner.AppImage"
|
@rm -f "$(BINDIR)/subminer" "$(BINDIR)/SubMiner.AppImage"
|
||||||
@@ -233,3 +260,8 @@ uninstall-macos:
|
|||||||
@rm -f "$(MACOS_DATA_DIR)/themes/$(THEME_FILE)"
|
@rm -f "$(MACOS_DATA_DIR)/themes/$(THEME_FILE)"
|
||||||
@rm -rf "$(MACOS_APP_DEST)"
|
@rm -rf "$(MACOS_APP_DEST)"
|
||||||
@printf '%s\n' "Removed:" " $(BINDIR)/subminer" " $(MACOS_DATA_DIR)/themes/$(THEME_FILE)" " $(MACOS_APP_DEST)"
|
@printf '%s\n' "Removed:" " $(BINDIR)/subminer" " $(MACOS_DATA_DIR)/themes/$(THEME_FILE)" " $(MACOS_APP_DEST)"
|
||||||
|
|
||||||
|
uninstall-windows:
|
||||||
|
@rm -rf "$(MPV_SCRIPTS_DIR)/subminer"
|
||||||
|
@rm -f "$(MPV_SCRIPTS_DIR)/subminer.lua" "$(MPV_SCRIPTS_DIR)/subminer-loader.lua" "$(MPV_SCRIPT_OPTS_DIR)/subminer.conf"
|
||||||
|
@printf '%s\n' "Removed:" " $(MPV_SCRIPTS_DIR)/subminer" " $(MPV_SCRIPT_OPTS_DIR)/subminer.conf"
|
||||||
|
|||||||
34
README.md
34
README.md
@@ -5,7 +5,7 @@
|
|||||||
<br /><br />
|
<br /><br />
|
||||||
|
|
||||||
[](https://www.gnu.org/licenses/gpl-3.0)
|
[](https://www.gnu.org/licenses/gpl-3.0)
|
||||||
[]()
|
[]()
|
||||||
[](https://docs.subminer.moe)
|
[](https://docs.subminer.moe)
|
||||||
|
|
||||||
</div>
|
</div>
|
||||||
@@ -29,7 +29,7 @@ SubMiner is an Electron overlay that sits on top of mpv. It turns your video pla
|
|||||||
- **One-key mining** — Creates Anki cards with sentence, audio, screenshot, and translation
|
- **One-key mining** — Creates Anki cards with sentence, audio, screenshot, and translation
|
||||||
- **Instant auto-enrichment** — Optional local AnkiConnect proxy enriches new Yomitan cards immediately
|
- **Instant auto-enrichment** — Optional local AnkiConnect proxy enriches new Yomitan cards immediately
|
||||||
- **Reading annotations** — Combines N+1 targeting, frequency-dictionary highlighting, and JLPT underlining while you read
|
- **Reading annotations** — Combines N+1 targeting, frequency-dictionary highlighting, and JLPT underlining while you read
|
||||||
- **Hover-aware playback** — By default, hovering subtitle text pauses mpv and resumes on mouse leave (`subtitleStyle.autoPauseVideoOnHover`)
|
- **Hover-aware playback** — By default, hovering subtitle text pauses mpv and resumes on mouse leave
|
||||||
- **Subtitle tools** — Download from Jimaku, sync with alass/ffsubsync
|
- **Subtitle tools** — Download from Jimaku, sync with alass/ffsubsync
|
||||||
- **Immersion tracking** — SQLite-powered stats on your watch time and mining activity
|
- **Immersion tracking** — SQLite-powered stats on your watch time and mining activity
|
||||||
- **Custom texthooker page** — Built-in custom texthooker page and websocket, no extra setup
|
- **Custom texthooker page** — Built-in custom texthooker page and websocket, no extra setup
|
||||||
@@ -54,15 +54,22 @@ chmod +x ~/.local/bin/subminer
|
|||||||
> [!NOTE]
|
> [!NOTE]
|
||||||
> The `subminer` wrapper uses a [Bun](https://bun.sh) shebang. Make sure `bun` is on your `PATH`.
|
> The `subminer` wrapper uses a [Bun](https://bun.sh) shebang. Make sure `bun` is on your `PATH`.
|
||||||
|
|
||||||
**From source** or **macOS** — initialize submodules first (`git submodule update --init --recursive`). Source builds now also require Node.js 22 + npm because bundled Yomitan is built from the `vendor/subminer-yomitan` submodule into `build/yomitan` during `bun run build`. Full install guide: [docs.subminer.moe/installation#from-source](https://docs.subminer.moe/installation#from-source).
|
**macOS (DMG/ZIP):** download the latest packaged build from [GitHub Releases](https://github.com/ksyasuda/SubMiner/releases/latest) and drag `SubMiner.app` into `/Applications`.
|
||||||
|
|
||||||
|
**Windows (Installer/ZIP):** download the latest `SubMiner-<version>.exe` installer or portable `.zip` from [GitHub Releases](https://github.com/ksyasuda/SubMiner/releases/latest). Keep `mpv` installed and available on `PATH`.
|
||||||
|
|
||||||
|
**From source** — initialize submodules first (`git submodule update --init --recursive`). Bundled Yomitan is built from the `vendor/subminer-yomitan` submodule into `build/yomitan` during `bun run build`, so source builds only need Bun for the JS toolchain. Packaged macOS and Windows installs do not require Bun. Windows installer builds go through `electron-builder`; its bundled `app-builder-lib` NSIS templates already use the third-party `WinShell` plugin for shortcut AppUserModelID assignment, and the `WinShell.dll` binary is supplied by electron-builder's cached `nsis-resources` bundle, so `bun run build:win` does not need a separate repo-local plugin install step. Full install guide: [docs.subminer.moe/installation#from-source](https://docs.subminer.moe/installation#from-source).
|
||||||
|
|
||||||
### 2. Launch the app once
|
### 2. Launch the app once
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
|
# Linux
|
||||||
SubMiner.AppImage
|
SubMiner.AppImage
|
||||||
```
|
```
|
||||||
|
|
||||||
On first launch, SubMiner now:
|
On macOS, launch `SubMiner.app`. On Windows, launch `SubMiner.exe` from the Start menu or install directory.
|
||||||
|
|
||||||
|
On first launch, SubMiner:
|
||||||
|
|
||||||
- starts in the tray/background
|
- starts in the tray/background
|
||||||
- creates the default config directory and `config.jsonc`
|
- creates the default config directory and `config.jsonc`
|
||||||
@@ -70,8 +77,6 @@ On first launch, SubMiner now:
|
|||||||
- can install the mpv plugin to the default mpv scripts location for you
|
- can install the mpv plugin to the default mpv scripts location for you
|
||||||
- links directly to Yomitan settings so you can install dictionaries before finishing setup
|
- links directly to Yomitan settings so you can install dictionaries before finishing setup
|
||||||
|
|
||||||
Existing installs that already have a valid config plus at least one Yomitan dictionary are auto-detected as complete and will not be re-prompted.
|
|
||||||
|
|
||||||
### 3. Finish setup
|
### 3. Finish setup
|
||||||
|
|
||||||
- click `Install mpv plugin` if you want the default plugin auto-start flow
|
- click `Install mpv plugin` if you want the default plugin auto-start flow
|
||||||
@@ -92,31 +97,22 @@ subminer --start video.mkv # optional explicit overlay start when plugin auto_st
|
|||||||
|
|
||||||
| Required | Optional |
|
| Required | Optional |
|
||||||
| ------------------------------------------ | -------------------------------------------------- |
|
| ------------------------------------------ | -------------------------------------------------- |
|
||||||
| `bun`, `node` 22, `npm` | |
|
| `bun` (source builds, Linux `subminer`) | |
|
||||||
| `mpv` with IPC socket | `yt-dlp` |
|
| `mpv` with IPC socket | `yt-dlp` |
|
||||||
| `ffmpeg` | `guessit` (better AniSkip title/episode detection) |
|
| `ffmpeg` | `guessit` (better AniSkip title/episode detection) |
|
||||||
| `mecab` + `mecab-ipadic` | `fzf` / `rofi` |
|
| `mecab` + `mecab-ipadic` | `fzf` / `rofi` |
|
||||||
| Linux: `hyprctl` or `xdotool` + `xwininfo` | `chafa`, `ffmpegthumbnailer` |
|
| Linux: `hyprctl` or `xdotool` + `xwininfo` | `chafa`, `ffmpegthumbnailer` |
|
||||||
| macOS: Accessibility permission | |
|
| macOS: Accessibility permission | |
|
||||||
|
|
||||||
|
Windows builds use native window tracking and do not require the Linux compositor helper tools.
|
||||||
|
|
||||||
## Documentation
|
## Documentation
|
||||||
|
|
||||||
For full guides on configuration, Anki, Jellyfin, and more, see [docs.subminer.moe](https://docs.subminer.moe).
|
For full guides on configuration, Anki, Jellyfin, and more, see [docs.subminer.moe](https://docs.subminer.moe).
|
||||||
|
|
||||||
## Testing
|
|
||||||
|
|
||||||
- Run `bun run test` or `bun run test:fast` for the default fast lane: config/core coverage plus representative entry/runtime, Anki integration, and main runtime checks.
|
|
||||||
- Run `bun run test:full` for the maintained test surface: Bun-compatible `src/**` coverage, Bun-compatible launcher unit coverage, and a Node compatibility lane for suites that depend on Electron named exports or `node:sqlite` behavior.
|
|
||||||
- Run `bun run test:node:compat` directly when you only need the Node-backed compatibility slice: `ipc`, `anki-jimaku-ipc`, `overlay-manager`, `config-validation`, `startup-config`, and runtime registry coverage.
|
|
||||||
- Run `bun run test:env` for environment-specific verification: launcher smoke/plugin checks plus the SQLite-backed immersion tracker lane.
|
|
||||||
- Run `bun run test:immersion:sqlite` when you specifically need real SQLite persistence coverage under Node with `--experimental-sqlite`.
|
|
||||||
- Run `bun run test:subtitle` for the maintained `alass`/`ffsubsync` subtitle surface.
|
|
||||||
|
|
||||||
The Bun-managed discovery lanes intentionally exclude a small set of suites that are currently Node-only because of Bun runtime/tooling gaps rather than product behavior: Electron named-export tests in `src/core/services/ipc.test.ts`, `src/core/services/anki-jimaku-ipc.test.ts`, and `src/core/services/overlay-manager.test.ts`, plus runtime/config tests in `src/main/config-validation.test.ts`, `src/main/runtime/startup-config.test.ts`, and `src/main/runtime/registry.test.ts`. `bun run test:node:compat` keeps those suites in the standard workflow instead of leaving them untracked.
|
|
||||||
|
|
||||||
## Acknowledgments
|
## Acknowledgments
|
||||||
|
|
||||||
Built on the shoulders of [GameSentenceMiner](https://github.com/bpwhelan/GameSentenceMiner), [Renji's Texthooker Page](https://github.com/Renji-XD/texthooker-ui), [mpvacious](https://github.com/Ajatt-Tools/mpvacious), [Anacreon-Script](https://github.com/friedrich-de/Anacreon-Script), and [Bee's Character Dictionary](https://github.com/bee-san/Japanese_Character_Name_Dictionary). Subtitles powered by [Jimaku.cc](https://jimaku.cc). Dictionary lookups via [Yomitan](https://github.com/yomidevs/yomitan).
|
Built on the shoulders of [GameSentenceMiner](https://github.com/bpwhelan/GameSentenceMiner), [Renji's Texthooker Page](https://github.com/Renji-XD/texthooker-ui), [Anacreon-Script](https://github.com/friedrich-de/Anacreon-Script), and [Bee's Character Dictionary](https://github.com/bee-san/Japanese_Character_Name_Dictionary). Subtitles powered by [Jimaku.cc](https://jimaku.cc). Dictionary lookups via [Yomitan](https://github.com/yomidevs/yomitan), and JLPT tags from [yomitan-jlpt-vocab](https://github.com/stephenmk/yomitan-jlpt-vocab).
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
|
||||||
|
|||||||
Binary file not shown.
|
Before Width: | Height: | Size: 141 KiB |
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,59 @@
|
|||||||
|
---
|
||||||
|
id: TASK-117
|
||||||
|
title: Prepare initial Windows release docs and version bump
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-08 15:17'
|
||||||
|
updated_date: '2026-03-08 15:17'
|
||||||
|
labels:
|
||||||
|
- release
|
||||||
|
- docs
|
||||||
|
- windows
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- package.json
|
||||||
|
- README.md
|
||||||
|
- ../subminer-docs/installation.md
|
||||||
|
- ../subminer-docs/usage.md
|
||||||
|
- ../subminer-docs/changelog.md
|
||||||
|
priority: medium
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Prepare the initial packaged Windows release by bumping the app version and refreshing the release-facing README/backlog/docs surfaces so install and direct-command guidance no longer reads Linux-only.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 App version is bumped for the Windows release cut
|
||||||
|
- [x] #2 README and sibling docs describe Windows packaged installation alongside Linux/macOS guidance
|
||||||
|
- [x] #3 Backlog records the release-doc/version update with the modified references
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
1. Bump the package version for the release cut.
|
||||||
|
2. Update the root README install/start guidance to mention Windows packaged builds.
|
||||||
|
3. Patch the sibling docs repo installation, usage, and changelog pages for the Windows release.
|
||||||
|
4. Record the work in Backlog and run targeted verification on the touched surfaces.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
The public README still advertised Linux/macOS only, while the sibling docs had Windows-specific runtime notes but no actual Windows install section and several direct-command examples still assumed `SubMiner.AppImage`.
|
||||||
|
|
||||||
|
Bumped `package.json` to `0.5.0`, expanded the README platform/install copy to include Windows, added a Windows install section to `../subminer-docs/installation.md`, clarified in `../subminer-docs/usage.md` that direct packaged-app examples use `SubMiner.exe` on Windows, and added a `v0.5.0` changelog entry covering the initial Windows release plus the latest overlay behavior polish.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Prepared the initial Windows release documentation pass and version bump. `package.json` now reports `0.5.0`. The root `README.md` now advertises Linux, macOS, and Windows support, includes Windows packaged-install guidance, and clarifies first-launch behavior across platforms. In the sibling docs repo, `installation.md` now includes a dedicated Windows install section, `usage.md` explains that direct packaged-app examples use `SubMiner.exe` on Windows, and `changelog.md` now includes the `v0.5.0` release notes for the initial Windows build and recent overlay behavior changes.
|
||||||
|
|
||||||
|
Verification: targeted `bun run tsc --noEmit -p tsconfig.typecheck.json` in the app repo and `bun run docs:build` in `../subminer-docs`.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,66 @@
|
|||||||
|
---
|
||||||
|
id: TASK-117
|
||||||
|
title: >-
|
||||||
|
Replace YouTube subtitle generation with pure TypeScript pipeline and shared
|
||||||
|
AI config
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-08 03:16'
|
||||||
|
updated_date: '2026-03-08 03:35'
|
||||||
|
labels: []
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- /Users/sudacode/projects/japanese/SubMiner/launcher/youtube.ts
|
||||||
|
- /Users/sudacode/projects/japanese/SubMiner/src/anki-integration/ai.ts
|
||||||
|
- /Users/sudacode/projects/japanese/SubMiner/src/types.ts
|
||||||
|
- >-
|
||||||
|
/Users/sudacode/projects/japanese/SubMiner/src/config/definitions/defaults-integrations.ts
|
||||||
|
- >-
|
||||||
|
/Users/sudacode/projects/japanese/SubMiner/src/config/resolve/subtitle-domains.ts
|
||||||
|
- /Users/sudacode/projects/japanese/SubMiner/config.example.jsonc
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Replace the launcher YouTube subtitle generation flow with a pure TypeScript pipeline that prefers real downloadable YouTube subtitles, never uses YouTube auto-generated subtitles, locally generates missing tracks with whisper.cpp, and can optionally fix generated subtitles via a shared OpenAI-compatible AI provider config. This feature also introduces a breaking config cleanup: move provider settings to a new top-level ai section and reduce ankiConnect.ai to a boolean feature toggle.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 Launcher YouTube subtitle generation prefers downloadable manual YouTube subtitles, never uses YouTube auto-generated subtitles, and locally generates only missing tracks with whisper.cpp.
|
||||||
|
- [x] #2 Generated whisper subtitle tracks can optionally be post-processed with an OpenAI-compatible AI provider using shared top-level ai config, with validation and fallback to raw whisper output on failure.
|
||||||
|
- [x] #3 Configuration is updated so top-level ai is canonical shared provider config, ankiConnect.ai is boolean-only, and youtubeSubgen includes whisperVadModel, whisperThreads, and fixWithAi.
|
||||||
|
- [x] #4 Launcher CLI/config parsing, config example, and docs reflect the new breaking config shape with no migration layer.
|
||||||
|
- [x] #5 Automated tests cover the new YouTube generation behavior, AI-fix fallback/validation behavior, shared AI config usage, and breaking config validation.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
1. Introduce canonical top-level ai config plus youtubeSubgen runtime knobs (whisperVadModel, whisperThreads, fixWithAi) and convert ankiConnect.ai to a boolean-only toggle across types, defaults, validation, option registries, launcher config parsing, and config example/docs.
|
||||||
|
2. Extract shared OpenAI-compatible AI client helpers from the current Anki translation code, including base URL normalization, API key / apiKeyCommand resolution, timeout handling, and response text extraction.
|
||||||
|
3. Update Anki translation flow and hot-reload/runtime plumbing to consume global ai config while treating ankiConnect.ai as a feature gate only.
|
||||||
|
4. Replace launcher/youtube.ts with a modular launcher/youtube pipeline that fetches only manual YouTube subtitles, generates missing tracks locally with ffmpeg + whisper.cpp + optional VAD/thread controls, and preserves preprocess/automatic playback behavior.
|
||||||
|
5. Add optional AI subtitle-fix processing for whisper-generated tracks using the shared ai client, with strict SRT batching/validation and fallback to raw whisper output on provider or format failure.
|
||||||
|
6. Expand automated coverage for config validation, shared AI usage, launcher config parsing, and YouTube subtitle generation behavior including removal of yt-dlp auto-subs and AI-fix fallback rules.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
Implemented pure TypeScript launcher/youtube pipeline modules for manual subtitle fetch, audio extraction, whisper runs, SRT utilities, and optional AI subtitle fixing. Removed yt-dlp auto-subtitle usage from the generation path.
|
||||||
|
|
||||||
|
Added shared top-level ai config plus shared AI client helpers; converted ankiConnect.ai to a boolean feature gate and updated Anki runtime wiring to consume global ai config.
|
||||||
|
|
||||||
|
Updated launcher config parsing, config template sections, and config.example.jsonc for the breaking config shape including youtubeSubgen.whisperVadModel, youtubeSubgen.whisperThreads, and youtubeSubgen.fixWithAi.
|
||||||
|
|
||||||
|
Verification: bun run test:config:src passed; targeted AI/Anki/runtime tests passed; bun run typecheck passed. bun run test:launcher:unit:src reported one unrelated existing failure in launcher/aniskip-metadata.test.ts (resolveAniSkipMetadataForFile resolves MAL id and intro payload).
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Replaced the launcher YouTube subtitle flow with a modular TypeScript pipeline that prefers manual YouTube subtitles, transcribes only missing tracks with whisper.cpp, and can optionally post-fix whisper output through a shared OpenAI-compatible AI client with strict SRT validation/fallback. Introduced canonical top-level ai config, reduced ankiConnect.ai to a boolean feature gate, updated launcher/config parsing and checked-in config artifacts, and added coverage for YouTube orchestration, whisper args, SRT validation, AI fix behavior, and breaking config validation.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,56 @@
|
|||||||
|
---
|
||||||
|
id: TASK-117.1
|
||||||
|
title: Harden AI subtitle fix against non-SRT model responses
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- '@codex'
|
||||||
|
created_date: '2026-03-08 08:22'
|
||||||
|
updated_date: '2026-03-08 08:25'
|
||||||
|
labels: []
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- >-
|
||||||
|
/Users/sudacode/projects/japanese/SubMiner/launcher/youtube/subtitle-fix-ai.ts
|
||||||
|
- /Users/sudacode/projects/japanese/SubMiner/launcher/youtube/srt.ts
|
||||||
|
- >-
|
||||||
|
/Users/sudacode/projects/japanese/SubMiner/launcher/youtube/subtitle-fix-ai.test.ts
|
||||||
|
parent_task_id: TASK-117
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Prevent optional YouTube AI subtitle post-processing from bailing out whenever the model returns usable cue text in a non-SRT wrapper or text-only format. The launcher should recover safe cases, preserve original timing, and fall back cleanly when the response cannot be mapped back to the source cues.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 AI subtitle fixing accepts safe AI responses that omit SRT framing but still provide one corrected text payload per original cue while preserving original cue timing.
|
||||||
|
- [x] #2 AI subtitle fixing still rejects responses that cannot be mapped back to the original cue batch without guessing and falls back to the raw subtitle file with a warning.
|
||||||
|
- [x] #3 Automated tests cover wrapped-SRT and text-only AI responses plus an unrecoverable invalid response case.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
1. Add failing tests in launcher/youtube/subtitle-fix-ai.test.ts for three cases: wrapped valid SRT, text-only one-block-per-cue output, and unrecoverable invalid output.
|
||||||
|
2. Extend launcher/youtube/subtitle-fix-ai.ts with a small response-normalization path that first strips markdown/code-fence wrappers, then accepts deterministic text-only cue batches only when they map 1:1 to the original cues without changing timestamps.
|
||||||
|
3. Keep existing safety rules: preserve cue count and timing, log a warning, and fall back to the raw subtitle file when normalization cannot recover a trustworthy batch.
|
||||||
|
4. Run focused launcher unit tests for subtitle-fix-ai and SRT parsing; expand only if the change affects adjacent behavior.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
Implemented deterministic AI subtitle-response recovery for fenced SRT, embedded SRT payloads, and text-only 1:1 cue batches while preserving original timing and existing fallback behavior.
|
||||||
|
|
||||||
|
Verification: bun test launcher/youtube/*.test.ts passed; bun run typecheck passed; repo-wide format check still reports unrelated pre-existing warnings in launcher/youtube/orchestrator.ts and scripts/build-changelog*.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Hardened the launcher AI subtitle-fix path so it can recover deterministic non-SRT model responses instead of immediately falling back. Added `parseAiSubtitleFixResponse` in `launcher/youtube/subtitle-fix-ai.ts` to normalize markdown-fenced or embedded SRT payloads first, then accept text-only responses only when they map 1:1 onto the original cue batch and preserve source timings. Added regression coverage in `launcher/youtube/subtitle-fix-ai.test.ts` for fenced SRT, text-only cue batches, and unrecoverable invalid output, plus a changelog fragment in `changes/task-117.1.md`.
|
||||||
|
|
||||||
|
Verification: `bun test launcher/youtube/*.test.ts`, `bun run typecheck`, `bunx prettier --check launcher/youtube/subtitle-fix-ai.ts launcher/youtube/subtitle-fix-ai.test.ts`, and `bun run changelog:lint` passed. Repo-wide `bun run format:check:src` still reports unrelated pre-existing warnings in `launcher/youtube/orchestrator.ts` and `scripts/build-changelog*`.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,64 @@
|
|||||||
|
---
|
||||||
|
id: TASK-118
|
||||||
|
title: Add Windows release build and SignPath signing
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-08 15:17'
|
||||||
|
updated_date: '2026-03-08 15:17'
|
||||||
|
labels:
|
||||||
|
- release
|
||||||
|
- windows
|
||||||
|
- signing
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- .github/workflows/release.yml
|
||||||
|
- build/installer.nsh
|
||||||
|
- build/signpath-windows-artifact-config.xml
|
||||||
|
- package.json
|
||||||
|
priority: high
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Extend the tag-driven release workflow so Windows artifacts are built on GitHub-hosted runners and submitted to SignPath for free open-source Authenticode signing, while preserving the existing macOS notarization path.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 Release workflow builds Windows installer and ZIP artifacts on `windows-latest`
|
||||||
|
- [x] #2 Workflow submits unsigned Windows artifacts to SignPath and uploads the signed outputs for release publication
|
||||||
|
- [x] #3 Repository includes a checked-in SignPath artifact-configuration source of truth for the Windows release files
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
1. Inspect the existing release workflow and current Windows packaging configuration.
|
||||||
|
2. Add a Windows release job that builds unsigned artifacts, uploads them as a workflow artifact, and submits them to SignPath.
|
||||||
|
3. Update the release aggregation job to publish signed Windows assets and mention Windows install steps in the generated release notes.
|
||||||
|
4. Check in the Windows SignPath artifact configuration XML used to define what gets signed.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
The repository already had Windows packaging configuration (`build:win`, NSIS include script, Windows helper asset packaging), but the release workflow still built Linux and macOS only.
|
||||||
|
|
||||||
|
Added a `build-windows` job to `.github/workflows/release.yml` that runs on `windows-latest`, validates required SignPath secrets, builds unsigned Windows artifacts, uploads them with `actions/upload-artifact@v4`, and then calls the official `signpath/github-action-submit-signing-request@v2` action to retrieve signed outputs.
|
||||||
|
|
||||||
|
Checked in `build/signpath-windows-artifact-config.xml` as the source-of-truth artifact configuration for SignPath. It signs the top-level NSIS installer EXE and deep-signs `.exe` and `.dll` files inside the portable ZIP artifact.
|
||||||
|
|
||||||
|
Updated the release aggregation job to download the signed Windows artifacts and added a Windows install section to the generated GitHub release body.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Windows release publishing is now wired into the tag-driven workflow. `.github/workflows/release.yml` builds Windows artifacts on `windows-latest`, submits them to SignPath using the official GitHub action, and publishes the signed `.exe` and `.zip` outputs alongside the Linux and macOS artifacts. The workflow now requests the additional `actions: read` permission required by the SignPath GitHub integration, and the generated release notes now include Windows installation steps.
|
||||||
|
|
||||||
|
The checked-in `build/signpath-windows-artifact-config.xml` file defines the SignPath artifact structure expected by the workflow artifact ZIP: sign the top-level `SubMiner-*.exe` installer and deep-sign `.exe` and `.dll` files inside `SubMiner-*.zip`.
|
||||||
|
|
||||||
|
Verification: workflow/static changes were checked with `git diff --check` on the touched files. Actual signing requires configured SignPath secrets and a matching artifact configuration in your SignPath project.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,37 @@
|
|||||||
|
---
|
||||||
|
id: TASK-119
|
||||||
|
title: Add Jellyfin remote-session subtitle streaming to texthooker
|
||||||
|
status: To Do
|
||||||
|
assignee: []
|
||||||
|
created_date: '2026-03-08 03:46'
|
||||||
|
labels:
|
||||||
|
- jellyfin
|
||||||
|
- texthooker
|
||||||
|
- subtitle
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- >-
|
||||||
|
/Users/sudacode/projects/japanese/SubMiner/src/main/runtime/jellyfin-remote-commands.ts
|
||||||
|
- /Users/sudacode/projects/japanese/SubMiner/src/core/services/jellyfin.ts
|
||||||
|
- >-
|
||||||
|
/Users/sudacode/projects/japanese/SubMiner/src/core/services/subtitle-processing-controller.ts
|
||||||
|
- 'https://api.jellyfin.org/'
|
||||||
|
documentation:
|
||||||
|
- 'https://api.jellyfin.org/'
|
||||||
|
priority: medium
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Allow SubMiner to follow subtitles from a separate Jellyfin client session, such as a TV app, without requiring local mpv playback. The feature should fetch the active subtitle stream from Jellyfin, map the remote playback position to subtitle cues, and feed the existing subtitle tokenization plus annotated texthooker websocket pipeline so texthooker-only mode can be used while watching on another device.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [ ] #1 User can target a remote Jellyfin session and stream its current subtitle cue into SubMiner's existing subtitle-processing pipeline without launching local Jellyfin playback in mpv.
|
||||||
|
- [ ] #2 Texthooker-only mode can display subtitle updates from the tracked remote Jellyfin session through the existing annotation websocket feed.
|
||||||
|
- [ ] #3 Remote session changes are handled safely: item changes, subtitle-track changes, pause/seek/stop, and session disconnects clear or refresh subtitle state without crashing.
|
||||||
|
- [ ] #4 The feature degrades clearly when the remote session has no usable text subtitle stream or uses an unsupported subtitle format.
|
||||||
|
- [ ] #5 Automated tests cover session tracking, subtitle cue selection, and feed integration; user-facing docs/config docs are updated.
|
||||||
|
<!-- AC:END -->
|
||||||
@@ -0,0 +1,35 @@
|
|||||||
|
---
|
||||||
|
id: TASK-120
|
||||||
|
title: 'Replace node:sqlite with libsql and remove Yomitan Node wrapper'
|
||||||
|
status: Done
|
||||||
|
assignee: []
|
||||||
|
created_date: '2026-03-08 04:14'
|
||||||
|
updated_date: '2026-03-08 04:39'
|
||||||
|
labels:
|
||||||
|
- runtime
|
||||||
|
- bun
|
||||||
|
- sqlite
|
||||||
|
- tech-debt
|
||||||
|
dependencies: []
|
||||||
|
priority: medium
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Remove the remaining root Node requirement caused by immersion tracking SQLite usage and the old Yomitan build wrapper by migrating the local SQLite layer off node:sqlite, running the SQLite-backed verification lanes under Bun, and switching the vendored Yomitan build flow to Bun-native scripts.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 Immersion tracker runtime no longer imports or requires node:sqlite
|
||||||
|
- [x] #2 SQLite-backed immersion tracker tests run under Bun without Node --experimental-sqlite
|
||||||
|
- [x] #3 Root build/test scripts no longer require the Yomitan Node wrapper or Node-based SQLite verification lanes
|
||||||
|
- [x] #4 README requirements/testing docs reflect the Bun-native workflow
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Replaced the immersion tracker SQLite dependency with a local libsql-backed wrapper, updated Bun/runtime compatibility tests to avoid process.exitCode side effects, switched Yomitan builds to run directly inside the vendored Bun-native project, deleted scripts/build-yomitan.mjs, and verified typecheck plus Bun build/test lanes (`build:yomitan`, `test:immersion:sqlite`, `test:runtime:compat`, `test:fast`).
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,52 @@
|
|||||||
|
---
|
||||||
|
id: TASK-121
|
||||||
|
title: >-
|
||||||
|
Fix YouTube manual subtitle selection regression when downloadable tracks
|
||||||
|
exist
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- '@codex'
|
||||||
|
created_date: '2026-03-08 05:37'
|
||||||
|
updated_date: '2026-03-08 05:42'
|
||||||
|
labels:
|
||||||
|
- bug
|
||||||
|
- youtube
|
||||||
|
- subtitles
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- /Users/sudacode/projects/japanese/SubMiner/launcher/youtube/manual-subs.ts
|
||||||
|
- /Users/sudacode/projects/japanese/SubMiner/launcher/youtube/orchestrator.ts
|
||||||
|
- 'https://www.youtube.com/watch?v=MXzQRLmN9hE'
|
||||||
|
priority: high
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Ensure launcher YouTube subtitle generation reuses downloadable manual subtitle tracks when the video already has requested languages available, instead of falling back to whisper generation. Reproduce against videos like MXzQRLmN9hE that expose manual en/ja subtitles via yt-dlp.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 When requested primary/secondary manual YouTube subtitle tracks exist, planning selects them and schedules no whisper generation for those tracks.
|
||||||
|
- [x] #2 Filename normalization handles manual subtitle outputs produced by yt-dlp for language-tagged downloads.
|
||||||
|
- [x] #3 Automated tests cover the reproduced manual en/ja selection case.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
Reproduced against https://www.youtube.com/watch?v=MXzQRLmN9hE with yt-dlp --list-subs: manual zh/en/ja/ko subtitle tracks are available from YouTube.
|
||||||
|
|
||||||
|
Adjusted launcher YouTube orchestration so detected manual subtitle tracks suppress whisper generation but are no longer materialized as external subtitle files. SubMiner now relies on the native YouTube/mpv subtitle tracks for those languages.
|
||||||
|
|
||||||
|
Added orchestration tests covering the manual-track reuse plan and ran a direct runtime probe against MXzQRLmN9hE. Probe result: primary/secondary native tracks detected, no external subtitle aliases emitted, output directory remained empty.
|
||||||
|
|
||||||
|
Verification: bun test launcher/youtube/orchestrator.test.ts launcher/config-domain-parsers.test.ts launcher/mpv.test.ts passed; bun run typecheck passed.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Fixed the YouTube subtitle regression where videos with real downloadable subtitle tracks still ended up with duplicate external subtitle files. Manual subtitle availability now suppresses whisper generation and external subtitle publication, so videos like MXzQRLmN9hE use the native YouTube/mpv subtitle tracks directly. Launcher preprocess logging was also updated to report native subtitle availability instead of misleading missing statuses.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,71 @@
|
|||||||
|
---
|
||||||
|
id: TASK-122
|
||||||
|
title: Harden changelog workflow and CI enforcement
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- Codex
|
||||||
|
created_date: '2026-03-08 06:13'
|
||||||
|
updated_date: '2026-03-08 06:28'
|
||||||
|
labels:
|
||||||
|
- release
|
||||||
|
- changelog
|
||||||
|
- ci
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- /Users/sudacode/projects/japanese/SubMiner/scripts/build-changelog.ts
|
||||||
|
- /Users/sudacode/projects/japanese/SubMiner/scripts/build-changelog.test.ts
|
||||||
|
- /Users/sudacode/projects/japanese/SubMiner/.github/workflows/ci.yml
|
||||||
|
- /Users/sudacode/projects/japanese/SubMiner/.github/workflows/release.yml
|
||||||
|
- /Users/sudacode/projects/japanese/SubMiner/docs/RELEASING.md
|
||||||
|
- /Users/sudacode/projects/japanese/SubMiner/changes/README.md
|
||||||
|
priority: medium
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Improve the release changelog workflow so changelog fragments are reliable, release output is more readable, and pull requests get early feedback when changelog metadata is missing or malformed.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 `scripts/build-changelog.ts` ignores non-fragment files in `changes/` and validates fragment structure before generating changelog output.
|
||||||
|
- [x] #2 Generated `CHANGELOG.md` and `release/release-notes.md` group public changes into readable sections instead of a flat bullet list.
|
||||||
|
- [x] #3 CI enforces changelog validation on pull requests and provides an explicit opt-out path for changes that should not produce release notes.
|
||||||
|
- [x] #4 Contributor docs explain the fragment format and the PR/release workflow for changelog generation.
|
||||||
|
- [x] #5 Automated tests cover fragment parsing/building behavior and workflow enforcement expectations.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
1. Add failing tests for changelog fragment discovery, structured fragment parsing/rendering, release-note output, and CI workflow expectations.
|
||||||
|
2. Update scripts/build-changelog.ts to ignore non-fragment files, parse fragment metadata, group generated output by change type, add lint/PR-check commands, and simplify output paths to repo-local artifacts.
|
||||||
|
3. Update CI and PR workflow files to run changelog validation on pull requests with an explicit skip path, and keep release workflow using committed changelog output.
|
||||||
|
4. Refresh changes/README.md, docs/RELEASING.md, and any PR template text so contributors know how to write fragments and when opt-out is allowed.
|
||||||
|
5. Run targeted tests and changelog commands, then record results and finalize the task.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
Implemented structured changelog fragments with required `type` and `area` metadata; `changes/README.md` is now ignored by the generator and verified by regression tests.
|
||||||
|
|
||||||
|
Added `changelog:lint` and `changelog:pr-check`, plus PR CI enforcement with `skip-changelog` opt-out. PR check now reads git name-status output so deleted fragment files do not satisfy the requirement.
|
||||||
|
|
||||||
|
Changed generated changelog/release notes output to grouped sections (`Added`, `Changed`, `Fixed`, etc.) and simplified release notes to highlights + install/assets pointers.
|
||||||
|
|
||||||
|
Kept changelog output repo-local. This aligns with existing repo direction where docs updates happen in the sibling docs repo explicitly rather than implicit local writes from app-repo generators.
|
||||||
|
|
||||||
|
Verification: `bun test scripts/build-changelog.test.ts src/ci-workflow.test.ts src/release-workflow.test.ts` passed; `bun run typecheck` passed; `bun run changelog:lint` passed. `bun run test:fast` still fails in unrelated existing `src/core/services/subsync.test.ts` cases (`runSubsyncManual keeps internal alass source file alive until sync finishes`, `runSubsyncManual resolves string sid values from mpv stream properties`).
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Hardened the changelog workflow end-to-end. `scripts/build-changelog.ts` now ignores helper files like `changes/README.md`, requires structured fragment metadata (`type` + `area`), groups generated release sections by change type, and emits shorter release notes focused on highlights plus install/assets pointers. Added explicit `changelog:lint` and `changelog:pr-check` commands, with PR validation based on git name-status so deleted fragment files do not satisfy the fragment requirement.
|
||||||
|
|
||||||
|
Updated contributor-facing workflow docs in `changes/README.md`, `docs/RELEASING.md`, and a new PR template so authors know to add a fragment or apply the `skip-changelog` label. CI now runs fragment linting on every run and enforces fragment presence on pull requests. Added regression coverage in `scripts/build-changelog.test.ts` and a new `src/ci-workflow.test.ts` to lock the workflow contract.
|
||||||
|
|
||||||
|
Verification completed: `bun test scripts/build-changelog.test.ts src/ci-workflow.test.ts src/release-workflow.test.ts`, `bun run typecheck`, and `bun run changelog:lint` all passed. A broader `bun run test:fast` run still fails in unrelated existing `src/core/services/subsync.test.ts` cases outside the changelog/workflow scope.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,52 @@
|
|||||||
|
---
|
||||||
|
id: TASK-123
|
||||||
|
title: Add progress logging for YouTube subtitle generation phases
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- '@codex'
|
||||||
|
created_date: '2026-03-08 07:07'
|
||||||
|
updated_date: '2026-03-08 07:15'
|
||||||
|
labels:
|
||||||
|
- ux
|
||||||
|
- logging
|
||||||
|
- youtube
|
||||||
|
- subtitles
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- /Users/sudacode/projects/japanese/SubMiner/launcher/youtube/orchestrator.ts
|
||||||
|
- >-
|
||||||
|
/Users/sudacode/projects/japanese/SubMiner/launcher/youtube/audio-extraction.ts
|
||||||
|
- /Users/sudacode/projects/japanese/SubMiner/launcher/youtube/whisper.ts
|
||||||
|
- >-
|
||||||
|
/Users/sudacode/projects/japanese/SubMiner/launcher/youtube/subtitle-fix-ai.ts
|
||||||
|
priority: medium
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Improve launcher YouTube subtitle generation observability so users can tell that work is happening and roughly how long each phase is taking. Cover manual subtitle probe, audio extraction, ffmpeg prep, whisper generation, and optional AI subtitle fix phases without flooding normal logs.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 Users see clear info-level phase logs for YouTube subtitle generation work including subtitle probe, fallback audio extraction, whisper, and optional AI fix phases.
|
||||||
|
- [x] #2 Long-running phases surface elapsed-time progress or explicit start/finish timing so it is obvious the process is still active.
|
||||||
|
- [x] #3 Automated tests cover the new logging/progress helper behavior where practical.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
Implemented a shared timed YouTube phase logger in launcher/youtube/progress.ts with info-level start/finish messages and warn-level failure messages that include elapsed time.
|
||||||
|
|
||||||
|
Wired phase logging into YouTube metadata probe, manual subtitle probe, fallback audio extraction, ffmpeg whisper prep, whisper primary/secondary generation, and optional AI subtitle fix phases.
|
||||||
|
|
||||||
|
Verification: bun test launcher/youtube/progress.test.ts launcher/youtube/orchestrator.test.ts passed; bun run typecheck passed.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Added clear phase-level observability for YouTube subtitle generation without noisy tool output. Users now see start/finish logs with elapsed time for subtitle probe, fallback audio extraction, ffmpeg prep, whisper generation, and optional AI subtitle-fix phases, making it obvious when generation is active and roughly how long each step took.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,76 @@
|
|||||||
|
---
|
||||||
|
id: TASK-124
|
||||||
|
title: >-
|
||||||
|
Remove YouTube subtitle generation modes and make YouTube playback always
|
||||||
|
generate/load subtitles
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-08 07:18'
|
||||||
|
updated_date: '2026-03-08 07:28'
|
||||||
|
labels:
|
||||||
|
- launcher
|
||||||
|
- youtube
|
||||||
|
- subtitles
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- >-
|
||||||
|
/Users/sudacode/projects/japanese/SubMiner/launcher/commands/playback-command.ts
|
||||||
|
- >-
|
||||||
|
/Users/sudacode/projects/japanese/SubMiner/launcher/config/args-normalizer.ts
|
||||||
|
- >-
|
||||||
|
/Users/sudacode/projects/japanese/SubMiner/launcher/config/youtube-subgen-config.ts
|
||||||
|
- /Users/sudacode/projects/japanese/SubMiner/launcher/types.ts
|
||||||
|
- /Users/sudacode/projects/japanese/SubMiner/config.example.jsonc
|
||||||
|
- >-
|
||||||
|
/Users/sudacode/projects/japanese/SubMiner/src/config/definitions/options-integrations.ts
|
||||||
|
- >-
|
||||||
|
/Users/sudacode/projects/japanese/SubMiner/src/config/resolve/subtitle-domains.ts
|
||||||
|
priority: high
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Simplify launcher YouTube playback by removing the configurable subtitle generation mode. For YouTube targets, the launcher should treat subtitle generation/loading as the canonical behavior instead of supporting off/preprocess/automatic branches. This change should remove the unreliable automatic/background path and the mode concept from config/CLI/env/docs, while preserving the core YouTube subtitle generation pipeline and mpv loading flow.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 Launcher playback no longer supports or branches on a YouTube subtitle generation mode; YouTube URLs follow a single generation-and-load flow.
|
||||||
|
- [x] #2 Configuration, CLI parsing, and environment handling no longer expose a YouTube subtitle generation mode option, and stale automatic/preprocess/off values are not part of the supported interface.
|
||||||
|
- [x] #3 Tests cover the new single-flow behavior and the removal of mode parsing/branching.
|
||||||
|
- [x] #4 User-facing config/docs/examples are updated to reflect the removed mode concept.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
1. Remove the YouTube subtitle generation mode concept from launcher/shared types, config parsing, CLI options, and environment normalization so no supported interface accepts automatic/preprocess/off.
|
||||||
|
2. Update playback orchestration so YouTube targets always run subtitle generation/loading before mpv startup and delete the background automatic path.
|
||||||
|
3. Adjust mpv YouTube URL argument construction to no longer branch on mode while preserving subtitle/audio language behavior and preloaded subtitle file injection.
|
||||||
|
4. Add/modify tests first to cover removed mode parsing and the single YouTube preload flow, then update config/docs/examples to match the simplified interface.
|
||||||
|
5. Run focused launcher/config tests plus typecheck, then summarize any remaining gaps.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
Removed launcher/shared youtubeSubgen.mode handling and collapsed YouTube playback onto a single preload-before-mpv subtitle generation flow.
|
||||||
|
|
||||||
|
Added launcher integration coverage proving YouTube subtitle generation runs before mpv startup and that the removed --mode flag now errors.
|
||||||
|
|
||||||
|
Verification: bun test launcher/config-domain-parsers.test.ts launcher/parse-args.test.ts launcher/mpv.test.ts launcher/main.test.ts src/config/config.test.ts; bun run test:config:src; bun run typecheck.
|
||||||
|
|
||||||
|
Broader repo checks still show pre-existing issues outside this change: bun run test:launcher:unit:src fails in launcher/aniskip-metadata.test.ts (MAL id assertion), and format scope check reports unrelated existing files launcher/youtube/orchestrator.ts, scripts/build-changelog.test.ts, scripts/build-changelog.ts.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Removed the launcher YouTube subtitle generation mode surface so YouTube playback now always runs the subtitle generation pipeline before starting mpv. The launcher no longer accepts youtubeSubgen.mode from shared config, CLI, or env normalization, and the old automatic/background loading path has been deleted from playback.
|
||||||
|
|
||||||
|
Updated mpv YouTube startup options to keep manual subtitle discovery enabled without requesting auto subtitles, and refreshed user-facing config/docs to describe a single YouTube subtitle generation flow. Added regression coverage for mode removal, config/template cleanup, and launcher ordering so YouTube subtitle work is confirmed to happen before mpv launch.
|
||||||
|
|
||||||
|
Verification: bun test launcher/config-domain-parsers.test.ts launcher/parse-args.test.ts launcher/mpv.test.ts launcher/main.test.ts src/config/config.test.ts; bun run test:config:src; bun run typecheck. Broader unrelated repo issues remain in launcher/aniskip-metadata.test.ts and existing formatting drift in launcher/youtube/orchestrator.ts plus scripts/build-changelog files.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,35 @@
|
|||||||
|
---
|
||||||
|
id: TASK-125
|
||||||
|
title: Add native AI API key secret storage
|
||||||
|
status: To Do
|
||||||
|
assignee: []
|
||||||
|
created_date: '2026-03-08 07:25'
|
||||||
|
labels:
|
||||||
|
- ai
|
||||||
|
- config
|
||||||
|
- security
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- /Users/sudacode/projects/japanese/SubMiner/src/ai/client.ts
|
||||||
|
- >-
|
||||||
|
/Users/sudacode/projects/japanese/SubMiner/src/core/services/anilist/anilist-token-store.ts
|
||||||
|
- >-
|
||||||
|
/Users/sudacode/projects/japanese/SubMiner/src/core/services/jellyfin-token-store.ts
|
||||||
|
- /Users/sudacode/projects/japanese/SubMiner/src/main.ts
|
||||||
|
priority: medium
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Store the shared AI provider API key using the app's native secret-storage pattern so users do not need to keep the OpenRouter key in config files or shell commands.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [ ] #1 Users can configure the shared AI provider without storing the API key in config.jsonc.
|
||||||
|
- [ ] #2 The app persists and reloads the shared AI API key using encrypted native secret storage when available.
|
||||||
|
- [ ] #3 Behavior is defined for existing ai.apiKey and ai.apiKeyCommand configs, including compatibility during migration.
|
||||||
|
- [ ] #4 The feature has regression tests covering key resolution and storage behavior.
|
||||||
|
- [ ] #5 User-facing configuration/docs are updated to describe the supported setup.
|
||||||
|
<!-- AC:END -->
|
||||||
@@ -0,0 +1,43 @@
|
|||||||
|
---
|
||||||
|
id: TASK-126
|
||||||
|
title: >-
|
||||||
|
Improve secondary subtitle readability with hover-only background and stronger
|
||||||
|
text separation
|
||||||
|
status: Done
|
||||||
|
assignee: []
|
||||||
|
created_date: '2026-03-08 07:35'
|
||||||
|
updated_date: '2026-03-08 07:40'
|
||||||
|
labels:
|
||||||
|
- overlay
|
||||||
|
- subtitles
|
||||||
|
- ui
|
||||||
|
dependencies: []
|
||||||
|
priority: medium
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Adjust overlay secondary subtitle styling so translation text stays readable on bright video backgrounds. Keep the dark background hidden by default in hover mode and show it only while hovered. Increase secondary subtitle weight to 600 and strengthen edge separation without changing primary subtitle styling.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 Secondary subtitles render with stronger edge separation than today.
|
||||||
|
- [x] #2 Secondary subtitle font weight defaults to 600.
|
||||||
|
- [x] #3 When secondary subtitle mode is hover, the secondary background appears only while hovered.
|
||||||
|
- [x] #4 Primary subtitle styling behavior remains unchanged.
|
||||||
|
- [x] #5 Renderer tests cover the new secondary hover background behavior and default secondary style values.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
Adjusted secondary subtitle defaults to use stronger shadowing, 600 font weight, and a translucent dark background. Routed secondary background/backdrop styling through CSS custom properties so hover mode can keep the background hidden until the secondary subtitle is actually hovered. Added renderer and config tests covering default values and hover-only background behavior.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Improved secondary subtitle readability by strengthening default text separation, increasing the default secondary weight to 600, and making the configured dark background appear only while hovered in secondary hover mode. Added config and renderer coverage for the new defaults and hover-aware style routing.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,83 @@
|
|||||||
|
---
|
||||||
|
id: TASK-127
|
||||||
|
title: Skip AniSkip lookup for YouTube and URL playback targets
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- '@codex'
|
||||||
|
created_date: '2026-03-08 08:24'
|
||||||
|
updated_date: '2026-03-08 10:12'
|
||||||
|
labels:
|
||||||
|
- bug
|
||||||
|
- launcher
|
||||||
|
- youtube
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- /Users/sudacode/projects/japanese/SubMiner/launcher/mpv.ts
|
||||||
|
- >-
|
||||||
|
/Users/sudacode/projects/japanese/SubMiner/launcher/commands/playback-command.ts
|
||||||
|
- /Users/sudacode/projects/japanese/SubMiner/launcher/mpv.test.ts
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Prevent launcher playback from attempting AniSkip metadata resolution when the user is playing a YouTube target or any URL target. AniSkip only works for local anime files, so URL-driven playback and YouTube subtitle-generation flows should bypass it entirely.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 Launcher playback skips AniSkip metadata resolution for explicit URL targets, including YouTube URLs.
|
||||||
|
- [x] #2 YouTube subtitle-generation playback does not invoke AniSkip lookup before mpv launch.
|
||||||
|
- [x] #3 Automated launcher tests cover the URL/YouTube skip behavior.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
1. Add a launcher mpv unit test that intercepts AniSkip resolution and proves URL/YouTube playback does not call it before spawning mpv.
|
||||||
|
2. Run the focused launcher mpv test to confirm the new case fails or exposes the current gap.
|
||||||
|
3. Patch launcher playback/AniSkip gating so URL and YouTube subtitle-generation paths always bypass AniSkip lookup.
|
||||||
|
4. Re-run focused launcher tests and record the verification results in task notes.
|
||||||
|
|
||||||
|
5. Add a Lua plugin regression test covering overlay-start on URL playback so AniSkip never runs after auto-start.
|
||||||
|
|
||||||
|
6. Patch plugin/subminer/aniskip.lua to short-circuit all AniSkip lookup triggers for remote URL media paths.
|
||||||
|
|
||||||
|
7. Re-run plugin regression plus touched launcher checks and update the task summary with the plugin-side fix.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
Added explicit AniSkip gating in launcher/mpv.ts via shouldResolveAniSkipMetadata(target, targetKind, preloadedSubtitles).
|
||||||
|
|
||||||
|
URL targets now always bypass AniSkip. File targets with preloaded subtitles also bypass AniSkip, covering YouTube subtitle-preload playback.
|
||||||
|
|
||||||
|
Added launcher/mpv.test.ts coverage for local-file vs URL vs preloaded-subtitle AniSkip gating.
|
||||||
|
|
||||||
|
Verification: bun test launcher/mpv.test.ts passed.
|
||||||
|
|
||||||
|
Verification: bun run typecheck passed.
|
||||||
|
|
||||||
|
Verification: bunx prettier --check launcher/mpv.ts launcher/mpv.test.ts passed.
|
||||||
|
|
||||||
|
Verification: bun run changelog:lint passed.
|
||||||
|
|
||||||
|
Verification: bun run test:launcher:unit:src remains blocked by unrelated existing failure in launcher/aniskip-metadata.test.ts (`resolveAniSkipMetadataForFile resolves MAL id and intro payload`: expected malId 1234, got null).
|
||||||
|
|
||||||
|
Added plugin regression in scripts/test-plugin-start-gate.lua for URL playback with auto-start/overlay-start; it now asserts no MAL or AniSkip curl requests occur.
|
||||||
|
|
||||||
|
Patched plugin/subminer/aniskip.lua to short-circuit AniSkip lookup for remote media paths (`scheme://...`), which covers YouTube URL playback inside the mpv plugin lifecycle.
|
||||||
|
|
||||||
|
Verification: lua scripts/test-plugin-start-gate.lua passed.
|
||||||
|
|
||||||
|
Verification: bun run test:plugin:src passed.
|
||||||
|
|
||||||
|
Verification: bun test launcher/mpv.test.ts passed after plugin-side fix.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Fixed AniSkip suppression end-to-end for URL playback. The launcher now skips AniSkip before mpv launch, and the mpv plugin now also refuses AniSkip lookups for remote URL media during file-loaded, overlay-start, or later refresh triggers. Added regression coverage in both launcher/mpv.test.ts and scripts/test-plugin-start-gate.lua, plus a changelog fragment. Wider `bun run test:launcher:unit:src` is still blocked by the unrelated existing launcher/aniskip-metadata.test.ts MAL-id failure.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,35 @@
|
|||||||
|
---
|
||||||
|
id: TASK-128
|
||||||
|
title: >-
|
||||||
|
Prevent AI subtitle fix from translating primary YouTube subtitles into the
|
||||||
|
wrong language
|
||||||
|
status: Done
|
||||||
|
assignee: []
|
||||||
|
created_date: '2026-03-08 09:02'
|
||||||
|
updated_date: '2026-03-08 09:17'
|
||||||
|
labels:
|
||||||
|
- bug
|
||||||
|
- youtube-subgen
|
||||||
|
- ai
|
||||||
|
dependencies: []
|
||||||
|
priority: high
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
AI subtitle cleanup can preserve cue structure while changing subtitle language, causing primary Japanese subtitle files to come back in English. Add guards so AI-fixed subtitles preserve expected language and fall back to raw Whisper output when language drifts.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 Primary AI subtitle fix rejects output that drifts away from the expected source language.
|
||||||
|
- [x] #2 Rejected AI fixes fall back to the raw Whisper subtitle without corrupting published subtitle language.
|
||||||
|
- [x] #3 Regression tests cover a primary Japanese subtitle batch being translated into English by the AI fixer.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Added a primary-language guard to AI subtitle fixing so Japanese source subtitles are rejected if the AI rewrites them into English while preserving SRT structure. The fixer now receives the expected source language from the YouTube orchestrator, and regression coverage verifies that language drift falls back to the raw Whisper subtitle path.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,37 @@
|
|||||||
|
---
|
||||||
|
id: TASK-129
|
||||||
|
title: >-
|
||||||
|
Split AI model and system prompt config between Anki and YouTube subtitle
|
||||||
|
generation
|
||||||
|
status: Done
|
||||||
|
assignee: []
|
||||||
|
created_date: '2026-03-08 09:40'
|
||||||
|
updated_date: '2026-03-08 09:57'
|
||||||
|
labels:
|
||||||
|
- config
|
||||||
|
- ai
|
||||||
|
- anki
|
||||||
|
- youtube-subgen
|
||||||
|
dependencies: []
|
||||||
|
priority: high
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
The current top-level shared AI config forces Anki translation and YouTube subtitle fixing to share the same model and system prompt, which caused subtitle-fix requests to inherit a translation prompt and translate Japanese primary subtitles into English. Refactor config so provider credentials stay shared while model and system prompt can be configured per feature.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 Anki integration can use its own AI model and system prompt independently of YouTube subtitle generation.
|
||||||
|
- [x] #2 YouTube subtitle generation can use its own AI model and system prompt independently of Anki integration.
|
||||||
|
- [x] #3 Existing shared provider credentials remain reusable without duplicating API key/base URL config.
|
||||||
|
- [x] #4 Config example, defaults, validation, and regression tests cover the new per-feature override shape.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Added per-feature AI model/systemPrompt overrides for Anki and YouTube subtitle generation while keeping shared provider transport settings reusable. Anki now accepts `ankiConnect.ai` object config with `enabled`, `model`, and `systemPrompt`; YouTube subtitle generation accepts `youtubeSubgen.ai` overrides and merges them over the shared AI provider config. Updated config resolution, launcher parsing, runtime wiring, hot-reload handling, example config, and regression coverage.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,76 @@
---
|
||||||
|
id: TASK-130
|
||||||
|
title: Keep background SubMiner alive after launcher-managed mpv exits
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-08 10:08'
|
||||||
|
updated_date: '2026-03-08 11:00'
|
||||||
|
labels:
|
||||||
|
- bug
|
||||||
|
- launcher
|
||||||
|
- mpv
|
||||||
|
- overlay
|
||||||
|
dependencies: []
|
||||||
|
priority: high
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
The launcher currently tears down the running SubMiner background process when a launcher-managed mpv session exits. Background SubMiner should remain alive so a later mpv instance can reconnect and request the overlay without restarting the app.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 Closing a launcher-managed mpv session does not send `--stop` to the running SubMiner background process.
|
||||||
|
- [x] #2 Closing a launcher-managed mpv session does not SIGTERM the tracked SubMiner process just because mpv exited.
|
||||||
|
- [x] #3 Launcher cleanup still terminates mpv and launcher-owned helper children without regressing existing overlay start behavior.
|
||||||
|
- [x] #4 Automated tests cover the no-stop-on-mpv-exit behavior.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
1. Add a launcher regression test that proves mpv exit no longer triggers SubMiner `--stop` or launcher SIGTERM of the tracked overlay process.
|
||||||
|
2. Update launcher teardown so normal mpv-session cleanup only stops mpv/helper children and preserves the background SubMiner process for future reconnects.
|
||||||
|
3. Run the focused launcher tests and smoke coverage for the affected behavior, then record results in the task.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
Split launcher cleanup so normal mpv-session shutdown no longer sends `--stop` to SubMiner or SIGTERM to the tracked overlay process. Added `cleanupPlaybackSession()` for mpv/helper-child cleanup only, and switched playback finalization to use it.
|
||||||
|
|
||||||
|
Updated launcher smoke coverage to assert the background app stays alive after mpv exits, and added a focused unit regression for the new cleanup path.
|
||||||
|
|
||||||
|
Validation: `bun test launcher/mpv.test.ts launcher/smoke.e2e.test.ts` passed; `bun run typecheck` passed. `bun run test:launcher:unit:src` still reports an unrelated pre-existing failure in `launcher/aniskip-metadata.test.ts`.
|
||||||
|
|
||||||
|
Added changelog fragment `changes/task-130.md` for the launcher fix and verified it with `bun run changelog:lint`.
|
||||||
|
|
||||||
|
User verified the bug still reproduces when closing playback with `q`. Root cause narrowed further: the mpv plugin `plugin/subminer/lifecycle.lua` calls `process.stop_overlay()` on mpv `shutdown`, which still sends SubMiner `--stop` even after launcher cleanup was fixed.
|
||||||
|
|
||||||
|
Patched the remaining stop path in `plugin/subminer/lifecycle.lua`: mpv `shutdown` no longer calls `process.stop_overlay()`. Pressing mpv `q` should now preserve the background app and only tear down the mpv session.
|
||||||
|
|
||||||
|
Validation update: `lua scripts/test-plugin-start-gate.lua` passed after adding a shutdown regression, and `bun test launcher/mpv.test.ts launcher/smoke.e2e.test.ts` still passed.
|
||||||
|
|
||||||
|
Fixed a second-instance reconnect bug in `src/core/services/cli-command.ts`: `--start` on an already-initialized running instance now still updates the MPV socket path and reconnects the MPV client instead of treating the command as a no-op. This keeps the already-warmed background app reusable for later mpv launches.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Kept the background SubMiner process reusable across both mpv shutdown and later reconnects. The first fix separated launcher playback cleanup from full app shutdown. The second fix removed the mpv plugin `shutdown` stop call so default mpv `q` no longer sends SubMiner `--stop`. The third fix corrected second-instance CLI handling so `--start` on an already-running, already-initialized instance still reconnects MPV instead of being ignored.
|
||||||
|
|
||||||
|
Net effect: background SubMiner can stay alive, keep its warm state, and reconnect to later mpv instances without rerunning startup/warmup work in a fresh app instance.
|
||||||
|
|
||||||
|
Coverage now includes: launcher playback cleanup (`launcher/mpv.test.ts`), launcher smoke reconnect/keep-alive flow (`launcher/smoke.e2e.test.ts`), mpv plugin shutdown preservation (`scripts/test-plugin-start-gate.lua`), and second-instance start/reconnect behavior (`src/core/services/cli-command.test.ts`).
|
||||||
|
|
||||||
|
Tests run:

- `bun test src/core/services/cli-command.test.ts launcher/mpv.test.ts launcher/smoke.e2e.test.ts`
- `lua scripts/test-plugin-start-gate.lua`
- `bun run typecheck`
- `bun run changelog:lint`
|
||||||
|
|
||||||
|
Note: the broader `bun run test:launcher:unit:src` lane still has an unrelated pre-existing failure in `launcher/aniskip-metadata.test.ts`.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,63 @@
|
|||||||
|
---
|
||||||
|
id: TASK-131
|
||||||
|
title: Avoid duplicate tokenization warmup after background startup
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-08 10:12'
|
||||||
|
updated_date: '2026-03-08 12:00'
|
||||||
|
labels:
|
||||||
|
- bug
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- >-
|
||||||
|
/Users/sudacode/projects/japanese/SubMiner/src/main/runtime/composers/mpv-runtime-composer.ts
|
||||||
|
- >-
|
||||||
|
/Users/sudacode/projects/japanese/SubMiner/src/main/runtime/startup-warmups.ts
|
||||||
|
- >-
|
||||||
|
/Users/sudacode/projects/japanese/SubMiner/src/main/runtime/composers/mpv-runtime-composer.test.ts
|
||||||
|
priority: medium
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
When SubMiner is already running in the background and mpv is launched from the launcher or mpv plugin, the live app should reuse startup tokenization warmup state instead of re-entering the Yomitan/tokenization/annotation warmup path on first overlay use.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 Background startup tokenization warmup is recorded in the runtime state used by later mpv/tokenization flows.
|
||||||
|
- [x] #2 Launching mpv from the launcher or plugin against an already-running background app does not re-run duplicate Yomitan/tokenization annotation warmup work in the live process.
|
||||||
|
- [x] #3 Regression tests cover the warmed-background path and protect against re-entering duplicate warmup work.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
1. Add a regression test covering the case where background startup warmups already completed and a later tokenize call must not re-enter Yomitan/MeCab/dictionary warmups.
|
||||||
|
2. Update mpv tokenization warmup composition so startup background warmups and on-demand tokenization share the same completion state.
|
||||||
|
3. Run the focused composer/runtime tests and update acceptance criteria/notes with results.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
Root-cause hypothesis: startup background warmups and on-demand tokenization warmups use separate state, so later mpv launch can re-enter warmup bookkeeping even though background startup already warmed dependencies.
|
||||||
|
|
||||||
|
Implemented shared warmup state between startup background warmups and on-demand tokenization warmups by forwarding scheduled Yomitan/tokenization promises into the mpv runtime composer. Added regression coverage for the warmed-background path. Verified with `bun run test:fast` plus focused composer/startup warmup tests.
|
||||||
|
|
||||||
|
Follow-up root cause from live retest: second mpv open could still pause on the startup gate because the runtime only treated full background tokenization warmup completion as reusable readiness. In practice, first-file tokenization could already be ready while slower dictionary prewarm work was still finishing, so reopening a video waited on duplicate warmup completion even though annotations were already usable.
|
||||||
|
|
||||||
|
Adjusted `src/main/runtime/composers/mpv-runtime-composer.ts` so autoplay reuse keys off a separate playback-ready latch. The latch flips true either when background warmups fully cover tokenization or when `onTokenizationReady` fires for a real subtitle line. `src/main.ts` already uses `isTokenizationWarmupReady()` to fast-signal `subminer-autoplay-ready` on a fresh media-path change, so reopened videos can now resume immediately once tokenization has succeeded once in the persistent app.
|
||||||
|
|
||||||
|
Validation update: `bun test src/core/services/cli-command.test.ts src/main/runtime/mpv-main-event-actions.test.ts src/main/runtime/composers/mpv-runtime-composer.test.ts launcher/mpv.test.ts launcher/smoke.e2e.test.ts` passed, `lua scripts/test-plugin-start-gate.lua` passed, and `bun run typecheck` passed.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Background startup tokenization warmups now feed the same in-memory warmup state used by later mpv tokenization. When the app is already running and warmed in the background, launcher/plugin-driven mpv startup reuses that state instead of re-entering Yomitan/tokenization annotation warmups. Added a regression test for the warmed-background path and verified with `bun run test:fast`.
|
||||||
|
|
||||||
|
A later follow-up fixed the remaining second-open delay: autoplay reuse no longer waits for the entire background dictionary warmup pipeline to finish. After the persistent app has produced one tokenization-ready event, later mpv reconnects reuse that readiness immediately, so reopening the same or another video does not pause again on duplicate warmup bookkeeping.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,80 @@
|
|||||||
|
---
|
||||||
|
id: TASK-131
|
||||||
|
title: Make default overlay fullscreen and AniSkip end-jump keybindings easier to reach
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-09 00:00'
|
||||||
|
updated_date: '2026-03-09 00:30'
|
||||||
|
labels:
|
||||||
|
- enhancement
|
||||||
|
- overlay
|
||||||
|
- mpv
|
||||||
|
- aniskip
|
||||||
|
dependencies: []
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
|
||||||
|
Make two default keyboard actions easier to hit during playback: add `f` as the built-in overlay fullscreen toggle, and make AniSkip's default intro-end jump use `Tab`.
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
|
||||||
|
- [x] #1 Default overlay keybindings include `KeyF` mapped to mpv fullscreen toggle.
|
||||||
|
- [x] #2 Default AniSkip hint/button key defaults to `Tab` and the plugin registers that binding.
|
||||||
|
- [x] #3 Automated regression coverage exists for both default bindings.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
|
||||||
|
1. Add a failing TypeScript regression proving default overlay keybindings include fullscreen on `KeyF`.
|
||||||
|
2. Add a failing Lua/plugin regression proving AniSkip defaults to `Tab`, updates the OSD hint text, and registers the expected keybinding.
|
||||||
|
3. Patch the default keybinding/config values with minimal behavior changes and keep fallback binding behavior intentional.
|
||||||
|
4. Run focused tests plus touched verification commands, then record results and a short changelog fragment.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
|
||||||
|
Added `KeyF -> ['cycle', 'fullscreen']` to the built-in overlay keybindings in `src/config/definitions/shared.ts`.
|
||||||
|
|
||||||
|
Changed the mpv plugin AniSkip default button key from `y-k` to `TAB` in both the runtime default options and the shipped `plugin/subminer.conf`. The AniSkip OSD hint now also falls back to `TAB` when no explicit key is configured.
|
||||||
|
|
||||||
|
Adjusted `plugin/subminer/ui.lua` fallback registration so the legacy `y-k` binding is only added for custom non-default AniSkip bindings, instead of always shadowing the new default.
|
||||||
|
|
||||||
|
Extended regression coverage:
|
||||||
|
|
||||||
|
- `src/config/definitions/domain-registry.test.ts` now asserts the default fullscreen binding on `KeyF`.
|
||||||
|
- `scripts/test-plugin-start-gate.lua` now isolates plugin runs correctly, records keybinding/observer registration, and asserts the default AniSkip keybinding/prompt behavior for `TAB`.
|
||||||
|
|
||||||
|
Verification:

- `bun test src/config/definitions/domain-registry.test.ts`
- `bun run test:config:src`
- `lua scripts/test-plugin-start-gate.lua`
- `bun run changelog:lint`
- `bun run typecheck`
|
||||||
|
|
||||||
|
Known unrelated verification gap:
|
||||||
|
|
||||||
|
- `bun run test:plugin:src` still fails in `scripts/test-plugin-binary-windows.lua` on this Linux host (`windows env override should resolve .exe suffix`), outside the keybinding changes in this task.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
|
||||||
|
Default overlay playback now has an easier fullscreen toggle on `f`, and AniSkip's default intro-end jump now uses `Tab`. The mpv plugin hint text and registration logic were updated to match the new default, while keeping legacy `y-k` fallback behavior limited to custom non-default bindings.
|
||||||
|
|
||||||
|
Regression coverage was added for both defaults, and the plugin test harness now resets plugin bootstrap state between scenarios so keybinding assertions can run reliably.
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,69 @@
|
|||||||
|
---
|
||||||
|
id: TASK-132
|
||||||
|
title: Gate macOS overlay shortcuts to the focused mpv window
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-08 18:24'
|
||||||
|
updated_date: '2026-03-08 18:55'
|
||||||
|
labels:
|
||||||
|
- bug
|
||||||
|
- macos
|
||||||
|
- shortcuts
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- >-
|
||||||
|
/Users/sudacode/projects/japanese/SubMiner/src/core/services/overlay-shortcut.ts
|
||||||
|
- >-
|
||||||
|
/Users/sudacode/projects/japanese/SubMiner/src/window-trackers/macos-tracker.ts
|
||||||
|
- >-
|
||||||
|
/Users/sudacode/projects/japanese/SubMiner/scripts/get-mpv-window-macos.swift
|
||||||
|
priority: high
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Fix the macOS shortcut handling so SubMiner overlay keybinds do not intercept system or other-app shortcuts while SubMiner is in the background. Overlay shortcuts should only be active while the tracked mpv window is present and focused, and should stop grabbing keyboard input when mpv is not the frontmost window.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 On macOS, overlay shortcuts do not trigger while mpv is not the focused/frontmost window.
|
||||||
|
- [x] #2 On macOS, overlay shortcuts remain available while the tracked mpv window is open and focused.
|
||||||
|
- [x] #3 Existing non-macOS shortcut behavior is unchanged.
|
||||||
|
- [x] #4 Automated tests cover the macOS focus-gating behavior and guard against background shortcut interception.
|
||||||
|
- [x] #5 Any user-facing docs/config notes affected by the behavior change are updated in the same task if needed.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
1. Add a failing macOS-focused shortcut lifecycle test that proves overlay shortcuts stay inactive when the tracked mpv window exists but is not frontmost, and activate when that tracked window becomes frontmost.
|
||||||
|
2. Add a failing tracker/helper test that covers the focused/frontmost signal parsed from the macOS helper output.
|
||||||
|
3. Extend the macOS helper/tracker contract to surface both geometry and focused/frontmost state for the tracked mpv window.
|
||||||
|
4. Wire overlay shortcut activation to require both overlay runtime initialization and tracked-mpv focus on macOS, while leaving non-macOS behavior unchanged.
|
||||||
|
5. Re-run the targeted shortcut/tracker tests, then the broader related shortcut/runtime suite, and update task notes/acceptance criteria based on results.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
Added a macOS-specific shortcut activation predicate so global overlay shortcuts now require both overlay runtime readiness and a focused tracked mpv window; non-macOS behavior still keys off runtime readiness only.
|
||||||
|
|
||||||
|
Extended the base window tracker with optional focus-state callbacks/getters and wired initializeOverlayRuntime to re-sync overlay shortcuts whenever tracker focus changes.
|
||||||
|
|
||||||
|
Updated the macOS helper/tracker contract to return geometry plus frontmost/focused state for the tracked mpv process and added parser coverage for focused and unfocused output.
|
||||||
|
|
||||||
|
Verified with `bun x tsc -p tsconfig.json --noEmit`, targeted shortcut/tracker tests, and `bun run test:core:src` (439 passing).
|
||||||
|
|
||||||
|
No user-facing config or documentation surface changed, so no docs update was required for this fix.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Fixed the macOS background shortcut interception bug by gating SubMiner's global overlay shortcuts on tracked mpv focus instead of overlay-runtime initialization alone. The macOS window helper now reports whether the tracked mpv process is frontmost, the tracker exposes focus change callbacks, and overlay shortcut synchronization re-runs when that focus state flips so `Ctrl+C`/`Ctrl+V` and similar shortcuts are no longer captured while mpv is in the background.
|
||||||
|
|
||||||
|
The change keeps existing non-macOS shortcut behavior unchanged. Added regression coverage for the activation decision, tracker focus-change re-sync, and macOS helper output parsing. Verification: `bun x tsc -p tsconfig.json --noEmit`, targeted shortcut/tracker tests, and `bun run test:core:src` (439 passing).
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,47 @@
---
|
||||||
|
id: TASK-133
|
||||||
|
title: Improve AniList character dictionary parity with upstream guide
|
||||||
|
status: To Do
|
||||||
|
assignee: []
|
||||||
|
created_date: '2026-03-08 21:06'
|
||||||
|
updated_date: '2026-03-08 21:35'
|
||||||
|
labels:
|
||||||
|
- dictionary
|
||||||
|
- anilist
|
||||||
|
- planning
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- >-
|
||||||
|
https://github.com/bee-san/Japanese_Character_Name_Dictionary/blob/main/docs/agents_read_me.md
|
||||||
|
- >-
|
||||||
|
/Users/sudacode/projects/japanese/SubMiner/src/main/character-dictionary-runtime.ts
|
||||||
|
- >-
|
||||||
|
/Users/sudacode/projects/japanese/SubMiner/src/main/character-dictionary-runtime.test.ts
|
||||||
|
documentation:
|
||||||
|
- >-
|
||||||
|
/Users/sudacode/projects/japanese/SubMiner/docs/plans/2026-03-08-anilist-character-dictionary-parity-design.md
|
||||||
|
- >-
|
||||||
|
/Users/sudacode/projects/japanese/SubMiner/docs/plans/2026-03-08-anilist-character-dictionary-parity.md
|
||||||
|
priority: high
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Plan and implement guide-faithful parity improvements for the AniList character dictionary flow inside SubMiner's current single-media generation path. Scope includes AniList first/last name hints, hint-aware reading generation for kanji/native names, expanded honorific coverage, 160x200 JPEG thumbnail handling, and AniList 429 retry/backoff behavior.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [ ] #1 AniList character queries include first/last name fields and preserve them through runtime data models.
|
||||||
|
- [ ] #2 Dictionary generation uses hint-aware name splitting and reading generation for kanji and mixed native names, not only kana-only readings.
|
||||||
|
- [ ] #3 Honorific generation is expanded substantially toward upstream coverage and is covered by regression tests.
|
||||||
|
- [ ] #4 Character and voice-actor images are resized or re-encoded to bounded JPEG thumbnails with fallback behavior.
|
||||||
|
- [ ] #5 AniList requests handle 429 responses with bounded exponential backoff and tests cover retry behavior.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Notes

<!-- SECTION:NOTES:BEGIN -->

Approved design and implementation plan captured on 2026-03-08. Scope stays within current single-media AniList dictionary flow; excludes username-driven CURRENT-list fetching and Yomitan auto-update schema work.

<!-- SECTION:NOTES:END -->
|
||||||
@@ -0,0 +1,66 @@
|
|||||||
|
---
|
||||||
|
id: TASK-134
|
||||||
|
title: Harden Windows release signing against transient SignPath failures
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-09 00:00'
|
||||||
|
updated_date: '2026-03-08 20:23'
|
||||||
|
labels:
|
||||||
|
- ci
|
||||||
|
- release
|
||||||
|
- windows
|
||||||
|
- signing
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- .github/workflows/release.yml
|
||||||
|
- package.json
|
||||||
|
- src/release-workflow.test.ts
|
||||||
|
- https://github.com/ksyasuda/SubMiner/actions/runs/22836585479
|
||||||
|
priority: high
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
The tag-driven Release workflow currently fails the Windows lane if the SignPath connector returns transient 502 errors during submission, and the tagged build scripts also allow electron-builder to implicitly publish unsigned artifacts before the final release job runs. Harden the workflow so transient SignPath outages get bounded retries and release packaging never auto-publishes unsigned assets.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [ ] #1 Windows release signing retries transient SignPath submission failures within the release workflow before failing the job.
|
||||||
|
- [ ] #2 Release packaging scripts disable electron-builder implicit publish so build jobs do not upload unsigned assets on tag builds.
|
||||||
|
- [ ] #3 Regression coverage fails if SignPath retry scaffolding or publish suppression is removed.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
1. Add a regression test for the release workflow/package script shape covering SignPath retries and `--publish never`.
|
||||||
|
2. Patch the Windows release job to retry SignPath submission a bounded number of times and still fail hard if every attempt fails.
|
||||||
|
3. Update tagged package build scripts to disable implicit electron-builder publishing during release builds.
|
||||||
|
4. Run targeted release-workflow verification and capture any remaining manual release cleanup needed for `v0.5.0`.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
The failed Windows signing step in GitHub Actions run `22836585479` was not caused by missing secrets or an artifact-shape mismatch. The SignPath GitHub action retried repeated `502` responses from the SignPath connector for several minutes and then failed the job.
|
||||||
|
|
||||||
|
Hardened `.github/workflows/release.yml` by replacing the single SignPath submission with three bounded attempts. The second and third submissions only run if the previous attempt failed, and the job now fails with an explicit rerun message only after all three attempts fail. Signed-artifact upload is keyed to the successful attempt so the release job still consumes the normal `windows` artifact name.
|
||||||
|
|
||||||
|
Also fixed a separate release regression exposed by the same run: `electron-builder` was implicitly publishing unsigned release assets during tag builds because the packaging scripts did not set `--publish never` and the workflow injected `GH_TOKEN` into build jobs. Updated the relevant package scripts to pass `--publish never`, removed `GH_TOKEN` from the packaging jobs, and made the final publish step force `--draft=false` when editing an existing tag release so previously-created draft releases get published.
|
||||||
|
|
||||||
|
Verification: `bun test src/release-workflow.test.ts`, `bun run typecheck`, and `bun run test:fast` all passed locally after restoring the missing local `libsql` install with `bun install --frozen-lockfile`.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Windows release signing is now resilient to transient SignPath connector outages. The release workflow retries the SignPath submission up to three times before failing, and only uploads the signed Windows artifact from the attempt that succeeded.
|
||||||
|
|
||||||
|
Release packaging also no longer auto-publishes unsigned assets on tag builds. The `electron-builder` scripts now force `--publish never`, the build jobs no longer pass `GH_TOKEN` into packaging steps, and the final GitHub release publish step explicitly clears draft state when updating an existing tag release.
|
||||||
|
|
||||||
|
Validation: `bun test src/release-workflow.test.ts`, `bun run typecheck`, `bun run test:fast`.
|
||||||
|
Manual follow-up for the failed `v0.5.0` release: rerun the `Release` workflow after merging/pushing this fix, then clean up the stray draft/untagged release assets created by the failed run if they remain.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,57 @@
|
|||||||
|
---
|
||||||
|
id: TASK-135
|
||||||
|
title: Cut patch release v0.5.1 for Windows signing fix
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-08 20:24'
|
||||||
|
updated_date: '2026-03-08 20:28'
|
||||||
|
labels:
|
||||||
|
- release
|
||||||
|
- patch
|
||||||
|
dependencies:
|
||||||
|
- TASK-134
|
||||||
|
references:
|
||||||
|
- package.json
|
||||||
|
- CHANGELOG.md
|
||||||
|
- release/release-notes.md
|
||||||
|
priority: high
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Publish a patch release from the workflow-signing fix on `main` by bumping the app version, generating the committed changelog artifacts for the new version, and pushing a new `v0.5.1` tag instead of rewriting the failed `v0.5.0` tag.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [ ] #1 Repository version metadata is updated to `0.5.1`.
|
||||||
|
- [ ] #2 `CHANGELOG.md` and `release/release-notes.md` contain the committed `v0.5.1` section and released fragments are removed.
|
||||||
|
- [ ] #3 New `v0.5.1` commit and tag are pushed to `origin`.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
1. Bump the package version to `0.5.1`.
|
||||||
|
2. Run the changelog builder so `CHANGELOG.md`/`release-notes.md` match the release workflow contract.
|
||||||
|
3. Run the relevant verification commands.
|
||||||
|
4. Commit the release-prep changes, create `v0.5.1`, and push both commit and tag.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
Bumped `package.json` from `0.5.0` to `0.5.1`, then ran `bun run changelog:build` so the committed release artifacts match the release workflow contract. That prepended the `v0.5.1` section to `CHANGELOG.md`, regenerated `release/release-notes.md`, and removed the consumed changelog fragments from `changes/`.
|
||||||
|
|
||||||
|
Verification before tagging: `bun run changelog:lint`, `bun run changelog:check --version 0.5.1`, `bun run typecheck`, and `bun run test:fast`.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Prepared patch release `v0.5.1` from the signing-workflow fix on `main` instead of rewriting the failed `v0.5.0` tag. Repository version metadata, changelog, and committed release notes are all aligned with the new release tag, and the consumed changelog fragments were removed.
|
||||||
|
|
||||||
|
Validation: `bun run changelog:lint`, `bun run changelog:check --version 0.5.1`, `bun run typecheck`, `bun run test:fast`.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,61 @@
|
|||||||
|
---
|
||||||
|
id: TASK-136
|
||||||
|
title: Pin SignPath artifact configuration in release workflow
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-08 20:41'
|
||||||
|
updated_date: '2026-03-08 20:58'
|
||||||
|
labels:
|
||||||
|
- ci
|
||||||
|
- release
|
||||||
|
- windows
|
||||||
|
- signing
|
||||||
|
dependencies:
|
||||||
|
- TASK-134
|
||||||
|
references:
|
||||||
|
- .github/workflows/release.yml
|
||||||
|
- build/signpath-windows-artifact-config.xml
|
||||||
|
- src/release-workflow.test.ts
|
||||||
|
priority: high
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
The Windows release workflow currently relies on the default SignPath artifact configuration configured in the SignPath UI. Pin the workflow to an explicit artifact-configuration slug so the checked-in signing configuration and CI behavior stay deterministic across future SignPath project changes.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [ ] #1 The Windows release workflow validates a dedicated SignPath artifact-configuration secret/input.
|
||||||
|
- [ ] #2 Every SignPath submission attempt passes `artifact-configuration-slug`.
|
||||||
|
- [ ] #3 Regression coverage fails if the explicit SignPath artifact-configuration binding is removed.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
1. Add a failing workflow regression test for the explicit SignPath artifact-configuration slug.
|
||||||
|
2. Patch the Windows signing secret validation and SignPath action inputs to require the slug.
|
||||||
|
3. Run targeted release-workflow verification plus the standard fast lane.
|
||||||
|
4. Cut a new patch release so the tag-triggered release workflow runs with the pinned SignPath configuration.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
Added regression coverage in `src/release-workflow.test.ts` for an explicit SignPath artifact-configuration slug so the release workflow test now fails if the slug validation or action input is removed.
|
||||||
|
|
||||||
|
Patched `.github/workflows/release.yml` so Windows signing now requires `SIGNPATH_ARTIFACT_CONFIGURATION_SLUG` during secret validation and passes `artifact-configuration-slug: ${{ secrets.SIGNPATH_ARTIFACT_CONFIGURATION_SLUG }}` on every SignPath submission attempt.
|
||||||
|
|
||||||
|
Verification: `bun test src/release-workflow.test.ts`, `bun run typecheck`, `bun run test:fast`.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
The release workflow is now pinned to an explicit SignPath artifact configuration instead of relying on whichever SignPath artifact config is marked default in the UI. Windows signing secret validation fails fast if `SIGNPATH_ARTIFACT_CONFIGURATION_SLUG` is missing, and every SignPath submission attempt now includes the pinned slug.
|
||||||
|
|
||||||
|
Validation: `bun test src/release-workflow.test.ts`, `bun run typecheck`, `bun run test:fast`.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,57 @@
|
|||||||
|
---
|
||||||
|
id: TASK-137
|
||||||
|
title: Cut patch release v0.5.2 for SignPath artifact config pinning
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-08 20:44'
|
||||||
|
updated_date: '2026-03-08 20:58'
|
||||||
|
labels:
|
||||||
|
- release
|
||||||
|
- patch
|
||||||
|
dependencies:
|
||||||
|
- TASK-136
|
||||||
|
references:
|
||||||
|
- package.json
|
||||||
|
- CHANGELOG.md
|
||||||
|
- release/release-notes.md
|
||||||
|
priority: high
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Publish a patch release from the SignPath artifact-configuration pinning change by bumping the app version, generating the committed changelog artifacts for the new version, and pushing a new `v0.5.2` tag.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [ ] #1 Repository version metadata is updated to `0.5.2`.
|
||||||
|
- [ ] #2 `CHANGELOG.md` and `release/release-notes.md` contain the committed `v0.5.2` section and consumed fragments are removed.
|
||||||
|
- [ ] #3 New `v0.5.2` commit and tag are pushed to `origin`.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
1. Add the release fragment for the SignPath configuration pinning change.
|
||||||
|
2. Bump `package.json` to `0.5.2` and run the changelog builder.
|
||||||
|
3. Run changelog/typecheck/test verification.
|
||||||
|
4. Commit the release-prep change set, create `v0.5.2`, and push commit plus tag.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
Bumped `package.json` from `0.5.1` to `0.5.2`, ran `bun run changelog:build`, and committed the generated release artifacts. That prepended the `v0.5.2` section to `CHANGELOG.md`, regenerated `release/release-notes.md`, and removed the consumed `changes/signpath-artifact-config-pin.md` fragment.
|
||||||
|
|
||||||
|
Verification before tagging: `bun run changelog:lint`, `bun run changelog:check --version 0.5.2`, `bun run typecheck`, and `bun run test:fast`.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Prepared patch release `v0.5.2` so the explicit SignPath artifact-configuration pin ships on a fresh release tag. Version metadata, committed changelog artifacts, and release notes are aligned with the new patch version.
|
||||||
|
|
||||||
|
Validation: `bun run changelog:lint`, `bun run changelog:check --version 0.5.2`, `bun run typecheck`, `bun run test:fast`.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,62 @@
|
|||||||
|
---
|
||||||
|
id: TASK-138
|
||||||
|
title: Publish unsigned Windows release artifacts and add local unsigned build script
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-09 00:00'
|
||||||
|
updated_date: '2026-03-09 00:00'
|
||||||
|
labels:
|
||||||
|
- release
|
||||||
|
- windows
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- .github/workflows/release.yml
|
||||||
|
- package.json
|
||||||
|
- src/release-workflow.test.ts
|
||||||
|
priority: high
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Stop the tag-driven release workflow from depending on SignPath and publish unsigned Windows `.exe` and `.zip` artifacts directly. Add an explicit local `build:win:unsigned` script without changing the existing `build:win` command.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 Windows release CI builds unsigned artifacts without requiring SignPath secrets.
|
||||||
|
- [x] #2 The Windows release job uploads `release/*.exe` and `release/*.zip` directly as the `windows` artifact.
|
||||||
|
- [x] #3 The repo exposes a local `build:win:unsigned` script for explicit unsigned Windows packaging.
|
||||||
|
- [x] #4 Regression coverage fails if the workflow reintroduces SignPath submission or drops the unsigned script.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
1. Update workflow regression tests to assert unsigned Windows release behavior and the new local script.
|
||||||
|
2. Patch `package.json` to add `build:win:unsigned`.
|
||||||
|
3. Patch `.github/workflows/release.yml` to build unsigned Windows artifacts and upload them directly.
|
||||||
|
4. Add the release changelog fragment and run focused verification.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
Removed the Windows SignPath secret validation and submission steps from `.github/workflows/release.yml`. The Windows release job now runs `bun run build:win:unsigned` and uploads `release/*.exe` and `release/*.zip` directly as the `windows` artifact consumed by the release job.
|
||||||
|
|
||||||
|
Added `scripts/build-win-unsigned.mjs` plus the `build:win:unsigned` package script. The wrapper clears Windows code-signing environment variables and disables identity auto-discovery before invoking `electron-builder`, so release CI stays unsigned even if signing credentials are configured elsewhere.
|
||||||
|
|
||||||
|
Updated `src/release-workflow.test.ts` to assert the unsigned workflow contract and added the release changelog fragment in `changes/unsigned-windows-release-builds.md`.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Windows release CI now publishes unsigned artifacts directly and no longer depends on SignPath. Local developers also have an explicit `bun run build:win:unsigned` path for unsigned packaging without changing the existing `build:win` command.
|
||||||
|
|
||||||
|
Verification:
|
||||||
|
- `bun test src/release-workflow.test.ts`
|
||||||
|
- `bun run typecheck`
|
||||||
|
- `node --check scripts/build-win-unsigned.mjs`
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,57 @@
|
|||||||
|
---
|
||||||
|
id: TASK-139
|
||||||
|
title: Cut patch release v0.5.3 for unsigned Windows release builds
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-09 00:00'
|
||||||
|
updated_date: '2026-03-09 00:00'
|
||||||
|
labels:
|
||||||
|
- release
|
||||||
|
- patch
|
||||||
|
dependencies:
|
||||||
|
- TASK-138
|
||||||
|
references:
|
||||||
|
- package.json
|
||||||
|
- CHANGELOG.md
|
||||||
|
- release/release-notes.md
|
||||||
|
priority: high
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Publish a patch release from the unsigned Windows release-build change by bumping the app version, generating committed changelog artifacts for `v0.5.3`, and pushing the release-prep commit.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 Repository version metadata is updated to `0.5.3`.
|
||||||
|
- [x] #2 `CHANGELOG.md` and `release/release-notes.md` contain the committed `v0.5.3` section and consumed fragments are removed.
|
||||||
|
- [x] #3 New `v0.5.3` release-prep commit is pushed to `origin/main`.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
1. Bump `package.json` from `0.5.2` to `0.5.3`.
|
||||||
|
2. Run `bun run changelog:build` so committed changelog artifacts match the new patch version.
|
||||||
|
3. Run changelog/typecheck/test verification.
|
||||||
|
4. Commit the release-prep change set and push `main`.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
Bumped `package.json` from `0.5.2` to `0.5.3`, ran `bun run changelog:build`, and committed the generated release artifacts. That prepended the `v0.5.3` section to `CHANGELOG.md`, regenerated `release/release-notes.md`, and removed the consumed `changes/unsigned-windows-release-builds.md` fragment.
|
||||||
|
|
||||||
|
Verification before push: `bun run changelog:lint`, `bun run changelog:check --version 0.5.3`, `bun run typecheck`, and `bun run test:fast`.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Prepared patch release `v0.5.3` so the unsigned Windows release-build change is captured in committed release metadata on `main`. Version metadata, changelog output, and release notes are aligned with the new patch version.
|
||||||
|
|
||||||
|
Validation: `bun run changelog:lint`, `bun run changelog:check --version 0.5.3`, `bun run typecheck`, `bun run test:fast`.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,38 @@
|
|||||||
|
---
|
||||||
|
id: TASK-140
|
||||||
|
title: Fix guessit title parsing for character dictionary sync
|
||||||
|
status: Done
|
||||||
|
assignee: []
|
||||||
|
created_date: '2026-03-09 00:00'
|
||||||
|
updated_date: '2026-03-09 00:25'
|
||||||
|
labels:
|
||||||
|
- dictionary
|
||||||
|
- anilist
|
||||||
|
- bug
|
||||||
|
- guessit
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/core/services/anilist/anilist-updater.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/core/services/anilist/anilist-updater.test.ts
|
||||||
|
priority: high
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Fix AniList character dictionary auto-sync for filenames where `guessit` misparses the full path and our title extraction keeps only the first array segment, causing AniList resolution to match the wrong anime and abort merged dictionary refresh.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 AniList media guessing passes basename-only targets to `guessit` so parent folder names do not corrupt series title detection.
|
||||||
|
- [x] #2 Guessit title arrays are combined into one usable title instead of truncating to the first segment.
|
||||||
|
- [x] #3 Regression coverage includes the Bunny Girl Senpai filename shape that previously resolved to the wrong AniList entry.
|
||||||
|
- [x] #4 Verification confirms the targeted AniList guessing tests pass.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
Root repro: `guessit` parsed the Bunny Girl Senpai full path as `title: ["Rascal", "Does-not-Dream-of-Bunny-Girl-Senapi"]`, and our `firstString` helper kept only `Rascal`, which resolved to AniList 3490 (`rayca`) and produced zero character results. Fixed by sending basename-only input to `guessit` and joining multi-part guessit title arrays.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
@@ -0,0 +1,36 @@
|
|||||||
|
---
|
||||||
|
id: TASK-141
|
||||||
|
title: Refresh current subtitle after character dictionary sync completes
|
||||||
|
status: Done
|
||||||
|
assignee: []
|
||||||
|
created_date: '2026-03-09 00:00'
|
||||||
|
updated_date: '2026-03-09 00:55'
|
||||||
|
labels:
|
||||||
|
- dictionary
|
||||||
|
- overlay
|
||||||
|
- bug
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/character-dictionary-auto-sync.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main.ts
|
||||||
|
priority: high
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
When character dictionary auto-sync finishes after startup tokenization, invalidate cached subtitle tokenization and refresh the current subtitle so character-name highlighting catches up without waiting for the next subtitle line.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 Successful character dictionary sync exposes a completion hook for main runtime follow-up.
|
||||||
|
- [x] #2 Main runtime clears Yomitan parser caches and refreshes the current subtitle after sync completion.
|
||||||
|
- [x] #3 Regression coverage verifies the sync completion callback fires on successful sync.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
Observed on Bunny Girl Senpai startup: autoplay/tokenization became ready around 8s, but snapshot/import/state write completed roughly 31s after launch, leaving the current subtitle tokenized without the newly imported character dictionary. Fixed by adding an auto-sync completion hook that clears parser caches and refreshes the current subtitle.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
@@ -0,0 +1,43 @@
|
|||||||
|
---
|
||||||
|
id: TASK-142
|
||||||
|
title: Show character dictionary auto-sync progress on OSD
|
||||||
|
status: Done
|
||||||
|
assignee: []
|
||||||
|
created_date: '2026-03-09 01:10'
|
||||||
|
updated_date: '2026-03-09 01:10'
|
||||||
|
labels:
|
||||||
|
- dictionary
|
||||||
|
- overlay
|
||||||
|
- ux
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/character-dictionary-auto-sync.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/character-dictionary-auto-sync-notifications.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main.ts
|
||||||
|
priority: medium
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
|
||||||
|
When character dictionary auto-sync runs for a newly opened anime, surface progress via the mpv OSD — without desktop notification popups — so users know why character-name lookup/highlighting is temporarily unavailable.
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
|
||||||
|
- [x] #1 Character dictionary auto-sync emits progress events for syncing, importing, ready, and failure states.
|
||||||
|
- [x] #2 Main runtime routes those progress events through OSD notifications without desktop notifications.
|
||||||
|
- [x] #3 Regression coverage verifies progress events and notification routing behavior.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
|
||||||
|
OSD now shows auto-sync phase changes while the dictionary updates. Desktop notifications were removed for this path to avoid startup popup spam.
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
@@ -0,0 +1,43 @@
|
|||||||
|
---
|
||||||
|
id: TASK-143
|
||||||
|
title: Keep character dictionary auto-sync non-blocking during startup
|
||||||
|
status: Done
|
||||||
|
assignee: []
|
||||||
|
created_date: '2026-03-09 01:45'
|
||||||
|
updated_date: '2026-03-09 01:45'
|
||||||
|
labels:
|
||||||
|
- dictionary
|
||||||
|
- startup
|
||||||
|
- performance
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/character-dictionary-auto-sync.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/current-media-tokenization-gate.ts
|
||||||
|
priority: high
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
|
||||||
|
Keep character dictionary auto-sync running in parallel during startup without delaying playback. Only tokenization readiness should gate playback; character dictionary import/settings updates should wait until tokenization is already ready and then refresh annotations afterward.
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
|
||||||
|
- [x] #1 Character dictionary snapshot/build work can run immediately during startup.
|
||||||
|
- [x] #2 Yomitan dictionary mutation work waits until current-media tokenization is ready.
|
||||||
|
- [x] #3 Regression coverage verifies auto-sync builds before the gate and only mutates Yomitan after the gate resolves.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
|
||||||
|
Added a small current-media tokenization gate in main runtime. Media changes reset the gate, the first tokenization-ready event marks it ready, and auto-sync now waits on that gate only before Yomitan dictionary inspection/import/settings updates. Snapshot generation and merged ZIP build still run immediately in parallel.
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
@@ -0,0 +1,44 @@
|
|||||||
|
---
|
||||||
|
id: TASK-144
|
||||||
|
title: Sequence startup OSD notifications for tokenization, annotations, and character dictionary sync
|
||||||
|
status: Done
|
||||||
|
assignee: []
|
||||||
|
created_date: '2026-03-09 10:40'
|
||||||
|
updated_date: '2026-03-09 10:40'
|
||||||
|
labels:
|
||||||
|
- startup
|
||||||
|
- overlay
|
||||||
|
- ux
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/startup-osd-sequencer.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/subtitle-tokenization-main-deps.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/character-dictionary-auto-sync-notifications.ts
|
||||||
|
priority: medium
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
|
||||||
|
Keep startup OSD progress ordered. While tokenization is still pending, only show the tokenization loading message. After tokenization becomes ready, show annotation loading if annotation warmup still remains. Only surface character dictionary auto-sync progress after annotation loading clears, and only if the dictionary work is still active.
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
|
||||||
|
- [x] #1 Character dictionary progress stays hidden while tokenization startup loading is still active.
|
||||||
|
- [x] #2 Annotation loading OSD appears after tokenization readiness and before any later character dictionary progress.
|
||||||
|
- [x] #3 Regression coverage verifies buffered dictionary progress/failure ordering during startup.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
|
||||||
|
Added a small startup OSD sequencer in main runtime. Annotation warmup OSD now flows through that sequencer, and character dictionary sync notifications buffer until tokenization plus annotation loading clear. Buffered `ready` updates are dropped if dictionary progress finished before it ever became visible, while buffered failures still surface after annotation loading completes.
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
@@ -0,0 +1,43 @@
|
|||||||
|
---
|
||||||
|
id: TASK-145
|
||||||
|
title: Show character dictionary build progress on startup OSD before import
|
||||||
|
status: Done
|
||||||
|
assignee: []
|
||||||
|
created_date: '2026-03-09 11:20'
|
||||||
|
updated_date: '2026-03-09 11:20'
|
||||||
|
labels:
|
||||||
|
- startup
|
||||||
|
- dictionary
|
||||||
|
- ux
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/character-dictionary-auto-sync.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/startup-osd-sequencer.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/character-dictionary-auto-sync.test.ts
|
||||||
|
priority: medium
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
|
||||||
|
Surface an explicit character-dictionary build phase on startup OSD so there is visible progress between subtitle annotation loading and the later import/upload step when merged dictionary generation is still running.
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
|
||||||
|
- [x] #1 Auto-sync emits a dedicated in-flight status while merged dictionary generation is running.
|
||||||
|
- [x] #2 Startup OSD sequencing treats that build phase as progress and can surface it after annotation loading clears.
|
||||||
|
- [x] #3 Regression coverage verifies the build phase is emitted before import begins.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
|
||||||
|
Added a `building` progress phase before `buildMergedDictionary(...)` and included it in the startup OSD sequencer's buffered progress set. This gives startup a visible dictionary-progress step even when snapshot checking/generation finished too early to still be relevant by the time annotation loading completes.
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
@@ -0,0 +1,46 @@
|
|||||||
|
---
|
||||||
|
id: TASK-145
|
||||||
|
title: Show checking and generation OSD for character dictionary auto-sync
|
||||||
|
status: Done
|
||||||
|
assignee: []
|
||||||
|
created_date: '2026-03-09 11:20'
|
||||||
|
updated_date: '2026-03-09 11:20'
|
||||||
|
labels:
|
||||||
|
- dictionary
|
||||||
|
- overlay
|
||||||
|
- ux
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/character-dictionary-auto-sync.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/startup-osd-sequencer.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main/character-dictionary-runtime.ts
|
||||||
|
priority: medium
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
|
||||||
|
Surface an immediate startup OSD that the character dictionary is being checked, and show a distinct generating message only when the current AniList media actually needs a fresh snapshot build instead of reusing a cached one.
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
|
||||||
|
- [x] #1 Auto-sync emits a `checking` progress event before snapshot resolution completes.
|
||||||
|
- [x] #2 Auto-sync emits `generating` only for snapshot cache misses and keeps `updating`/`importing` as later phases.
|
||||||
|
- [x] #3 Startup OSD sequencing still prioritizes tokenization then annotation loading before buffered dictionary progress.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
|
||||||
|
Character dictionary auto-sync now emits `Checking character dictionary...` as soon as the AniList media is resolved, then emits `Generating character dictionary...` only when the snapshot layer misses and a real rebuild begins. Cached snapshots skip the generating phase and continue straight into the later update/import flow.
|
||||||
|
|
||||||
|
Wired those progress callbacks through the character-dictionary runtime boundary, updated the startup OSD sequencer to treat checking/generating as dictionary-progress phases with the same tokenization and annotation precedence, and added regression coverage for cache-hit vs cache-miss behavior plus buffered startup ordering.
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,66 @@
|
|||||||
|
---
|
||||||
|
id: TASK-146
|
||||||
|
title: Forward overlay Tab to mpv for AniSkip
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-09 00:00'
|
||||||
|
updated_date: '2026-03-09 00:00'
|
||||||
|
labels:
|
||||||
|
- bug
|
||||||
|
- overlay
|
||||||
|
- aniskip
|
||||||
|
- linux
|
||||||
|
dependencies: []
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
|
||||||
|
Fix visible-overlay keyboard handling so bare `Tab` is forwarded to mpv instead of being consumed by Electron focus navigation. This restores the default AniSkip `TAB` binding while the overlay has focus, especially on Linux.
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
|
||||||
|
- [x] #1 Visible overlay forwards bare `Tab` to mpv as `keypress TAB`.
|
||||||
|
- [x] #2 Modal overlays keep their existing local `Tab` behavior.
|
||||||
|
- [x] #3 Automated regression coverage exists for the input handler and overlay factory wiring.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
|
||||||
|
1. Add a failing regression around visible-overlay `before-input-event` handling for bare `Tab`.
|
||||||
|
2. Add/extend overlay factory tests so the new mpv-forward callback is wired through runtime construction.
|
||||||
|
3. Patch overlay input handling to intercept visible-overlay `Tab` and send mpv `keypress TAB`.
|
||||||
|
4. Run focused overlay tests, typecheck, and changelog validation.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
|
||||||
|
Extracted visible-overlay input handling into `src/core/services/overlay-window-input.ts` so the `Tab` forwarding decision can be unit tested without loading Electron window primitives.
|
||||||
|
|
||||||
|
Visible overlay `before-input-event` now intercepts bare `Tab`, prevents the browser default, and forwards mpv `keypress TAB` through the existing mpv runtime command path. Modal overlays remain unchanged.
|
||||||
|
|
||||||
|
Verification:
|
||||||
|
|
||||||
|
- `bun test src/core/services/overlay-window.test.ts src/main/runtime/overlay-window-factory.test.ts src/main/runtime/overlay-window-factory-main-deps.test.ts src/main/runtime/overlay-window-runtime-handlers.test.ts`
|
||||||
|
- `bun x tsc --noEmit`
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
|
||||||
|
Visible overlay focus no longer blocks the default AniSkip `Tab` binding. Bare `Tab` is now forwarded straight to mpv while the visible overlay is active, and modal overlays still retain their own normal focus behavior.
|
||||||
|
|
||||||
|
Added regression coverage for both the input-routing decision and the runtime plumbing that carries the new mpv forwarder into overlay window creation.
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,45 @@
|
|||||||
|
---
|
||||||
|
id: TASK-148
|
||||||
|
title: Fix Windows plugin env binary override resolution
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-09 00:00'
|
||||||
|
updated_date: '2026-03-09 00:00'
|
||||||
|
labels:
|
||||||
|
- windows
|
||||||
|
- plugin
|
||||||
|
- regression
|
||||||
|
dependencies: []
|
||||||
|
priority: medium
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
|
||||||
|
Fix the mpv plugin's Windows binary override lookup so `SUBMINER_BINARY_PATH` still resolves when `SUBMINER_APPIMAGE_PATH` is unset. The current Lua resolver builds an array with a leading `nil`, which causes `ipairs` iteration to stop before the later Windows override candidate.
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
|
||||||
|
- [x] #1 `scripts/test-plugin-binary-windows.lua` passes the env override regression that expects `.exe` suffix resolution from `SUBMINER_BINARY_PATH`.
|
||||||
|
- [x] #2 Existing plugin start/binary test gate stays green after the fix.
|
||||||
|
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
|
||||||
|
Updated `plugin/subminer/binary.lua` so env override lookup checks `SUBMINER_APPIMAGE_PATH` and `SUBMINER_BINARY_PATH` sequentially instead of via a Lua array literal that truncates at the first `nil`. This restores Windows `.exe` suffix resolution for `SUBMINER_BINARY_PATH` when the AppImage env var is unset.
|
||||||
|
|
||||||
|
Verification:
|
||||||
|
|
||||||
|
- `lua scripts/test-plugin-binary-windows.lua`
|
||||||
|
- `bun run test:plugin:src`
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,71 @@
|
|||||||
|
---
|
||||||
|
id: TASK-149
|
||||||
|
title: Cut patch release v0.5.5 for character dictionary updates and release guarding
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-09 01:10'
|
||||||
|
updated_date: '2026-03-09 01:14'
|
||||||
|
labels:
|
||||||
|
- release
|
||||||
|
- patch
|
||||||
|
dependencies:
|
||||||
|
- TASK-140
|
||||||
|
- TASK-141
|
||||||
|
- TASK-142
|
||||||
|
- TASK-143
|
||||||
|
- TASK-144
|
||||||
|
- TASK-145
|
||||||
|
- TASK-146
|
||||||
|
- TASK-148
|
||||||
|
references:
|
||||||
|
- package.json
|
||||||
|
- CHANGELOG.md
|
||||||
|
- scripts/build-changelog.ts
|
||||||
|
- scripts/build-changelog.test.ts
|
||||||
|
- docs/RELEASING.md
|
||||||
|
priority: high
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Prepare and publish patch release `v0.5.5` after the failed `v0.5.4` tag by aligning package version metadata, generating committed changelog output from the pending release fragments, and hardening release validation so a future tag cannot ship with a mismatched `package.json` version.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 Repository version metadata is updated to `0.5.5`.
|
||||||
|
- [x] #2 `CHANGELOG.md` contains the committed `v0.5.5` section and the consumed fragments are removed.
|
||||||
|
- [x] #3 Release validation rejects a requested release version when it differs from `package.json`.
|
||||||
|
- [x] #4 Release docs capture the required version/changelog prep before tagging.
|
||||||
|
- [x] #5 New `v0.5.5` release-prep commit and tag are pushed to `origin/main`.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
1. Add a regression test for tagged-release/package version mismatch.
|
||||||
|
2. Update changelog validation to reject mismatched explicit release versions.
|
||||||
|
3. Bump `package.json`, generate committed `v0.5.5` changelog output, and remove consumed fragments.
|
||||||
|
4. Add a short `docs/RELEASING.md` checklist for the prep flow.
|
||||||
|
5. Run release verification, commit, tag, and push.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
Added a regression test in `scripts/build-changelog.test.ts` that proves `changelog:check --version ...` rejects tag/package mismatches. Updated `scripts/build-changelog.ts` so tagged release validation now compares the explicit requested version against `package.json` before looking for pending fragments or the committed changelog section.
|
||||||
|
|
||||||
|
Bumped `package.json` from `0.5.3` to `0.5.5`, ran `bun run changelog:build --version 0.5.5 --date 2026-03-09`, and committed the generated `CHANGELOG.md` output while removing the consumed task fragments. Added `docs/RELEASING.md` with the required release-prep checklist so version bump + changelog generation happen before tagging.
|
||||||
|
|
||||||
|
Verification: `bun run changelog:lint`, `bun run changelog:check --version 0.5.5`, `bun run typecheck`, `bun run test:fast`, and `bun test scripts/build-changelog.test.ts src/release-workflow.test.ts`. `bun run format:check` still reports many unrelated pre-existing repo-wide Prettier warnings, so touched files were checked/formatted separately with `bunx prettier`.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Prepared patch release `v0.5.5` after the failed `v0.5.4` release attempt. Release metadata now matches the upcoming tag, the pending character-dictionary/overlay/plugin fragments are committed into `CHANGELOG.md`, and release validation now blocks future tag/package mismatches before publish.
|
||||||
|
|
||||||
|
Docs now include a short release checklist in `docs/RELEASING.md`. Validation passed for changelog lint/check, typecheck, targeted workflow tests, and the full fast test suite. Repo-wide Prettier remains noisy from unrelated existing files, but touched release files were formatted and verified.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
153
build/installer.nsh
Normal file
153
build/installer.nsh
Normal file
@@ -0,0 +1,153 @@
|
|||||||
|
!include "MUI2.nsh"
|
||||||
|
!include "nsDialogs.nsh"
|
||||||
|
|
||||||
|
Var WindowsMpvShortcutStartMenuPath
|
||||||
|
Var WindowsMpvShortcutDesktopPath
|
||||||
|
|
||||||
|
!macro ResolveWindowsMpvShortcutPaths
|
||||||
|
!ifdef MENU_FILENAME
|
||||||
|
StrCpy $WindowsMpvShortcutStartMenuPath "$SMPROGRAMS\${MENU_FILENAME}\SubMiner mpv.lnk"
|
||||||
|
!else
|
||||||
|
StrCpy $WindowsMpvShortcutStartMenuPath "$SMPROGRAMS\SubMiner mpv.lnk"
|
||||||
|
!endif
|
||||||
|
StrCpy $WindowsMpvShortcutDesktopPath "$DESKTOP\SubMiner mpv.lnk"
|
||||||
|
!macroend
|
||||||
|
|
||||||
|
!ifndef BUILD_UNINSTALLER
|
||||||
|
Var WindowsMpvShortcutStartMenuCheckbox
|
||||||
|
Var WindowsMpvShortcutDesktopCheckbox
|
||||||
|
Var WindowsMpvShortcutStartMenuEnabled
|
||||||
|
Var WindowsMpvShortcutDesktopEnabled
|
||||||
|
Var WindowsMpvShortcutDefaultsInitialized
|
||||||
|
|
||||||
|
!macro customInit
|
||||||
|
StrCpy $WindowsMpvShortcutStartMenuEnabled "1"
|
||||||
|
StrCpy $WindowsMpvShortcutDesktopEnabled "1"
|
||||||
|
StrCpy $WindowsMpvShortcutDefaultsInitialized "0"
|
||||||
|
!macroend
|
||||||
|
|
||||||
|
!macro customPageAfterChangeDir
|
||||||
|
PageEx custom
|
||||||
|
PageCallbacks WindowsMpvShortcutPageCreate WindowsMpvShortcutPageLeave
|
||||||
|
Caption " "
|
||||||
|
PageExEnd
|
||||||
|
!macroend
|
||||||
|
|
||||||
|
Function HasExistingInstallation
|
||||||
|
ReadRegStr $0 SHELL_CONTEXT "Software\${APP_GUID}" InstallLocation
|
||||||
|
${if} $0 == ""
|
||||||
|
Push "0"
|
||||||
|
${else}
|
||||||
|
Push "1"
|
||||||
|
${endif}
|
||||||
|
FunctionEnd
|
||||||
|
|
||||||
|
Function InitializeWindowsMpvShortcutDefaults
|
||||||
|
${if} $WindowsMpvShortcutDefaultsInitialized == "1"
|
||||||
|
Return
|
||||||
|
${endif}
|
||||||
|
|
||||||
|
!insertmacro ResolveWindowsMpvShortcutPaths
|
||||||
|
Call HasExistingInstallation
|
||||||
|
Pop $0
|
||||||
|
|
||||||
|
${if} $0 == "1"
|
||||||
|
${if} ${FileExists} "$WindowsMpvShortcutStartMenuPath"
|
||||||
|
StrCpy $WindowsMpvShortcutStartMenuEnabled "1"
|
||||||
|
${else}
|
||||||
|
StrCpy $WindowsMpvShortcutStartMenuEnabled "0"
|
||||||
|
${endif}
|
||||||
|
|
||||||
|
${if} ${FileExists} "$WindowsMpvShortcutDesktopPath"
|
||||||
|
StrCpy $WindowsMpvShortcutDesktopEnabled "1"
|
||||||
|
${else}
|
||||||
|
StrCpy $WindowsMpvShortcutDesktopEnabled "0"
|
||||||
|
${endif}
|
||||||
|
${else}
|
||||||
|
StrCpy $WindowsMpvShortcutStartMenuEnabled "1"
|
||||||
|
StrCpy $WindowsMpvShortcutDesktopEnabled "1"
|
||||||
|
${endif}
|
||||||
|
|
||||||
|
StrCpy $WindowsMpvShortcutDefaultsInitialized "1"
|
||||||
|
FunctionEnd
|
||||||
|
|
||||||
|
Function WindowsMpvShortcutPageCreate
|
||||||
|
Call InitializeWindowsMpvShortcutDefaults
|
||||||
|
|
||||||
|
!insertmacro MUI_HEADER_TEXT "Windows mpv launcher" "Choose where to create the optional SubMiner mpv shortcuts."
|
||||||
|
|
||||||
|
nsDialogs::Create 1018
|
||||||
|
Pop $0
|
||||||
|
|
||||||
|
${NSD_CreateLabel} 0u 0u 300u 30u "SubMiner mpv launches SubMiner.exe --launch-mpv so people can open mpv with the SubMiner profile from a separate Windows shortcut."
|
||||||
|
Pop $0
|
||||||
|
|
||||||
|
${NSD_CreateCheckbox} 0u 44u 280u 12u "Create Start Menu shortcut"
|
||||||
|
Pop $WindowsMpvShortcutStartMenuCheckbox
|
||||||
|
${if} $WindowsMpvShortcutStartMenuEnabled == "1"
|
||||||
|
${NSD_Check} $WindowsMpvShortcutStartMenuCheckbox
|
||||||
|
${endif}
|
||||||
|
|
||||||
|
${NSD_CreateCheckbox} 0u 64u 280u 12u "Create Desktop shortcut"
|
||||||
|
Pop $WindowsMpvShortcutDesktopCheckbox
|
||||||
|
${if} $WindowsMpvShortcutDesktopEnabled == "1"
|
||||||
|
${NSD_Check} $WindowsMpvShortcutDesktopCheckbox
|
||||||
|
${endif}
|
||||||
|
|
||||||
|
${NSD_CreateLabel} 0u 90u 300u 24u "Upgrades preserve the current SubMiner mpv shortcut locations instead of recreating shortcuts you already removed."
|
||||||
|
Pop $0
|
||||||
|
|
||||||
|
nsDialogs::Show
|
||||||
|
FunctionEnd
|
||||||
|
|
||||||
|
Function WindowsMpvShortcutPageLeave
|
||||||
|
${NSD_GetState} $WindowsMpvShortcutStartMenuCheckbox $0
|
||||||
|
${if} $0 == ${BST_CHECKED}
|
||||||
|
StrCpy $WindowsMpvShortcutStartMenuEnabled "1"
|
||||||
|
${else}
|
||||||
|
StrCpy $WindowsMpvShortcutStartMenuEnabled "0"
|
||||||
|
${endif}
|
||||||
|
|
||||||
|
${NSD_GetState} $WindowsMpvShortcutDesktopCheckbox $0
|
||||||
|
${if} $0 == ${BST_CHECKED}
|
||||||
|
StrCpy $WindowsMpvShortcutDesktopEnabled "1"
|
||||||
|
${else}
|
||||||
|
StrCpy $WindowsMpvShortcutDesktopEnabled "0"
|
||||||
|
${endif}
|
||||||
|
FunctionEnd
|
||||||
|
|
||||||
|
!macro customInstall
|
||||||
|
Call InitializeWindowsMpvShortcutDefaults
|
||||||
|
!insertmacro ResolveWindowsMpvShortcutPaths
|
||||||
|
|
||||||
|
${if} $WindowsMpvShortcutStartMenuEnabled == "1"
|
||||||
|
!ifdef MENU_FILENAME
|
||||||
|
CreateDirectory "$SMPROGRAMS\${MENU_FILENAME}"
|
||||||
|
!endif
|
||||||
|
CreateShortCut "$WindowsMpvShortcutStartMenuPath" "$appExe" "--launch-mpv" "$appExe" 0 "" "" "Launch mpv with the SubMiner profile"
|
||||||
|
# electron-builder's upstream NSIS templates use the same WinShell call for AppUserModelID wiring.
|
||||||
|
# WinShell.dll comes from electron-builder's cached nsis-resources bundle, so bun run build:win needs no extra repo-local setup.
|
||||||
|
ClearErrors
|
||||||
|
WinShell::SetLnkAUMI "$WindowsMpvShortcutStartMenuPath" "${APP_ID}"
|
||||||
|
${else}
|
||||||
|
Delete "$WindowsMpvShortcutStartMenuPath"
|
||||||
|
${endif}
|
||||||
|
|
||||||
|
${if} $WindowsMpvShortcutDesktopEnabled == "1"
|
||||||
|
CreateShortCut "$WindowsMpvShortcutDesktopPath" "$appExe" "--launch-mpv" "$appExe" 0 "" "" "Launch mpv with the SubMiner profile"
|
||||||
|
# ClearErrors keeps the optional AUMI assignment non-fatal if the packaging environment is missing WinShell.
|
||||||
|
ClearErrors
|
||||||
|
WinShell::SetLnkAUMI "$WindowsMpvShortcutDesktopPath" "${APP_ID}"
|
||||||
|
${else}
|
||||||
|
Delete "$WindowsMpvShortcutDesktopPath"
|
||||||
|
${endif}
|
||||||
|
|
||||||
|
System::Call 'Shell32::SHChangeNotify(i 0x8000000, i 0, i 0, i 0)'
|
||||||
|
!macroend
|
||||||
|
!endif
|
||||||
|
|
||||||
|
!macro customUnInstall
|
||||||
|
!insertmacro ResolveWindowsMpvShortcutPaths
|
||||||
|
Delete "$WindowsMpvShortcutStartMenuPath"
|
||||||
|
Delete "$WindowsMpvShortcutDesktopPath"
|
||||||
|
!macroend
|
||||||
21
build/signpath-windows-artifact-config.xml
Normal file
21
build/signpath-windows-artifact-config.xml
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
<?xml version="1.0" encoding="utf-8"?>
|
||||||
|
<artifact-configuration xmlns="http://signpath.io/artifact-configuration/v1">
|
||||||
|
<zip-file>
|
||||||
|
<pe-file path="SubMiner-*.exe" max-matches="unbounded">
|
||||||
|
<authenticode-sign />
|
||||||
|
</pe-file>
|
||||||
|
<zip-file path="SubMiner-*.zip" max-matches="unbounded">
|
||||||
|
<directory path="*">
|
||||||
|
<pe-file path="*.exe" max-matches="unbounded">
|
||||||
|
<authenticode-sign />
|
||||||
|
</pe-file>
|
||||||
|
<pe-file path="*.dll" max-matches="unbounded">
|
||||||
|
<authenticode-sign />
|
||||||
|
</pe-file>
|
||||||
|
<pe-file path="*.node" max-matches="unbounded">
|
||||||
|
<authenticode-sign />
|
||||||
|
</pe-file>
|
||||||
|
</directory>
|
||||||
|
</zip-file>
|
||||||
|
</zip-file>
|
||||||
|
</artifact-configuration>
|
||||||
27
bun.lock
27
bun.lock
@@ -9,6 +9,7 @@
|
|||||||
"commander": "^14.0.3",
|
"commander": "^14.0.3",
|
||||||
"discord-rpc": "^4.0.1",
|
"discord-rpc": "^4.0.1",
|
||||||
"jsonc-parser": "^3.3.1",
|
"jsonc-parser": "^3.3.1",
|
||||||
|
"libsql": "^0.5.22",
|
||||||
"ws": "^8.19.0",
|
"ws": "^8.19.0",
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
@@ -99,10 +100,30 @@
|
|||||||
|
|
||||||
"@isaacs/fs-minipass": ["@isaacs/fs-minipass@4.0.1", "", { "dependencies": { "minipass": "7.1.2" } }, "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w=="],
|
"@isaacs/fs-minipass": ["@isaacs/fs-minipass@4.0.1", "", { "dependencies": { "minipass": "7.1.2" } }, "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w=="],
|
||||||
|
|
||||||
|
"@libsql/darwin-arm64": ["@libsql/darwin-arm64@0.5.22", "", { "os": "darwin", "cpu": "arm64" }, "sha512-4B8ZlX3nIDPndfct7GNe0nI3Yw6ibocEicWdC4fvQbSs/jdq/RC2oCsoJxJ4NzXkvktX70C1J4FcmmoBy069UA=="],
|
||||||
|
|
||||||
|
"@libsql/darwin-x64": ["@libsql/darwin-x64@0.5.22", "", { "os": "darwin", "cpu": "x64" }, "sha512-ny2HYWt6lFSIdNFzUFIJ04uiW6finXfMNJ7wypkAD8Pqdm6nAByO+Fdqu8t7sD0sqJGeUCiOg480icjyQ2/8VA=="],
|
||||||
|
|
||||||
|
"@libsql/linux-arm-gnueabihf": ["@libsql/linux-arm-gnueabihf@0.5.22", "", { "os": "linux", "cpu": "arm" }, "sha512-3Uo3SoDPJe/zBnyZKosziRGtszXaEtv57raWrZIahtQDsjxBVjuzYQinCm9LRCJCUT5t2r5Z5nLDPJi2CwZVoA=="],
|
||||||
|
|
||||||
|
"@libsql/linux-arm-musleabihf": ["@libsql/linux-arm-musleabihf@0.5.22", "", { "os": "linux", "cpu": "arm" }, "sha512-LCsXh07jvSojTNJptT9CowOzwITznD+YFGGW+1XxUr7fS+7/ydUrpDfsMX7UqTqjm7xG17eq86VkWJgHJfvpNg=="],
|
||||||
|
|
||||||
|
"@libsql/linux-arm64-gnu": ["@libsql/linux-arm64-gnu@0.5.22", "", { "os": "linux", "cpu": "arm64" }, "sha512-KSdnOMy88c9mpOFKUEzPskSaF3VLflfSUCBwas/pn1/sV3pEhtMF6H8VUCd2rsedwoukeeCSEONqX7LLnQwRMA=="],
|
||||||
|
|
||||||
|
"@libsql/linux-arm64-musl": ["@libsql/linux-arm64-musl@0.5.22", "", { "os": "linux", "cpu": "arm64" }, "sha512-mCHSMAsDTLK5YH//lcV3eFEgiR23Ym0U9oEvgZA0667gqRZg/2px+7LshDvErEKv2XZ8ixzw3p1IrBzLQHGSsw=="],
|
||||||
|
|
||||||
|
"@libsql/linux-x64-gnu": ["@libsql/linux-x64-gnu@0.5.22", "", { "os": "linux", "cpu": "x64" }, "sha512-kNBHaIkSg78Y4BqAdgjcR2mBilZXs4HYkAmi58J+4GRwDQZh5fIUWbnQvB9f95DkWUIGVeenqLRFY2pcTmlsew=="],
|
||||||
|
|
||||||
|
"@libsql/linux-x64-musl": ["@libsql/linux-x64-musl@0.5.22", "", { "os": "linux", "cpu": "x64" }, "sha512-UZ4Xdxm4pu3pQXjvfJiyCzZop/9j/eA2JjmhMaAhe3EVLH2g11Fy4fwyUp9sT1QJYR1kpc2JLuybPM0kuXv/Tg=="],
|
||||||
|
|
||||||
|
"@libsql/win32-x64-msvc": ["@libsql/win32-x64-msvc@0.5.22", "", { "os": "win32", "cpu": "x64" }, "sha512-Fj0j8RnBpo43tVZUVoNK6BV/9AtDUM5S7DF3LB4qTYg1LMSZqi3yeCneUTLJD6XomQJlZzbI4mst89yspVSAnA=="],
|
||||||
|
|
||||||
"@malept/cross-spawn-promise": ["@malept/cross-spawn-promise@2.0.0", "", { "dependencies": { "cross-spawn": "7.0.6" } }, "sha512-1DpKU0Z5ThltBwjNySMC14g0CkbyhCaz9FkhxqNsZI6uAPJXFS8cMXlBKo26FJ8ZuW6S9GCMcR9IO5k2X5/9Fg=="],
|
"@malept/cross-spawn-promise": ["@malept/cross-spawn-promise@2.0.0", "", { "dependencies": { "cross-spawn": "7.0.6" } }, "sha512-1DpKU0Z5ThltBwjNySMC14g0CkbyhCaz9FkhxqNsZI6uAPJXFS8cMXlBKo26FJ8ZuW6S9GCMcR9IO5k2X5/9Fg=="],
|
||||||
|
|
||||||
"@malept/flatpak-bundler": ["@malept/flatpak-bundler@0.4.0", "", { "dependencies": { "debug": "4.4.3", "fs-extra": "9.1.0", "lodash": "4.17.23", "tmp-promise": "3.0.3" } }, "sha512-9QOtNffcOF/c1seMCDnjckb3R9WHcG34tky+FHpNKKCW0wc/scYLwMtO+ptyGUfMW0/b/n4qRiALlaFHc9Oj7Q=="],
|
"@malept/flatpak-bundler": ["@malept/flatpak-bundler@0.4.0", "", { "dependencies": { "debug": "4.4.3", "fs-extra": "9.1.0", "lodash": "4.17.23", "tmp-promise": "3.0.3" } }, "sha512-9QOtNffcOF/c1seMCDnjckb3R9WHcG34tky+FHpNKKCW0wc/scYLwMtO+ptyGUfMW0/b/n4qRiALlaFHc9Oj7Q=="],
|
||||||
|
|
||||||
|
"@neon-rs/load": ["@neon-rs/load@0.0.4", "", {}, "sha512-kTPhdZyTQxB+2wpiRcFWrDcejc4JI6tkPuS7UZCG4l6Zvc5kU/gGQ/ozvHTh1XR5tS+UlfAfGuPajjzQjCiHCw=="],
|
||||||
|
|
||||||
"@npmcli/agent": ["@npmcli/agent@3.0.0", "", { "dependencies": { "agent-base": "7.1.4", "http-proxy-agent": "7.0.2", "https-proxy-agent": "7.0.6", "lru-cache": "10.4.3", "socks-proxy-agent": "8.0.5" } }, "sha512-S79NdEgDQd/NGCay6TCoVzXSj74skRZIKJcpJjC5lOq34SZzyI6MqtiiWoiVWoVrTcGjNeC4ipbh1VIHlpfF5Q=="],
|
"@npmcli/agent": ["@npmcli/agent@3.0.0", "", { "dependencies": { "agent-base": "7.1.4", "http-proxy-agent": "7.0.2", "https-proxy-agent": "7.0.6", "lru-cache": "10.4.3", "socks-proxy-agent": "8.0.5" } }, "sha512-S79NdEgDQd/NGCay6TCoVzXSj74skRZIKJcpJjC5lOq34SZzyI6MqtiiWoiVWoVrTcGjNeC4ipbh1VIHlpfF5Q=="],
|
||||||
|
|
||||||
"@npmcli/fs": ["@npmcli/fs@4.0.0", "", { "dependencies": { "semver": "7.7.4" } }, "sha512-/xGlezI6xfGO9NwuJlnwz/K14qD1kCSAGtacBHnGzeAIuJGazcp45KP5NuyARXoKb7cwulAGWVsbeSxdG/cb0Q=="],
|
"@npmcli/fs": ["@npmcli/fs@4.0.0", "", { "dependencies": { "semver": "7.7.4" } }, "sha512-/xGlezI6xfGO9NwuJlnwz/K14qD1kCSAGtacBHnGzeAIuJGazcp45KP5NuyARXoKb7cwulAGWVsbeSxdG/cb0Q=="],
|
||||||
@@ -255,7 +276,7 @@
|
|||||||
|
|
||||||
"delayed-stream": ["delayed-stream@1.0.0", "", {}, "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ=="],
|
"delayed-stream": ["delayed-stream@1.0.0", "", {}, "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ=="],
|
||||||
|
|
||||||
"detect-libc": ["detect-libc@2.1.2", "", {}, "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ=="],
|
"detect-libc": ["detect-libc@2.0.2", "", {}, "sha512-UX6sGumvvqSaXgdKGUsgZWqcUyIXZ/vZTrlRT/iobiKhGL0zL4d3osHj3uqllWJK+i+sixDS/3COVEOFbupFyw=="],
|
||||||
|
|
||||||
"detect-node": ["detect-node@2.1.0", "", {}, "sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g=="],
|
"detect-node": ["detect-node@2.1.0", "", {}, "sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g=="],
|
||||||
|
|
||||||
@@ -433,6 +454,8 @@
|
|||||||
|
|
||||||
"lazy-val": ["lazy-val@1.0.5", "", {}, "sha512-0/BnGCCfyUMkBpeDgWihanIAF9JmZhHBgUhEqzvf+adhNGLoP6TaiI5oF8oyb3I45P+PcnrqihSf01M0l0G5+Q=="],
|
"lazy-val": ["lazy-val@1.0.5", "", {}, "sha512-0/BnGCCfyUMkBpeDgWihanIAF9JmZhHBgUhEqzvf+adhNGLoP6TaiI5oF8oyb3I45P+PcnrqihSf01M0l0G5+Q=="],
|
||||||
|
|
||||||
|
"libsql": ["libsql@0.5.22", "", { "dependencies": { "@neon-rs/load": "^0.0.4", "detect-libc": "2.0.2" }, "optionalDependencies": { "@libsql/darwin-arm64": "0.5.22", "@libsql/darwin-x64": "0.5.22", "@libsql/linux-arm-gnueabihf": "0.5.22", "@libsql/linux-arm-musleabihf": "0.5.22", "@libsql/linux-arm64-gnu": "0.5.22", "@libsql/linux-arm64-musl": "0.5.22", "@libsql/linux-x64-gnu": "0.5.22", "@libsql/linux-x64-musl": "0.5.22", "@libsql/win32-x64-msvc": "0.5.22" }, "os": [ "linux", "win32", "darwin", ], "cpu": [ "arm", "x64", "arm64", ] }, "sha512-NscWthMQt7fpU8lqd7LXMvT9pi+KhhmTHAJWUB/Lj6MWa0MKFv0F2V4C6WKKpjCVZl0VwcDz4nOI3CyaT1DDiA=="],
|
||||||
|
|
||||||
"lodash": ["lodash@4.17.23", "", {}, "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w=="],
|
"lodash": ["lodash@4.17.23", "", {}, "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w=="],
|
||||||
|
|
||||||
"log-symbols": ["log-symbols@4.1.0", "", { "dependencies": { "chalk": "4.1.2", "is-unicode-supported": "0.1.0" } }, "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg=="],
|
"log-symbols": ["log-symbols@4.1.0", "", { "dependencies": { "chalk": "4.1.2", "is-unicode-supported": "0.1.0" } }, "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg=="],
|
||||||
@@ -703,6 +726,8 @@
|
|||||||
|
|
||||||
"@electron/osx-sign/isbinaryfile": ["isbinaryfile@4.0.10", "", {}, "sha512-iHrqe5shvBUcFbmZq9zOQHBoeOhZJu6RQGrDpBgenUm/Am+F3JM2MgQj+rK3Z601fzrL5gLZWtAPH2OBaSVcyw=="],
|
"@electron/osx-sign/isbinaryfile": ["isbinaryfile@4.0.10", "", {}, "sha512-iHrqe5shvBUcFbmZq9zOQHBoeOhZJu6RQGrDpBgenUm/Am+F3JM2MgQj+rK3Z601fzrL5gLZWtAPH2OBaSVcyw=="],
|
||||||
|
|
||||||
|
"@electron/rebuild/detect-libc": ["detect-libc@2.1.2", "", {}, "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ=="],
|
||||||
|
|
||||||
"@electron/rebuild/semver": ["semver@7.7.4", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA=="],
|
"@electron/rebuild/semver": ["semver@7.7.4", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA=="],
|
||||||
|
|
||||||
"@electron/universal/fs-extra": ["fs-extra@11.3.3", "", { "dependencies": { "graceful-fs": "4.2.11", "jsonfile": "6.2.0", "universalify": "2.0.1" } }, "sha512-VWSRii4t0AFm6ixFFmLLx1t7wS1gh+ckoa84aOeapGum0h+EZd1EhEumSB+ZdDLnEPuucsVB9oB7cxJHap6Afg=="],
|
"@electron/universal/fs-extra": ["fs-extra@11.3.3", "", { "dependencies": { "graceful-fs": "4.2.11", "jsonfile": "6.2.0", "universalify": "2.0.1" } }, "sha512-VWSRii4t0AFm6ixFFmLLx1t7wS1gh+ckoa84aOeapGum0h+EZd1EhEumSB+ZdDLnEPuucsVB9oB7cxJHap6Afg=="],
|
||||||
|
|||||||
21
changes/README.md
Normal file
21
changes/README.md
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
# Changelog Fragments
|
||||||
|
|
||||||
|
Add one `.md` file per user-visible PR in this directory.
|
||||||
|
|
||||||
|
Use this format:
|
||||||
|
|
||||||
|
```md
|
||||||
|
type: added
|
||||||
|
area: overlay
|
||||||
|
|
||||||
|
- Added keyboard navigation for Yomitan popups.
|
||||||
|
- Added auto-pause toggle when opening the popup.
|
||||||
|
```
|
||||||
|
|
||||||
|
Rules:
|
||||||
|
|
||||||
|
- `type` required: `added`, `changed`, `fixed`, `docs`, or `internal`
|
||||||
|
- `area` required: short product area like `overlay`, `launcher`, `release`
|
||||||
|
- each non-empty body line becomes a bullet
|
||||||
|
- `README.md` is ignored by the generator
|
||||||
|
- if a PR should not produce release notes, apply the `skip-changelog` label instead of adding a fragment
|
||||||
@@ -2,7 +2,7 @@
|
|||||||
* SubMiner Example Configuration File
|
* SubMiner Example Configuration File
|
||||||
*
|
*
|
||||||
* This file is auto-generated from src/config/definitions.ts.
|
* This file is auto-generated from src/config/definitions.ts.
|
||||||
* Copy to $XDG_CONFIG_HOME/SubMiner/config.jsonc (or ~/.config/SubMiner/config.jsonc) and edit as needed.
|
* Copy to %APPDATA%/SubMiner/config.jsonc on Windows, or $XDG_CONFIG_HOME/SubMiner/config.jsonc (or ~/.config/SubMiner/config.jsonc) on Linux/macOS.
|
||||||
*/
|
*/
|
||||||
{
|
{
|
||||||
|
|
||||||
@@ -185,18 +185,33 @@
|
|||||||
"wordSpacing": 0, // Word spacing setting.
|
"wordSpacing": 0, // Word spacing setting.
|
||||||
"fontKerning": "normal", // Font kerning setting.
|
"fontKerning": "normal", // Font kerning setting.
|
||||||
"textRendering": "geometricPrecision", // Text rendering setting.
|
"textRendering": "geometricPrecision", // Text rendering setting.
|
||||||
"textShadow": "0 3px 10px rgba(0,0,0,0.69)", // Text shadow setting.
|
"textShadow": "0 2px 4px rgba(0,0,0,0.95), 0 0 8px rgba(0,0,0,0.8), 0 0 16px rgba(0,0,0,0.55)", // Text shadow setting.
|
||||||
"backgroundColor": "transparent", // Background color setting.
|
"backgroundColor": "rgba(20, 22, 34, 0.78)", // Background color setting.
|
||||||
"backdropFilter": "blur(6px)", // Backdrop filter setting.
|
"backdropFilter": "blur(6px)", // Backdrop filter setting.
|
||||||
"fontWeight": "normal", // Font weight setting.
|
"fontWeight": "600", // Font weight setting.
|
||||||
"fontStyle": "normal" // Font style setting.
|
"fontStyle": "normal" // Font style setting.
|
||||||
} // Secondary setting.
|
} // Secondary setting.
|
||||||
}, // Primary and secondary subtitle styling.
|
}, // Primary and secondary subtitle styling.
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Shared AI Provider
|
||||||
|
// Canonical OpenAI-compatible provider transport settings shared by Anki and YouTube subtitle fixing.
|
||||||
|
// ==========================================
|
||||||
|
"ai": {
|
||||||
|
"enabled": false, // Enable shared OpenAI-compatible AI provider features. Values: true | false
|
||||||
|
"apiKey": "", // Static API key for the shared OpenAI-compatible AI provider.
|
||||||
|
"apiKeyCommand": "", // Shell command used to resolve the shared AI provider API key.
|
||||||
|
"model": "openai/gpt-4o-mini", // Model setting.
|
||||||
|
"baseUrl": "https://openrouter.ai/api", // Base URL for the shared OpenAI-compatible AI provider.
|
||||||
|
"systemPrompt": "You are a translation engine. Return only the translated text with no explanations.", // System prompt setting.
|
||||||
|
"requestTimeoutMs": 15000 // Timeout in milliseconds for shared AI provider requests.
|
||||||
|
}, // Canonical OpenAI-compatible provider transport settings shared by Anki and YouTube subtitle fixing.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
// AnkiConnect Integration
|
// AnkiConnect Integration
|
||||||
// Automatic Anki updates and media generation options.
|
// Automatic Anki updates and media generation options.
|
||||||
// Hot-reload: AI translation settings update live while SubMiner is running.
|
// Hot-reload: ankiConnect.ai.enabled updates live while SubMiner is running.
|
||||||
|
// Shared AI provider transport settings are read from top-level ai and typically require restart.
|
||||||
// Most other AnkiConnect settings still require restart.
|
// Most other AnkiConnect settings still require restart.
|
||||||
// ==========================================
|
// ==========================================
|
||||||
"ankiConnect": {
|
"ankiConnect": {
|
||||||
@@ -220,13 +235,9 @@
|
|||||||
"translation": "SelectionText" // Translation setting.
|
"translation": "SelectionText" // Translation setting.
|
||||||
}, // Fields setting.
|
}, // Fields setting.
|
||||||
"ai": {
|
"ai": {
|
||||||
"enabled": false, // Enabled setting. Values: true | false
|
"enabled": false, // Enable AI provider usage for Anki translation/enrichment flows. Values: true | false
|
||||||
"alwaysUseAiTranslation": false, // Always use ai translation setting. Values: true | false
|
"model": "", // Optional model override for Anki AI translation/enrichment flows.
|
||||||
"apiKey": "", // Api key setting.
|
"systemPrompt": "" // Optional system prompt override for Anki AI translation/enrichment flows.
|
||||||
"model": "openai/gpt-4o-mini", // Model setting.
|
|
||||||
"baseUrl": "https://openrouter.ai/api", // Base url setting.
|
|
||||||
"targetLanguage": "English", // Target language setting.
|
|
||||||
"systemPrompt": "You are a translation engine. Return only the translated text with no explanations." // System prompt setting.
|
|
||||||
}, // Ai setting.
|
}, // Ai setting.
|
||||||
"media": {
|
"media": {
|
||||||
"generateAudio": true, // Generate audio setting. Values: true | false
|
"generateAudio": true, // Generate audio setting. Values: true | false
|
||||||
@@ -284,17 +295,23 @@
|
|||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
// YouTube Subtitle Generation
|
// YouTube Subtitle Generation
|
||||||
// Defaults for subminer YouTube subtitle extraction/transcription mode.
|
// Defaults for SubMiner YouTube subtitle generation.
|
||||||
// ==========================================
|
// ==========================================
|
||||||
"youtubeSubgen": {
|
"youtubeSubgen": {
|
||||||
"mode": "automatic", // YouTube subtitle generation mode for the launcher script. Values: automatic | preprocess | off
|
|
||||||
"whisperBin": "", // Path to whisper.cpp CLI used as fallback transcription engine.
|
"whisperBin": "", // Path to whisper.cpp CLI used as fallback transcription engine.
|
||||||
"whisperModel": "", // Path to whisper model used for fallback transcription.
|
"whisperModel": "", // Path to whisper model used for fallback transcription.
|
||||||
|
"whisperVadModel": "", // Path to optional whisper VAD model used for subtitle generation.
|
||||||
|
"whisperThreads": 4, // Thread count passed to whisper.cpp subtitle generation runs.
|
||||||
|
"fixWithAi": false, // Use shared AI provider to post-process whisper-generated YouTube subtitles. Values: true | false
|
||||||
|
"ai": {
|
||||||
|
"model": "", // Optional model override for YouTube subtitle AI post-processing.
|
||||||
|
"systemPrompt": "" // Optional system prompt override for YouTube subtitle AI post-processing.
|
||||||
|
}, // Ai setting.
|
||||||
"primarySubLanguages": [
|
"primarySubLanguages": [
|
||||||
"ja",
|
"ja",
|
||||||
"jpn"
|
"jpn"
|
||||||
] // Comma-separated primary subtitle language priority used by the launcher.
|
] // Comma-separated primary subtitle language priority used by the launcher.
|
||||||
}, // Defaults for subminer YouTube subtitle extraction/transcription mode.
|
}, // Defaults for SubMiner YouTube subtitle generation.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
// Anilist
|
// Anilist
|
||||||
|
|||||||
21
docs/RELEASING.md
Normal file
21
docs/RELEASING.md
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
<!-- read_when: cutting a tagged release or debugging release prep -->
|
||||||
|
|
||||||
|
# Releasing
|
||||||
|
|
||||||
|
1. Confirm `main` is green: `gh run list --workflow CI --limit 5`.
|
||||||
|
2. Bump `package.json` to the release version.
|
||||||
|
3. Build release metadata before tagging:
|
||||||
|
`bun run changelog:build --version <version>`
|
||||||
|
4. Review `CHANGELOG.md`.
|
||||||
|
5. Run release gate locally:
|
||||||
|
`bun run changelog:check --version <version>`
|
||||||
|
`bun run test:fast`
|
||||||
|
`bun run typecheck`
|
||||||
|
6. Commit release prep.
|
||||||
|
7. Tag the commit: `git tag v<version>`.
|
||||||
|
8. Push commit + tag.
|
||||||
|
|
||||||
|
Notes:
|
||||||
|
|
||||||
|
- `changelog:check` now rejects tag/package version mismatches.
|
||||||
|
- Do not tag while `changes/*.md` fragments still exist.
|
||||||
@@ -5,7 +5,7 @@ import { fail, log } from '../log.js';
|
|||||||
import { commandExists, isYoutubeTarget, realpathMaybe, resolvePathMaybe } from '../util.js';
|
import { commandExists, isYoutubeTarget, realpathMaybe, resolvePathMaybe } from '../util.js';
|
||||||
import { collectVideos, showFzfMenu, showRofiMenu } from '../picker.js';
|
import { collectVideos, showFzfMenu, showRofiMenu } from '../picker.js';
|
||||||
import {
|
import {
|
||||||
loadSubtitleIntoMpv,
|
cleanupPlaybackSession,
|
||||||
startMpv,
|
startMpv,
|
||||||
startOverlay,
|
startOverlay,
|
||||||
state,
|
state,
|
||||||
@@ -34,12 +34,7 @@ function checkDependencies(args: Args): void {
|
|||||||
missing.push('yt-dlp');
|
missing.push('yt-dlp');
|
||||||
}
|
}
|
||||||
|
|
||||||
if (
|
if (args.targetKind === 'url' && isYoutubeTarget(args.target) && !commandExists('ffmpeg')) {
|
||||||
args.targetKind === 'url' &&
|
|
||||||
isYoutubeTarget(args.target) &&
|
|
||||||
args.youtubeSubgenMode !== 'off' &&
|
|
||||||
!commandExists('ffmpeg')
|
|
||||||
) {
|
|
||||||
missing.push('ffmpeg');
|
missing.push('ffmpeg');
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -164,22 +159,28 @@ export async function runPlaybackCommand(context: LauncherCommandContext): Promi
|
|||||||
const isYoutubeUrl = selectedTarget.kind === 'url' && isYoutubeTarget(selectedTarget.target);
|
const isYoutubeUrl = selectedTarget.kind === 'url' && isYoutubeTarget(selectedTarget.target);
|
||||||
let preloadedSubtitles: { primaryPath?: string; secondaryPath?: string } | undefined;
|
let preloadedSubtitles: { primaryPath?: string; secondaryPath?: string } | undefined;
|
||||||
|
|
||||||
if (isYoutubeUrl && args.youtubeSubgenMode === 'preprocess') {
|
if (isYoutubeUrl) {
|
||||||
log('info', args.logLevel, 'YouTube subtitle mode: preprocess');
|
log('info', args.logLevel, 'YouTube subtitle generation: preload before mpv');
|
||||||
const generated = await generateYoutubeSubtitles(selectedTarget.target, args);
|
const generated = await generateYoutubeSubtitles(selectedTarget.target, args);
|
||||||
preloadedSubtitles = {
|
preloadedSubtitles = {
|
||||||
primaryPath: generated.primaryPath,
|
primaryPath: generated.primaryPath,
|
||||||
secondaryPath: generated.secondaryPath,
|
secondaryPath: generated.secondaryPath,
|
||||||
};
|
};
|
||||||
|
const primaryStatus = generated.primaryPath
|
||||||
|
? 'ready'
|
||||||
|
: generated.primaryNative
|
||||||
|
? 'native'
|
||||||
|
: 'missing';
|
||||||
|
const secondaryStatus = generated.secondaryPath
|
||||||
|
? 'ready'
|
||||||
|
: generated.secondaryNative
|
||||||
|
? 'native'
|
||||||
|
: 'missing';
|
||||||
log(
|
log(
|
||||||
'info',
|
'info',
|
||||||
args.logLevel,
|
args.logLevel,
|
||||||
`YouTube preprocess result: primary=${generated.primaryPath ? 'ready' : 'missing'}, secondary=${generated.secondaryPath ? 'ready' : 'missing'}`,
|
`YouTube subtitle result: primary=${primaryStatus}, secondary=${secondaryStatus}`,
|
||||||
);
|
);
|
||||||
} else if (isYoutubeUrl && args.youtubeSubgenMode === 'automatic') {
|
|
||||||
log('info', args.logLevel, 'YouTube subtitle mode: automatic (background)');
|
|
||||||
} else if (isYoutubeUrl) {
|
|
||||||
log('info', args.logLevel, 'YouTube subtitle mode: off');
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const shouldPauseUntilOverlayReady =
|
const shouldPauseUntilOverlayReady =
|
||||||
@@ -201,26 +202,6 @@ export async function runPlaybackCommand(context: LauncherCommandContext): Promi
|
|||||||
{ startPaused: shouldPauseUntilOverlayReady },
|
{ startPaused: shouldPauseUntilOverlayReady },
|
||||||
);
|
);
|
||||||
|
|
||||||
if (isYoutubeUrl && args.youtubeSubgenMode === 'automatic') {
|
|
||||||
void generateYoutubeSubtitles(selectedTarget.target, args, async (lang, subtitlePath) => {
|
|
||||||
try {
|
|
||||||
await loadSubtitleIntoMpv(mpvSocketPath, subtitlePath, lang === 'primary', args.logLevel);
|
|
||||||
} catch (error) {
|
|
||||||
log(
|
|
||||||
'warn',
|
|
||||||
args.logLevel,
|
|
||||||
`Generated subtitle ready but failed to load in mpv: ${(error as Error).message}`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}).catch((error) => {
|
|
||||||
log(
|
|
||||||
'warn',
|
|
||||||
args.logLevel,
|
|
||||||
`Background subtitle generation failed: ${(error as Error).message}`,
|
|
||||||
);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
const ready = await waitForUnixSocketReady(mpvSocketPath, 10000);
|
const ready = await waitForUnixSocketReady(mpvSocketPath, 10000);
|
||||||
const pluginAutoStartEnabled = pluginRuntimeConfig.autoStart;
|
const pluginAutoStartEnabled = pluginRuntimeConfig.autoStart;
|
||||||
const shouldStartOverlay = args.startOverlay || args.autoStartOverlay;
|
const shouldStartOverlay = args.startOverlay || args.autoStartOverlay;
|
||||||
@@ -264,9 +245,10 @@ export async function runPlaybackCommand(context: LauncherCommandContext): Promi
|
|||||||
}
|
}
|
||||||
|
|
||||||
const finalize = (code: number | null | undefined) => {
|
const finalize = (code: number | null | undefined) => {
|
||||||
stopOverlay(args);
|
void cleanupPlaybackSession(args).finally(() => {
|
||||||
processAdapter.setExitCode(code ?? 0);
|
processAdapter.setExitCode(code ?? 0);
|
||||||
resolve();
|
resolve();
|
||||||
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
if (mpvProc.exitCode !== null && mpvProc.exitCode !== undefined) {
|
if (mpvProc.exitCode !== null && mpvProc.exitCode !== undefined) {
|
||||||
|
|||||||
@@ -2,14 +2,32 @@ import test from 'node:test';
|
|||||||
import assert from 'node:assert/strict';
|
import assert from 'node:assert/strict';
|
||||||
import { parseLauncherYoutubeSubgenConfig } from './config/youtube-subgen-config.js';
|
import { parseLauncherYoutubeSubgenConfig } from './config/youtube-subgen-config.js';
|
||||||
import { parseLauncherJellyfinConfig } from './config/jellyfin-config.js';
|
import { parseLauncherJellyfinConfig } from './config/jellyfin-config.js';
|
||||||
import { parsePluginRuntimeConfigContent } from './config/plugin-runtime-config.js';
|
import {
|
||||||
|
getPluginConfigCandidates,
|
||||||
|
parsePluginRuntimeConfigContent,
|
||||||
|
} from './config/plugin-runtime-config.js';
|
||||||
|
import { getDefaultSocketPath } from './types.js';
|
||||||
|
|
||||||
test('parseLauncherYoutubeSubgenConfig keeps only valid typed values', () => {
|
test('parseLauncherYoutubeSubgenConfig keeps only valid typed values', () => {
|
||||||
const parsed = parseLauncherYoutubeSubgenConfig({
|
const parsed = parseLauncherYoutubeSubgenConfig({
|
||||||
|
ai: {
|
||||||
|
enabled: true,
|
||||||
|
apiKey: 'shared-key',
|
||||||
|
baseUrl: 'https://openrouter.ai/api',
|
||||||
|
model: 'openrouter/shared-model',
|
||||||
|
systemPrompt: 'Legacy shared prompt.',
|
||||||
|
requestTimeoutMs: 12000,
|
||||||
|
},
|
||||||
youtubeSubgen: {
|
youtubeSubgen: {
|
||||||
mode: 'preprocess',
|
|
||||||
whisperBin: '/usr/bin/whisper',
|
whisperBin: '/usr/bin/whisper',
|
||||||
whisperModel: '/models/base.bin',
|
whisperModel: '/models/base.bin',
|
||||||
|
whisperVadModel: '/models/vad.bin',
|
||||||
|
whisperThreads: 6.8,
|
||||||
|
fixWithAi: true,
|
||||||
|
ai: {
|
||||||
|
model: 'openrouter/subgen-model',
|
||||||
|
systemPrompt: 'Fix subtitles only.',
|
||||||
|
},
|
||||||
primarySubLanguages: ['ja', 42, 'en'],
|
primarySubLanguages: ['ja', 42, 'en'],
|
||||||
},
|
},
|
||||||
secondarySub: {
|
secondarySub: {
|
||||||
@@ -24,9 +42,17 @@ test('parseLauncherYoutubeSubgenConfig keeps only valid typed values', () => {
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
assert.equal(parsed.mode, 'preprocess');
|
assert.equal('mode' in parsed, false);
|
||||||
assert.deepEqual(parsed.primarySubLanguages, ['ja', 'en']);
|
assert.deepEqual(parsed.primarySubLanguages, ['ja', 'en']);
|
||||||
assert.deepEqual(parsed.secondarySubLanguages, ['eng', 'deu']);
|
assert.deepEqual(parsed.secondarySubLanguages, ['eng', 'deu']);
|
||||||
|
assert.equal(parsed.whisperVadModel, '/models/vad.bin');
|
||||||
|
assert.equal(parsed.whisperThreads, 6);
|
||||||
|
assert.equal(parsed.fixWithAi, true);
|
||||||
|
assert.equal(parsed.ai?.enabled, true);
|
||||||
|
assert.equal(parsed.ai?.apiKey, 'shared-key');
|
||||||
|
assert.equal(parsed.ai?.model, 'openrouter/subgen-model');
|
||||||
|
assert.equal(parsed.ai?.systemPrompt, 'Fix subtitles only.');
|
||||||
|
assert.equal(parsed.ai?.requestTimeoutMs, 12000);
|
||||||
assert.equal(parsed.jimakuLanguagePreference, 'ja');
|
assert.equal(parsed.jimakuLanguagePreference, 'ja');
|
||||||
assert.equal(parsed.jimakuMaxEntryResults, 8);
|
assert.equal(parsed.jimakuMaxEntryResults, 8);
|
||||||
});
|
});
|
||||||
@@ -75,3 +101,18 @@ auto_start_pause_until_ready = off
|
|||||||
assert.equal(parsed.autoStartVisibleOverlay, false);
|
assert.equal(parsed.autoStartVisibleOverlay, false);
|
||||||
assert.equal(parsed.autoStartPauseUntilReady, false);
|
assert.equal(parsed.autoStartPauseUntilReady, false);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test('getPluginConfigCandidates resolves Windows mpv script-opts path', () => {
|
||||||
|
assert.deepEqual(
|
||||||
|
getPluginConfigCandidates({
|
||||||
|
platform: 'win32',
|
||||||
|
homeDir: 'C:\\Users\\tester',
|
||||||
|
appDataDir: 'C:\\Users\\tester\\AppData\\Roaming',
|
||||||
|
}),
|
||||||
|
['C:\\Users\\tester\\AppData\\Roaming\\mpv\\script-opts\\subminer.conf'],
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('getDefaultSocketPath returns Windows named pipe default', () => {
|
||||||
|
assert.equal(getDefaultSocketPath('win32'), '\\\\.\\pipe\\subminer-socket');
|
||||||
|
});
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ import { resolveConfigFilePath } from '../src/config/path-resolution.js';
|
|||||||
|
|
||||||
export function resolveMainConfigPath(): string {
|
export function resolveMainConfigPath(): string {
|
||||||
return resolveConfigFilePath({
|
return resolveConfigFilePath({
|
||||||
|
appDataDir: process.env.APPDATA,
|
||||||
xdgConfigHome: process.env.XDG_CONFIG_HOME,
|
xdgConfigHome: process.env.XDG_CONFIG_HOME,
|
||||||
homeDir: os.homedir(),
|
homeDir: os.homedir(),
|
||||||
existsSync: fs.existsSync,
|
existsSync: fs.existsSync,
|
||||||
|
|||||||
@@ -1,13 +1,7 @@
|
|||||||
import fs from 'node:fs';
|
import fs from 'node:fs';
|
||||||
import path from 'node:path';
|
import path from 'node:path';
|
||||||
import { fail } from '../log.js';
|
import { fail } from '../log.js';
|
||||||
import type {
|
import type { Args, Backend, LauncherYoutubeSubgenConfig, LogLevel } from '../types.js';
|
||||||
Args,
|
|
||||||
Backend,
|
|
||||||
LauncherYoutubeSubgenConfig,
|
|
||||||
LogLevel,
|
|
||||||
YoutubeSubgenMode,
|
|
||||||
} from '../types.js';
|
|
||||||
import {
|
import {
|
||||||
DEFAULT_JIMAKU_API_BASE_URL,
|
DEFAULT_JIMAKU_API_BASE_URL,
|
||||||
DEFAULT_YOUTUBE_PRIMARY_SUB_LANGS,
|
DEFAULT_YOUTUBE_PRIMARY_SUB_LANGS,
|
||||||
@@ -54,14 +48,6 @@ function parseLogLevel(value: string): LogLevel {
|
|||||||
fail(`Invalid log level: ${value} (must be debug, info, warn, or error)`);
|
fail(`Invalid log level: ${value} (must be debug, info, warn, or error)`);
|
||||||
}
|
}
|
||||||
|
|
||||||
function parseYoutubeMode(value: string): YoutubeSubgenMode {
|
|
||||||
const normalized = value.toLowerCase();
|
|
||||||
if (normalized === 'automatic' || normalized === 'preprocess' || normalized === 'off') {
|
|
||||||
return normalized as YoutubeSubgenMode;
|
|
||||||
}
|
|
||||||
fail(`Invalid yt-subgen mode: ${value} (must be automatic, preprocess, or off)`);
|
|
||||||
}
|
|
||||||
|
|
||||||
function parseBackend(value: string): Backend {
|
function parseBackend(value: string): Backend {
|
||||||
if (value === 'auto' || value === 'hyprland' || value === 'x11' || value === 'macos') {
|
if (value === 'auto' || value === 'hyprland' || value === 'x11' || value === 'macos') {
|
||||||
return value as Backend;
|
return value as Backend;
|
||||||
@@ -91,13 +77,6 @@ function parseDictionaryTarget(value: string): string {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export function createDefaultArgs(launcherConfig: LauncherYoutubeSubgenConfig): Args {
|
export function createDefaultArgs(launcherConfig: LauncherYoutubeSubgenConfig): Args {
|
||||||
const envMode = (process.env.SUBMINER_YT_SUBGEN_MODE || '').toLowerCase();
|
|
||||||
const defaultMode: YoutubeSubgenMode =
|
|
||||||
envMode === 'preprocess' || envMode === 'off' || envMode === 'automatic'
|
|
||||||
? (envMode as YoutubeSubgenMode)
|
|
||||||
: launcherConfig.mode
|
|
||||||
? launcherConfig.mode
|
|
||||||
: 'automatic';
|
|
||||||
const configuredSecondaryLangs = uniqueNormalizedLangCodes(
|
const configuredSecondaryLangs = uniqueNormalizedLangCodes(
|
||||||
launcherConfig.secondarySubLanguages ?? [],
|
launcherConfig.secondarySubLanguages ?? [],
|
||||||
);
|
);
|
||||||
@@ -120,12 +99,18 @@ export function createDefaultArgs(launcherConfig: LauncherYoutubeSubgenConfig):
|
|||||||
recursive: false,
|
recursive: false,
|
||||||
profile: 'subminer',
|
profile: 'subminer',
|
||||||
startOverlay: false,
|
startOverlay: false,
|
||||||
youtubeSubgenMode: defaultMode,
|
|
||||||
whisperBin: process.env.SUBMINER_WHISPER_BIN || launcherConfig.whisperBin || '',
|
whisperBin: process.env.SUBMINER_WHISPER_BIN || launcherConfig.whisperBin || '',
|
||||||
whisperModel: process.env.SUBMINER_WHISPER_MODEL || launcherConfig.whisperModel || '',
|
whisperModel: process.env.SUBMINER_WHISPER_MODEL || launcherConfig.whisperModel || '',
|
||||||
|
whisperVadModel: process.env.SUBMINER_WHISPER_VAD_MODEL || launcherConfig.whisperVadModel || '',
|
||||||
|
whisperThreads: (() => {
|
||||||
|
const envValue = Number.parseInt(process.env.SUBMINER_WHISPER_THREADS || '', 10);
|
||||||
|
if (Number.isInteger(envValue) && envValue > 0) return envValue;
|
||||||
|
return launcherConfig.whisperThreads || 4;
|
||||||
|
})(),
|
||||||
youtubeSubgenOutDir: process.env.SUBMINER_YT_SUBGEN_OUT_DIR || DEFAULT_YOUTUBE_SUBGEN_OUT_DIR,
|
youtubeSubgenOutDir: process.env.SUBMINER_YT_SUBGEN_OUT_DIR || DEFAULT_YOUTUBE_SUBGEN_OUT_DIR,
|
||||||
youtubeSubgenAudioFormat: process.env.SUBMINER_YT_SUBGEN_AUDIO_FORMAT || 'm4a',
|
youtubeSubgenAudioFormat: process.env.SUBMINER_YT_SUBGEN_AUDIO_FORMAT || 'm4a',
|
||||||
youtubeSubgenKeepTemp: process.env.SUBMINER_YT_SUBGEN_KEEP_TEMP === '1',
|
youtubeSubgenKeepTemp: process.env.SUBMINER_YT_SUBGEN_KEEP_TEMP === '1',
|
||||||
|
youtubeFixWithAi: launcherConfig.fixWithAi === true,
|
||||||
jimakuApiKey: process.env.SUBMINER_JIMAKU_API_KEY || '',
|
jimakuApiKey: process.env.SUBMINER_JIMAKU_API_KEY || '',
|
||||||
jimakuApiKeyCommand: process.env.SUBMINER_JIMAKU_API_KEY_COMMAND || '',
|
jimakuApiKeyCommand: process.env.SUBMINER_JIMAKU_API_KEY_COMMAND || '',
|
||||||
jimakuApiBaseUrl: process.env.SUBMINER_JIMAKU_API_BASE_URL || DEFAULT_JIMAKU_API_BASE_URL,
|
jimakuApiBaseUrl: process.env.SUBMINER_JIMAKU_API_BASE_URL || DEFAULT_JIMAKU_API_BASE_URL,
|
||||||
@@ -152,6 +137,15 @@ export function createDefaultArgs(launcherConfig: LauncherYoutubeSubgenConfig):
|
|||||||
youtubeSecondarySubLangs: secondarySubLangs,
|
youtubeSecondarySubLangs: secondarySubLangs,
|
||||||
youtubeAudioLangs,
|
youtubeAudioLangs,
|
||||||
youtubeWhisperSourceLanguage: inferWhisperLanguage(primarySubLangs, 'ja'),
|
youtubeWhisperSourceLanguage: inferWhisperLanguage(primarySubLangs, 'ja'),
|
||||||
|
aiConfig: {
|
||||||
|
enabled: launcherConfig.ai?.enabled,
|
||||||
|
apiKey: launcherConfig.ai?.apiKey,
|
||||||
|
apiKeyCommand: launcherConfig.ai?.apiKeyCommand,
|
||||||
|
baseUrl: launcherConfig.ai?.baseUrl,
|
||||||
|
model: launcherConfig.ai?.model,
|
||||||
|
systemPrompt: launcherConfig.ai?.systemPrompt,
|
||||||
|
requestTimeoutMs: launcherConfig.ai?.requestTimeoutMs,
|
||||||
|
},
|
||||||
useTexthooker: true,
|
useTexthooker: true,
|
||||||
autoStartOverlay: false,
|
autoStartOverlay: false,
|
||||||
texthookerOnly: false,
|
texthookerOnly: false,
|
||||||
@@ -242,8 +236,6 @@ export function applyInvocationsToArgs(parsed: Args, invocations: CliInvocations
|
|||||||
if (invocations.ytInvocation) {
|
if (invocations.ytInvocation) {
|
||||||
if (invocations.ytInvocation.logLevel)
|
if (invocations.ytInvocation.logLevel)
|
||||||
parsed.logLevel = parseLogLevel(invocations.ytInvocation.logLevel);
|
parsed.logLevel = parseLogLevel(invocations.ytInvocation.logLevel);
|
||||||
if (invocations.ytInvocation.mode)
|
|
||||||
parsed.youtubeSubgenMode = parseYoutubeMode(invocations.ytInvocation.mode);
|
|
||||||
if (invocations.ytInvocation.outDir)
|
if (invocations.ytInvocation.outDir)
|
||||||
parsed.youtubeSubgenOutDir = invocations.ytInvocation.outDir;
|
parsed.youtubeSubgenOutDir = invocations.ytInvocation.outDir;
|
||||||
if (invocations.ytInvocation.keepTemp) parsed.youtubeSubgenKeepTemp = true;
|
if (invocations.ytInvocation.keepTemp) parsed.youtubeSubgenKeepTemp = true;
|
||||||
@@ -251,6 +243,10 @@ export function applyInvocationsToArgs(parsed: Args, invocations: CliInvocations
|
|||||||
parsed.whisperBin = invocations.ytInvocation.whisperBin;
|
parsed.whisperBin = invocations.ytInvocation.whisperBin;
|
||||||
if (invocations.ytInvocation.whisperModel)
|
if (invocations.ytInvocation.whisperModel)
|
||||||
parsed.whisperModel = invocations.ytInvocation.whisperModel;
|
parsed.whisperModel = invocations.ytInvocation.whisperModel;
|
||||||
|
if (invocations.ytInvocation.whisperVadModel)
|
||||||
|
parsed.whisperVadModel = invocations.ytInvocation.whisperVadModel;
|
||||||
|
if (invocations.ytInvocation.whisperThreads)
|
||||||
|
parsed.whisperThreads = invocations.ytInvocation.whisperThreads;
|
||||||
if (invocations.ytInvocation.ytSubgenAudioFormat) {
|
if (invocations.ytInvocation.ytSubgenAudioFormat) {
|
||||||
parsed.youtubeSubgenAudioFormat = invocations.ytInvocation.ytSubgenAudioFormat;
|
parsed.youtubeSubgenAudioFormat = invocations.ytInvocation.ytSubgenAudioFormat;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -16,11 +16,12 @@ export interface JellyfinInvocation {
|
|||||||
|
|
||||||
export interface YtInvocation {
|
export interface YtInvocation {
|
||||||
target?: string;
|
target?: string;
|
||||||
mode?: string;
|
|
||||||
outDir?: string;
|
outDir?: string;
|
||||||
keepTemp?: boolean;
|
keepTemp?: boolean;
|
||||||
whisperBin?: string;
|
whisperBin?: string;
|
||||||
whisperModel?: string;
|
whisperModel?: string;
|
||||||
|
whisperVadModel?: string;
|
||||||
|
whisperThreads?: number;
|
||||||
ytSubgenAudioFormat?: string;
|
ytSubgenAudioFormat?: string;
|
||||||
logLevel?: string;
|
logLevel?: string;
|
||||||
}
|
}
|
||||||
@@ -201,21 +202,27 @@ export function parseCliPrograms(
|
|||||||
.alias('youtube')
|
.alias('youtube')
|
||||||
.description('YouTube workflows')
|
.description('YouTube workflows')
|
||||||
.argument('[target]', 'YouTube URL or ytsearch: query')
|
.argument('[target]', 'YouTube URL or ytsearch: query')
|
||||||
.option('-m, --mode <mode>', 'Subtitle generation mode')
|
|
||||||
.option('-o, --out-dir <dir>', 'Subtitle output dir')
|
.option('-o, --out-dir <dir>', 'Subtitle output dir')
|
||||||
.option('--keep-temp', 'Keep temp files')
|
.option('--keep-temp', 'Keep temp files')
|
||||||
.option('--whisper-bin <path>', 'whisper.cpp CLI path')
|
.option('--whisper-bin <path>', 'whisper.cpp CLI path')
|
||||||
.option('--whisper-model <path>', 'whisper model path')
|
.option('--whisper-model <path>', 'whisper model path')
|
||||||
|
.option('--whisper-vad-model <path>', 'whisper.cpp VAD model path')
|
||||||
|
.option('--whisper-threads <n>', 'whisper.cpp thread count')
|
||||||
.option('--yt-subgen-audio-format <format>', 'Audio extraction format')
|
.option('--yt-subgen-audio-format <format>', 'Audio extraction format')
|
||||||
.option('--log-level <level>', 'Log level')
|
.option('--log-level <level>', 'Log level')
|
||||||
.action((target: string | undefined, options: Record<string, unknown>) => {
|
.action((target: string | undefined, options: Record<string, unknown>) => {
|
||||||
ytInvocation = {
|
ytInvocation = {
|
||||||
target,
|
target,
|
||||||
mode: typeof options.mode === 'string' ? options.mode : undefined,
|
|
||||||
outDir: typeof options.outDir === 'string' ? options.outDir : undefined,
|
outDir: typeof options.outDir === 'string' ? options.outDir : undefined,
|
||||||
keepTemp: options.keepTemp === true,
|
keepTemp: options.keepTemp === true,
|
||||||
whisperBin: typeof options.whisperBin === 'string' ? options.whisperBin : undefined,
|
whisperBin: typeof options.whisperBin === 'string' ? options.whisperBin : undefined,
|
||||||
whisperModel: typeof options.whisperModel === 'string' ? options.whisperModel : undefined,
|
whisperModel: typeof options.whisperModel === 'string' ? options.whisperModel : undefined,
|
||||||
|
whisperVadModel:
|
||||||
|
typeof options.whisperVadModel === 'string' ? options.whisperVadModel : undefined,
|
||||||
|
whisperThreads:
|
||||||
|
typeof options.whisperThreads === 'number' && Number.isFinite(options.whisperThreads)
|
||||||
|
? Math.floor(options.whisperThreads)
|
||||||
|
: undefined,
|
||||||
ytSubgenAudioFormat:
|
ytSubgenAudioFormat:
|
||||||
typeof options.ytSubgenAudioFormat === 'string' ? options.ytSubgenAudioFormat : undefined,
|
typeof options.ytSubgenAudioFormat === 'string' ? options.ytSubgenAudioFormat : undefined,
|
||||||
logLevel: typeof options.logLevel === 'string' ? options.logLevel : undefined,
|
logLevel: typeof options.logLevel === 'string' ? options.logLevel : undefined,
|
||||||
|
|||||||
@@ -5,12 +5,36 @@ import { log } from '../log.js';
|
|||||||
import type { LogLevel, PluginRuntimeConfig } from '../types.js';
|
import type { LogLevel, PluginRuntimeConfig } from '../types.js';
|
||||||
import { DEFAULT_SOCKET_PATH } from '../types.js';
|
import { DEFAULT_SOCKET_PATH } from '../types.js';
|
||||||
|
|
||||||
export function getPluginConfigCandidates(): string[] {
|
function getPlatformPath(platform: NodeJS.Platform): typeof path.posix | typeof path.win32 {
|
||||||
const xdgConfigHome = process.env.XDG_CONFIG_HOME || path.join(os.homedir(), '.config');
|
return platform === 'win32' ? path.win32 : path.posix;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getPluginConfigCandidates(options?: {
|
||||||
|
platform?: NodeJS.Platform;
|
||||||
|
homeDir?: string;
|
||||||
|
xdgConfigHome?: string;
|
||||||
|
appDataDir?: string;
|
||||||
|
}): string[] {
|
||||||
|
const platform = options?.platform ?? process.platform;
|
||||||
|
const homeDir = options?.homeDir ?? os.homedir();
|
||||||
|
const platformPath = getPlatformPath(platform);
|
||||||
|
|
||||||
|
if (platform === 'win32') {
|
||||||
|
const appDataDir =
|
||||||
|
options?.appDataDir?.trim() ||
|
||||||
|
process.env.APPDATA?.trim() ||
|
||||||
|
platformPath.join(homeDir, 'AppData', 'Roaming');
|
||||||
|
return [platformPath.join(appDataDir, 'mpv', 'script-opts', 'subminer.conf')];
|
||||||
|
}
|
||||||
|
|
||||||
|
const xdgConfigHome =
|
||||||
|
options?.xdgConfigHome?.trim() ||
|
||||||
|
process.env.XDG_CONFIG_HOME ||
|
||||||
|
platformPath.join(homeDir, '.config');
|
||||||
return Array.from(
|
return Array.from(
|
||||||
new Set([
|
new Set([
|
||||||
path.join(xdgConfigHome, 'mpv', 'script-opts', 'subminer.conf'),
|
platformPath.join(xdgConfigHome, 'mpv', 'script-opts', 'subminer.conf'),
|
||||||
path.join(os.homedir(), '.config', 'mpv', 'script-opts', 'subminer.conf'),
|
platformPath.join(homeDir, '.config', 'mpv', 'script-opts', 'subminer.conf'),
|
||||||
]),
|
]),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,6 +5,7 @@ import { resolveConfigFilePath } from '../../src/config/path-resolution.js';
|
|||||||
|
|
||||||
export function resolveLauncherMainConfigPath(): string {
|
export function resolveLauncherMainConfigPath(): string {
|
||||||
return resolveConfigFilePath({
|
return resolveConfigFilePath({
|
||||||
|
appDataDir: process.env.APPDATA,
|
||||||
xdgConfigHome: process.env.XDG_CONFIG_HOME,
|
xdgConfigHome: process.env.XDG_CONFIG_HOME,
|
||||||
homeDir: os.homedir(),
|
homeDir: os.homedir(),
|
||||||
existsSync: fs.existsSync,
|
existsSync: fs.existsSync,
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
import type { LauncherYoutubeSubgenConfig } from '../types.js';
|
import type { LauncherYoutubeSubgenConfig } from '../types.js';
|
||||||
|
import { mergeAiConfig } from '../../src/ai/config.js';
|
||||||
|
|
||||||
function asStringArray(value: unknown): string[] | undefined {
|
function asStringArray(value: unknown): string[] | undefined {
|
||||||
if (!Array.isArray(value)) return undefined;
|
if (!Array.isArray(value)) return undefined;
|
||||||
@@ -21,17 +22,58 @@ export function parseLauncherYoutubeSubgenConfig(
|
|||||||
const jimakuRaw = root.jimaku;
|
const jimakuRaw = root.jimaku;
|
||||||
const jimaku =
|
const jimaku =
|
||||||
jimakuRaw && typeof jimakuRaw === 'object' ? (jimakuRaw as Record<string, unknown>) : null;
|
jimakuRaw && typeof jimakuRaw === 'object' ? (jimakuRaw as Record<string, unknown>) : null;
|
||||||
|
const aiRaw = root.ai;
|
||||||
|
const ai = aiRaw && typeof aiRaw === 'object' ? (aiRaw as Record<string, unknown>) : null;
|
||||||
|
const youtubeAiRaw = youtubeSubgen?.ai;
|
||||||
|
const youtubeAi =
|
||||||
|
youtubeAiRaw && typeof youtubeAiRaw === 'object'
|
||||||
|
? (youtubeAiRaw as Record<string, unknown>)
|
||||||
|
: null;
|
||||||
|
|
||||||
const mode = youtubeSubgen?.mode;
|
|
||||||
const jimakuLanguagePreference = jimaku?.languagePreference;
|
const jimakuLanguagePreference = jimaku?.languagePreference;
|
||||||
const jimakuMaxEntryResults = jimaku?.maxEntryResults;
|
const jimakuMaxEntryResults = jimaku?.maxEntryResults;
|
||||||
|
|
||||||
return {
|
return {
|
||||||
mode: mode === 'automatic' || mode === 'preprocess' || mode === 'off' ? mode : undefined,
|
|
||||||
whisperBin:
|
whisperBin:
|
||||||
typeof youtubeSubgen?.whisperBin === 'string' ? youtubeSubgen.whisperBin : undefined,
|
typeof youtubeSubgen?.whisperBin === 'string' ? youtubeSubgen.whisperBin : undefined,
|
||||||
whisperModel:
|
whisperModel:
|
||||||
typeof youtubeSubgen?.whisperModel === 'string' ? youtubeSubgen.whisperModel : undefined,
|
typeof youtubeSubgen?.whisperModel === 'string' ? youtubeSubgen.whisperModel : undefined,
|
||||||
|
whisperVadModel:
|
||||||
|
typeof youtubeSubgen?.whisperVadModel === 'string'
|
||||||
|
? youtubeSubgen.whisperVadModel
|
||||||
|
: undefined,
|
||||||
|
whisperThreads:
|
||||||
|
typeof youtubeSubgen?.whisperThreads === 'number' &&
|
||||||
|
Number.isFinite(youtubeSubgen.whisperThreads) &&
|
||||||
|
youtubeSubgen.whisperThreads > 0
|
||||||
|
? Math.floor(youtubeSubgen.whisperThreads)
|
||||||
|
: undefined,
|
||||||
|
fixWithAi: typeof youtubeSubgen?.fixWithAi === 'boolean' ? youtubeSubgen.fixWithAi : undefined,
|
||||||
|
ai: mergeAiConfig(
|
||||||
|
ai
|
||||||
|
? {
|
||||||
|
enabled: typeof ai.enabled === 'boolean' ? ai.enabled : undefined,
|
||||||
|
apiKey: typeof ai.apiKey === 'string' ? ai.apiKey : undefined,
|
||||||
|
apiKeyCommand: typeof ai.apiKeyCommand === 'string' ? ai.apiKeyCommand : undefined,
|
||||||
|
baseUrl: typeof ai.baseUrl === 'string' ? ai.baseUrl : undefined,
|
||||||
|
model: typeof ai.model === 'string' ? ai.model : undefined,
|
||||||
|
systemPrompt: typeof ai.systemPrompt === 'string' ? ai.systemPrompt : undefined,
|
||||||
|
requestTimeoutMs:
|
||||||
|
typeof ai.requestTimeoutMs === 'number' &&
|
||||||
|
Number.isFinite(ai.requestTimeoutMs) &&
|
||||||
|
ai.requestTimeoutMs > 0
|
||||||
|
? Math.floor(ai.requestTimeoutMs)
|
||||||
|
: undefined,
|
||||||
|
}
|
||||||
|
: undefined,
|
||||||
|
youtubeAi
|
||||||
|
? {
|
||||||
|
model: typeof youtubeAi.model === 'string' ? youtubeAi.model : undefined,
|
||||||
|
systemPrompt:
|
||||||
|
typeof youtubeAi.systemPrompt === 'string' ? youtubeAi.systemPrompt : undefined,
|
||||||
|
}
|
||||||
|
: undefined,
|
||||||
|
),
|
||||||
primarySubLanguages: asStringArray(youtubeSubgen?.primarySubLanguages),
|
primarySubLanguages: asStringArray(youtubeSubgen?.primarySubLanguages),
|
||||||
secondarySubLanguages: asStringArray(secondarySub?.secondarySubLanguages),
|
secondarySubLanguages: asStringArray(secondarySub?.secondarySubLanguages),
|
||||||
jimakuApiKey: typeof jimaku?.apiKey === 'string' ? jimaku.apiKey : undefined,
|
jimakuApiKey: typeof jimaku?.apiKey === 'string' ? jimaku.apiKey : undefined,
|
||||||
|
|||||||
24
launcher/log.test.ts
Normal file
24
launcher/log.test.ts
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
import test from 'node:test';
|
||||||
|
import assert from 'node:assert/strict';
|
||||||
|
import path from 'node:path';
|
||||||
|
import { getDefaultMpvLogFile } from './types.js';
|
||||||
|
|
||||||
|
test('getDefaultMpvLogFile uses APPDATA on windows', () => {
|
||||||
|
const resolved = getDefaultMpvLogFile({
|
||||||
|
platform: 'win32',
|
||||||
|
homeDir: 'C:\\Users\\tester',
|
||||||
|
appDataDir: 'C:\\Users\\tester\\AppData\\Roaming',
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.equal(
|
||||||
|
path.normalize(resolved),
|
||||||
|
path.normalize(
|
||||||
|
path.join(
|
||||||
|
'C:\\Users\\tester\\AppData\\Roaming',
|
||||||
|
'SubMiner',
|
||||||
|
'logs',
|
||||||
|
`SubMiner-${new Date().toISOString().slice(0, 10)}.log`,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
});
|
||||||
@@ -51,10 +51,16 @@ function runLauncher(argv: string[], env: NodeJS.ProcessEnv): RunResult {
|
|||||||
}
|
}
|
||||||
|
|
||||||
function makeTestEnv(homeDir: string, xdgConfigHome: string): NodeJS.ProcessEnv {
|
function makeTestEnv(homeDir: string, xdgConfigHome: string): NodeJS.ProcessEnv {
|
||||||
|
const pathValue = process.env.Path || process.env.PATH || '';
|
||||||
return {
|
return {
|
||||||
...process.env,
|
...process.env,
|
||||||
HOME: homeDir,
|
HOME: homeDir,
|
||||||
|
USERPROFILE: homeDir,
|
||||||
|
APPDATA: xdgConfigHome,
|
||||||
|
LOCALAPPDATA: path.join(homeDir, 'AppData', 'Local'),
|
||||||
XDG_CONFIG_HOME: xdgConfigHome,
|
XDG_CONFIG_HOME: xdgConfigHome,
|
||||||
|
PATH: pathValue,
|
||||||
|
Path: pathValue,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -75,13 +81,14 @@ test('config path uses XDG_CONFIG_HOME override', () => {
|
|||||||
test('config discovery ignores lowercase subminer candidate', () => {
|
test('config discovery ignores lowercase subminer candidate', () => {
|
||||||
const homeDir = '/home/tester';
|
const homeDir = '/home/tester';
|
||||||
const xdgConfigHome = '/tmp/xdg-config';
|
const xdgConfigHome = '/tmp/xdg-config';
|
||||||
const expected = path.join(xdgConfigHome, 'SubMiner', 'config.jsonc');
|
const expected = path.posix.join(xdgConfigHome, 'SubMiner', 'config.jsonc');
|
||||||
const foundPaths = new Set([path.join(xdgConfigHome, 'subminer', 'config.json')]);
|
const foundPaths = new Set([path.posix.join(xdgConfigHome, 'subminer', 'config.json')]);
|
||||||
|
|
||||||
const resolved = resolveConfigFilePath({
|
const resolved = resolveConfigFilePath({
|
||||||
xdgConfigHome,
|
xdgConfigHome,
|
||||||
homeDir,
|
homeDir,
|
||||||
existsSync: (candidate) => foundPaths.has(path.normalize(candidate)),
|
platform: 'linux',
|
||||||
|
existsSync: (candidate) => foundPaths.has(path.posix.normalize(candidate)),
|
||||||
});
|
});
|
||||||
|
|
||||||
assert.equal(resolved, expected);
|
assert.equal(resolved, expected);
|
||||||
@@ -138,6 +145,12 @@ test('mpv status exits non-zero when socket is not ready', () => {
|
|||||||
withTempDir((root) => {
|
withTempDir((root) => {
|
||||||
const homeDir = path.join(root, 'home');
|
const homeDir = path.join(root, 'home');
|
||||||
const xdgConfigHome = path.join(root, 'xdg');
|
const xdgConfigHome = path.join(root, 'xdg');
|
||||||
|
const socketPath = path.join(root, 'missing.sock');
|
||||||
|
fs.mkdirSync(path.join(xdgConfigHome, 'mpv', 'script-opts'), { recursive: true });
|
||||||
|
fs.writeFileSync(
|
||||||
|
path.join(xdgConfigHome, 'mpv', 'script-opts', 'subminer.conf'),
|
||||||
|
`socket_path=${socketPath}\n`,
|
||||||
|
);
|
||||||
const result = runLauncher(['mpv', 'status'], makeTestEnv(homeDir, xdgConfigHome));
|
const result = runLauncher(['mpv', 'status'], makeTestEnv(homeDir, xdgConfigHome));
|
||||||
|
|
||||||
assert.equal(result.status, 1);
|
assert.equal(result.status, 1);
|
||||||
@@ -152,6 +165,7 @@ test('doctor reports checks and exits non-zero without hard dependencies', () =>
|
|||||||
const env = {
|
const env = {
|
||||||
...makeTestEnv(homeDir, xdgConfigHome),
|
...makeTestEnv(homeDir, xdgConfigHome),
|
||||||
PATH: '',
|
PATH: '',
|
||||||
|
Path: '',
|
||||||
};
|
};
|
||||||
const result = runLauncher(['doctor'], env);
|
const result = runLauncher(['doctor'], env);
|
||||||
|
|
||||||
@@ -162,6 +176,136 @@ test('doctor reports checks and exits non-zero without hard dependencies', () =>
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test('youtube command rejects removed --mode option', () => {
|
||||||
|
withTempDir((root) => {
|
||||||
|
const homeDir = path.join(root, 'home');
|
||||||
|
const xdgConfigHome = path.join(root, 'xdg');
|
||||||
|
const appPath = path.join(root, 'fake-subminer.sh');
|
||||||
|
fs.writeFileSync(appPath, '#!/bin/sh\nexit 0\n');
|
||||||
|
fs.chmodSync(appPath, 0o755);
|
||||||
|
|
||||||
|
const env = {
|
||||||
|
...makeTestEnv(homeDir, xdgConfigHome),
|
||||||
|
SUBMINER_APPIMAGE_PATH: appPath,
|
||||||
|
};
|
||||||
|
const result = runLauncher(
|
||||||
|
['youtube', 'https://www.youtube.com/watch?v=test123', '--mode', 'automatic'],
|
||||||
|
env,
|
||||||
|
);
|
||||||
|
|
||||||
|
assert.equal(result.status, 1);
|
||||||
|
assert.match(result.stderr, /unknown option '--mode'/i);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
test('youtube playback generates subtitles before mpv launch', { timeout: 15000 }, () => {
|
||||||
|
withTempDir((root) => {
|
||||||
|
const homeDir = path.join(root, 'home');
|
||||||
|
const xdgConfigHome = path.join(root, 'xdg');
|
||||||
|
const binDir = path.join(root, 'bin');
|
||||||
|
const appPath = path.join(root, 'fake-subminer.sh');
|
||||||
|
const ytdlpLogPath = path.join(root, 'yt-dlp.log');
|
||||||
|
const mpvCapturePath = path.join(root, 'mpv-order.txt');
|
||||||
|
const mpvArgsPath = path.join(root, 'mpv-args.txt');
|
||||||
|
const socketPath = path.join(root, 'mpv.sock');
|
||||||
|
const bunBinary = JSON.stringify(process.execPath.replace(/\\/g, '/'));
|
||||||
|
|
||||||
|
fs.mkdirSync(binDir, { recursive: true });
|
||||||
|
fs.mkdirSync(path.join(xdgConfigHome, 'SubMiner'), { recursive: true });
|
||||||
|
fs.mkdirSync(path.join(xdgConfigHome, 'mpv', 'script-opts'), { recursive: true });
|
||||||
|
fs.writeFileSync(
|
||||||
|
path.join(xdgConfigHome, 'SubMiner', 'setup-state.json'),
|
||||||
|
JSON.stringify({
|
||||||
|
version: 1,
|
||||||
|
status: 'completed',
|
||||||
|
completedAt: '2026-03-08T00:00:00.000Z',
|
||||||
|
completionSource: 'user',
|
||||||
|
lastSeenYomitanDictionaryCount: 0,
|
||||||
|
pluginInstallStatus: 'installed',
|
||||||
|
pluginInstallPathSummary: null,
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
fs.writeFileSync(
|
||||||
|
path.join(xdgConfigHome, 'mpv', 'script-opts', 'subminer.conf'),
|
||||||
|
`socket_path=${socketPath}\nauto_start=no\nauto_start_visible_overlay=no\nauto_start_pause_until_ready=no\n`,
|
||||||
|
);
|
||||||
|
fs.writeFileSync(appPath, '#!/bin/sh\nexit 0\n');
|
||||||
|
fs.chmodSync(appPath, 0o755);
|
||||||
|
|
||||||
|
fs.writeFileSync(
|
||||||
|
path.join(binDir, 'yt-dlp'),
|
||||||
|
`#!/bin/sh
|
||||||
|
set -eu
|
||||||
|
printf '%s\\n' "$*" >> "$SUBMINER_TEST_YTDLP_LOG"
|
||||||
|
if printf '%s\\n' "$*" | grep -q -- '--dump-single-json'; then
|
||||||
|
printf '{"id":"video123"}\\n'
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
out_dir=""
|
||||||
|
prev=""
|
||||||
|
for arg in "$@"; do
|
||||||
|
if [ "$prev" = "-o" ]; then
|
||||||
|
out_dir=$(dirname "$arg")
|
||||||
|
break
|
||||||
|
fi
|
||||||
|
prev="$arg"
|
||||||
|
done
|
||||||
|
mkdir -p "$out_dir"
|
||||||
|
printf '1\\n00:00:00,000 --> 00:00:01,000\\nこんにちは\\n' > "$out_dir/video123.ja.srt"
|
||||||
|
printf '1\\n00:00:00,000 --> 00:00:01,000\\nhello\\n' > "$out_dir/video123.en.srt"
|
||||||
|
`,
|
||||||
|
'utf8',
|
||||||
|
);
|
||||||
|
fs.chmodSync(path.join(binDir, 'yt-dlp'), 0o755);
|
||||||
|
|
||||||
|
fs.writeFileSync(path.join(binDir, 'ffmpeg'), '#!/bin/sh\nexit 0\n', 'utf8');
|
||||||
|
fs.chmodSync(path.join(binDir, 'ffmpeg'), 0o755);
|
||||||
|
|
||||||
|
fs.writeFileSync(
|
||||||
|
path.join(binDir, 'mpv'),
|
||||||
|
`#!/bin/sh
|
||||||
|
set -eu
|
||||||
|
if [ -s "$SUBMINER_TEST_YTDLP_LOG" ]; then
|
||||||
|
printf 'generated-before-mpv\\n' > "$SUBMINER_TEST_MPV_ORDER"
|
||||||
|
else
|
||||||
|
printf 'mpv-before-generation\\n' > "$SUBMINER_TEST_MPV_ORDER"
|
||||||
|
fi
|
||||||
|
printf '%s\\n' "$@" > "$SUBMINER_TEST_MPV_ARGS"
|
||||||
|
socket_path=""
|
||||||
|
for arg in "$@"; do
|
||||||
|
case "$arg" in
|
||||||
|
--input-ipc-server=*)
|
||||||
|
socket_path="\${arg#--input-ipc-server=}"
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
${bunBinary} -e "const net=require('node:net'); const fs=require('node:fs'); const socket=process.argv[1]; try { fs.rmSync(socket,{force:true}); } catch {} const server=net.createServer((conn)=>conn.end()); server.listen(socket,()=>setTimeout(()=>server.close(()=>process.exit(0)),250));" "$socket_path"
|
||||||
|
`,
|
||||||
|
'utf8',
|
||||||
|
);
|
||||||
|
fs.chmodSync(path.join(binDir, 'mpv'), 0o755);
|
||||||
|
|
||||||
|
const env = {
|
||||||
|
...makeTestEnv(homeDir, xdgConfigHome),
|
||||||
|
PATH: `${binDir}${path.delimiter}${process.env.Path || process.env.PATH || ''}`,
|
||||||
|
Path: `${binDir}${path.delimiter}${process.env.Path || process.env.PATH || ''}`,
|
||||||
|
SUBMINER_APPIMAGE_PATH: appPath,
|
||||||
|
SUBMINER_TEST_YTDLP_LOG: ytdlpLogPath,
|
||||||
|
SUBMINER_TEST_MPV_ORDER: mpvCapturePath,
|
||||||
|
SUBMINER_TEST_MPV_ARGS: mpvArgsPath,
|
||||||
|
};
|
||||||
|
const result = runLauncher(['youtube', 'https://www.youtube.com/watch?v=test123'], env);
|
||||||
|
|
||||||
|
assert.equal(result.status, 0, `stdout:\n${result.stdout}\nstderr:\n${result.stderr}`);
|
||||||
|
assert.equal(fs.readFileSync(mpvCapturePath, 'utf8').trim(), 'generated-before-mpv');
|
||||||
|
assert.match(
|
||||||
|
fs.readFileSync(mpvArgsPath, 'utf8'),
|
||||||
|
/https:\/\/www\.youtube\.com\/watch\?v=test123/,
|
||||||
|
);
|
||||||
|
assert.match(fs.readFileSync(ytdlpLogPath, 'utf8'), /--dump-single-json/);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
test('dictionary command forwards --dictionary and --dictionary-target to app command path', () => {
|
test('dictionary command forwards --dictionary and --dictionary-target to app command path', () => {
|
||||||
withTempDir((root) => {
|
withTempDir((root) => {
|
||||||
const homeDir = path.join(root, 'home');
|
const homeDir = path.join(root, 'home');
|
||||||
@@ -400,15 +544,20 @@ test('parseJellyfinPreviewAuthResponse returns null for invalid payloads', () =>
|
|||||||
});
|
});
|
||||||
|
|
||||||
test('deriveJellyfinTokenStorePath resolves alongside config path', () => {
|
test('deriveJellyfinTokenStorePath resolves alongside config path', () => {
|
||||||
const tokenPath = deriveJellyfinTokenStorePath('/home/test/.config/SubMiner/config.jsonc');
|
const configPath = path.join('/home/test', '.config', 'SubMiner', 'config.jsonc');
|
||||||
assert.equal(tokenPath, '/home/test/.config/SubMiner/jellyfin-token-store.json');
|
const tokenPath = deriveJellyfinTokenStorePath(configPath);
|
||||||
|
assert.equal(tokenPath, path.join(path.dirname(configPath), 'jellyfin-token-store.json'));
|
||||||
});
|
});
|
||||||
|
|
||||||
test('hasStoredJellyfinSession checks token-store existence', () => {
|
test('hasStoredJellyfinSession checks token-store existence', () => {
|
||||||
const exists = (candidate: string): boolean =>
|
const configPath = path.join('/home/test', '.config', 'SubMiner', 'config.jsonc');
|
||||||
candidate === '/home/test/.config/SubMiner/jellyfin-token-store.json';
|
const tokenPath = deriveJellyfinTokenStorePath(configPath);
|
||||||
assert.equal(hasStoredJellyfinSession('/home/test/.config/SubMiner/config.jsonc', exists), true);
|
const exists = (candidate: string): boolean => candidate === tokenPath;
|
||||||
assert.equal(hasStoredJellyfinSession('/home/test/.config/Other/alt.jsonc', exists), false);
|
assert.equal(hasStoredJellyfinSession(configPath, exists), true);
|
||||||
|
assert.equal(
|
||||||
|
hasStoredJellyfinSession(path.join('/home/test', '.config', 'Other', 'alt.jsonc'), exists),
|
||||||
|
false,
|
||||||
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
test('shouldRetryWithStartForNoRunningInstance matches expected app lifecycle error', () => {
|
test('shouldRetryWithStartForNoRunningInstance matches expected app lifecycle error', () => {
|
||||||
|
|||||||
@@ -5,7 +5,14 @@ import path from 'node:path';
|
|||||||
import net from 'node:net';
|
import net from 'node:net';
|
||||||
import { EventEmitter } from 'node:events';
|
import { EventEmitter } from 'node:events';
|
||||||
import type { Args } from './types';
|
import type { Args } from './types';
|
||||||
import { runAppCommandCaptureOutput, startOverlay, state, waitForUnixSocketReady } from './mpv';
|
import {
|
||||||
|
cleanupPlaybackSession,
|
||||||
|
runAppCommandCaptureOutput,
|
||||||
|
shouldResolveAniSkipMetadata,
|
||||||
|
startOverlay,
|
||||||
|
state,
|
||||||
|
waitForUnixSocketReady,
|
||||||
|
} from './mpv';
|
||||||
import * as mpvModule from './mpv';
|
import * as mpvModule from './mpv';
|
||||||
|
|
||||||
function createTempSocketPath(): { dir: string; socketPath: string } {
|
function createTempSocketPath(): { dir: string; socketPath: string } {
|
||||||
@@ -73,6 +80,20 @@ test('waitForUnixSocketReady returns true when socket becomes connectable before
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test('shouldResolveAniSkipMetadata skips URL and YouTube-preloaded playback', () => {
|
||||||
|
assert.equal(shouldResolveAniSkipMetadata('/media/show.mkv', 'file'), true);
|
||||||
|
assert.equal(
|
||||||
|
shouldResolveAniSkipMetadata('https://www.youtube.com/watch?v=test123', 'url'),
|
||||||
|
false,
|
||||||
|
);
|
||||||
|
assert.equal(
|
||||||
|
shouldResolveAniSkipMetadata('/tmp/video123.webm', 'file', {
|
||||||
|
primaryPath: '/tmp/video123.ja.srt',
|
||||||
|
}),
|
||||||
|
false,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
function makeArgs(overrides: Partial<Args> = {}): Args {
|
function makeArgs(overrides: Partial<Args> = {}): Args {
|
||||||
return {
|
return {
|
||||||
backend: 'x11',
|
backend: 'x11',
|
||||||
@@ -80,16 +101,19 @@ function makeArgs(overrides: Partial<Args> = {}): Args {
|
|||||||
recursive: false,
|
recursive: false,
|
||||||
profile: '',
|
profile: '',
|
||||||
startOverlay: false,
|
startOverlay: false,
|
||||||
youtubeSubgenMode: 'off',
|
|
||||||
whisperBin: '',
|
whisperBin: '',
|
||||||
whisperModel: '',
|
whisperModel: '',
|
||||||
|
whisperVadModel: '',
|
||||||
|
whisperThreads: 4,
|
||||||
youtubeSubgenOutDir: '',
|
youtubeSubgenOutDir: '',
|
||||||
youtubeSubgenAudioFormat: 'wav',
|
youtubeSubgenAudioFormat: 'wav',
|
||||||
youtubeSubgenKeepTemp: false,
|
youtubeSubgenKeepTemp: false,
|
||||||
|
youtubeFixWithAi: false,
|
||||||
youtubePrimarySubLangs: [],
|
youtubePrimarySubLangs: [],
|
||||||
youtubeSecondarySubLangs: [],
|
youtubeSecondarySubLangs: [],
|
||||||
youtubeAudioLangs: [],
|
youtubeAudioLangs: [],
|
||||||
youtubeWhisperSourceLanguage: 'ja',
|
youtubeWhisperSourceLanguage: 'ja',
|
||||||
|
aiConfig: {},
|
||||||
useTexthooker: false,
|
useTexthooker: false,
|
||||||
autoStartOverlay: false,
|
autoStartOverlay: false,
|
||||||
texthookerOnly: false,
|
texthookerOnly: false,
|
||||||
@@ -152,3 +176,59 @@ test('startOverlay resolves without fixed 2s sleep when readiness signals arrive
|
|||||||
fs.rmSync(dir, { recursive: true, force: true });
|
fs.rmSync(dir, { recursive: true, force: true });
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test('cleanupPlaybackSession preserves background app while stopping mpv-owned children', async () => {
|
||||||
|
const { dir } = createTempSocketPath();
|
||||||
|
const appPath = path.join(dir, 'fake-subminer.sh');
|
||||||
|
const appInvocationsPath = path.join(dir, 'app-invocations.log');
|
||||||
|
fs.writeFileSync(
|
||||||
|
appPath,
|
||||||
|
`#!/bin/sh\necho \"$@\" >> ${JSON.stringify(appInvocationsPath)}\nexit 0\n`,
|
||||||
|
);
|
||||||
|
fs.chmodSync(appPath, 0o755);
|
||||||
|
|
||||||
|
const calls: string[] = [];
|
||||||
|
const overlayProc = {
|
||||||
|
killed: false,
|
||||||
|
kill: () => {
|
||||||
|
calls.push('overlay-kill');
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
} as unknown as NonNullable<typeof state.overlayProc>;
|
||||||
|
const mpvProc = {
|
||||||
|
killed: false,
|
||||||
|
kill: () => {
|
||||||
|
calls.push('mpv-kill');
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
} as unknown as NonNullable<typeof state.mpvProc>;
|
||||||
|
const helperProc = {
|
||||||
|
killed: false,
|
||||||
|
kill: () => {
|
||||||
|
calls.push('helper-kill');
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
} as unknown as NonNullable<typeof state.overlayProc>;
|
||||||
|
|
||||||
|
state.stopRequested = false;
|
||||||
|
state.appPath = appPath;
|
||||||
|
state.overlayManagedByLauncher = true;
|
||||||
|
state.overlayProc = overlayProc;
|
||||||
|
state.mpvProc = mpvProc;
|
||||||
|
state.youtubeSubgenChildren.add(helperProc);
|
||||||
|
|
||||||
|
try {
|
||||||
|
await cleanupPlaybackSession(makeArgs());
|
||||||
|
|
||||||
|
assert.deepEqual(calls, ['mpv-kill', 'helper-kill']);
|
||||||
|
assert.equal(fs.existsSync(appInvocationsPath), false);
|
||||||
|
} finally {
|
||||||
|
state.overlayProc = null;
|
||||||
|
state.mpvProc = null;
|
||||||
|
state.youtubeSubgenChildren.clear();
|
||||||
|
state.overlayManagedByLauncher = false;
|
||||||
|
state.appPath = '';
|
||||||
|
state.stopRequested = false;
|
||||||
|
fs.rmSync(dir, { recursive: true, force: true });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|||||||
119
launcher/mpv.ts
119
launcher/mpv.ts
@@ -9,8 +9,10 @@ import { log, fail, getMpvLogPath } from './log.js';
|
|||||||
import { buildSubminerScriptOpts, resolveAniSkipMetadataForFile } from './aniskip-metadata.js';
|
import { buildSubminerScriptOpts, resolveAniSkipMetadataForFile } from './aniskip-metadata.js';
|
||||||
import {
|
import {
|
||||||
commandExists,
|
commandExists,
|
||||||
|
getPathEnv,
|
||||||
isExecutable,
|
isExecutable,
|
||||||
resolveBinaryPathCandidate,
|
resolveBinaryPathCandidate,
|
||||||
|
resolveCommandInvocation,
|
||||||
realpathMaybe,
|
realpathMaybe,
|
||||||
isYoutubeTarget,
|
isYoutubeTarget,
|
||||||
uniqueNormalizedLangCodes,
|
uniqueNormalizedLangCodes,
|
||||||
@@ -27,6 +29,11 @@ export const state = {
|
|||||||
stopRequested: false,
|
stopRequested: false,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
type SpawnTarget = {
|
||||||
|
command: string;
|
||||||
|
args: string[];
|
||||||
|
};
|
||||||
|
|
||||||
const DETACHED_IDLE_MPV_PID_FILE = path.join(os.tmpdir(), 'subminer-idle-mpv.pid');
|
const DETACHED_IDLE_MPV_PID_FILE = path.join(os.tmpdir(), 'subminer-idle-mpv.pid');
|
||||||
const OVERLAY_START_SOCKET_READY_TIMEOUT_MS = 900;
|
const OVERLAY_START_SOCKET_READY_TIMEOUT_MS = 900;
|
||||||
const OVERLAY_START_COMMAND_SETTLE_TIMEOUT_MS = 700;
|
const OVERLAY_START_COMMAND_SETTLE_TIMEOUT_MS = 700;
|
||||||
@@ -199,7 +206,8 @@ export function findAppBinary(selfPath: string): string | null {
|
|||||||
if (isExecutable(candidate)) return candidate;
|
if (isExecutable(candidate)) return candidate;
|
||||||
}
|
}
|
||||||
|
|
||||||
const fromPath = process.env.PATH?.split(path.delimiter)
|
const fromPath = getPathEnv()
|
||||||
|
.split(path.delimiter)
|
||||||
.map((dir) => path.join(dir, 'subminer'))
|
.map((dir) => path.join(dir, 'subminer'))
|
||||||
.find((candidate) => isExecutable(candidate));
|
.find((candidate) => isExecutable(candidate));
|
||||||
|
|
||||||
@@ -419,6 +427,20 @@ export async function loadSubtitleIntoMpv(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export function shouldResolveAniSkipMetadata(
|
||||||
|
target: string,
|
||||||
|
targetKind: 'file' | 'url',
|
||||||
|
preloadedSubtitles?: { primaryPath?: string; secondaryPath?: string },
|
||||||
|
): boolean {
|
||||||
|
if (targetKind !== 'file') {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
if (preloadedSubtitles?.primaryPath || preloadedSubtitles?.secondaryPath) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
return !isYoutubeTarget(target);
|
||||||
|
}
|
||||||
|
|
||||||
export async function startMpv(
|
export async function startMpv(
|
||||||
target: string,
|
target: string,
|
||||||
targetKind: 'file' | 'url',
|
targetKind: 'file' | 'url',
|
||||||
@@ -456,19 +478,15 @@ export async function startMpv(
|
|||||||
log('debug', args.logLevel, `YouTube subtitle langs: ${subtitleLangs}`);
|
log('debug', args.logLevel, `YouTube subtitle langs: ${subtitleLangs}`);
|
||||||
log('debug', args.logLevel, `YouTube audio langs: ${audioLangs}`);
|
log('debug', args.logLevel, `YouTube audio langs: ${audioLangs}`);
|
||||||
mpvArgs.push(`--ytdl-format=${DEFAULT_YOUTUBE_YTDL_FORMAT}`, `--alang=${audioLangs}`);
|
mpvArgs.push(`--ytdl-format=${DEFAULT_YOUTUBE_YTDL_FORMAT}`, `--alang=${audioLangs}`);
|
||||||
|
|
||||||
if (args.youtubeSubgenMode === 'off') {
|
|
||||||
mpvArgs.push(
|
mpvArgs.push(
|
||||||
'--sub-auto=fuzzy',
|
'--sub-auto=fuzzy',
|
||||||
`--slang=${subtitleLangs}`,
|
`--slang=${subtitleLangs}`,
|
||||||
'--ytdl-raw-options-append=write-auto-subs=',
|
|
||||||
'--ytdl-raw-options-append=write-subs=',
|
'--ytdl-raw-options-append=write-subs=',
|
||||||
'--ytdl-raw-options-append=sub-format=vtt/best',
|
'--ytdl-raw-options-append=sub-format=vtt/best',
|
||||||
`--ytdl-raw-options-append=sub-langs=${subtitleLangs}`,
|
`--ytdl-raw-options-append=sub-langs=${subtitleLangs}`,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
if (preloadedSubtitles?.primaryPath) {
|
if (preloadedSubtitles?.primaryPath) {
|
||||||
mpvArgs.push(`--sub-file=${preloadedSubtitles.primaryPath}`);
|
mpvArgs.push(`--sub-file=${preloadedSubtitles.primaryPath}`);
|
||||||
@@ -479,8 +497,9 @@ export async function startMpv(
|
|||||||
if (options?.startPaused) {
|
if (options?.startPaused) {
|
||||||
mpvArgs.push('--pause=yes');
|
mpvArgs.push('--pause=yes');
|
||||||
}
|
}
|
||||||
const aniSkipMetadata =
|
const aniSkipMetadata = shouldResolveAniSkipMetadata(target, targetKind, preloadedSubtitles)
|
||||||
targetKind === 'file' ? await resolveAniSkipMetadataForFile(target) : null;
|
? await resolveAniSkipMetadataForFile(target)
|
||||||
|
: null;
|
||||||
const scriptOpts = buildSubminerScriptOpts(appPath, socketPath, aniSkipMetadata);
|
const scriptOpts = buildSubminerScriptOpts(appPath, socketPath, aniSkipMetadata);
|
||||||
if (aniSkipMetadata) {
|
if (aniSkipMetadata) {
|
||||||
log(
|
log(
|
||||||
@@ -501,7 +520,8 @@ export async function startMpv(
|
|||||||
mpvArgs.push(`--input-ipc-server=${socketPath}`);
|
mpvArgs.push(`--input-ipc-server=${socketPath}`);
|
||||||
mpvArgs.push(target);
|
mpvArgs.push(target);
|
||||||
|
|
||||||
state.mpvProc = spawn('mpv', mpvArgs, { stdio: 'inherit' });
|
const mpvTarget = resolveCommandInvocation('mpv', mpvArgs);
|
||||||
|
state.mpvProc = spawn(mpvTarget.command, mpvTarget.args, { stdio: 'inherit' });
|
||||||
}
|
}
|
||||||
|
|
||||||
async function waitForOverlayStartCommandSettled(
|
async function waitForOverlayStartCommandSettled(
|
||||||
@@ -552,7 +572,8 @@ export async function startOverlay(appPath: string, args: Args, socketPath: stri
|
|||||||
if (args.logLevel !== 'info') overlayArgs.push('--log-level', args.logLevel);
|
if (args.logLevel !== 'info') overlayArgs.push('--log-level', args.logLevel);
|
||||||
if (args.useTexthooker) overlayArgs.push('--texthooker');
|
if (args.useTexthooker) overlayArgs.push('--texthooker');
|
||||||
|
|
||||||
state.overlayProc = spawn(appPath, overlayArgs, {
|
const target = resolveAppSpawnTarget(appPath, overlayArgs);
|
||||||
|
state.overlayProc = spawn(target.command, target.args, {
|
||||||
stdio: 'inherit',
|
stdio: 'inherit',
|
||||||
env: { ...process.env, SUBMINER_MPV_LOG: getMpvLogPath() },
|
env: { ...process.env, SUBMINER_MPV_LOG: getMpvLogPath() },
|
||||||
});
|
});
|
||||||
@@ -628,6 +649,29 @@ export function stopOverlay(args: Args): void {
|
|||||||
void terminateTrackedDetachedMpv(args.logLevel);
|
void terminateTrackedDetachedMpv(args.logLevel);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export async function cleanupPlaybackSession(args: Args): Promise<void> {
|
||||||
|
if (state.mpvProc && !state.mpvProc.killed) {
|
||||||
|
try {
|
||||||
|
state.mpvProc.kill('SIGTERM');
|
||||||
|
} catch {
|
||||||
|
// ignore
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const child of state.youtubeSubgenChildren) {
|
||||||
|
if (!child.killed) {
|
||||||
|
try {
|
||||||
|
child.kill('SIGTERM');
|
||||||
|
} catch {
|
||||||
|
// ignore
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
state.youtubeSubgenChildren.clear();
|
||||||
|
|
||||||
|
await terminateTrackedDetachedMpv(args.logLevel);
|
||||||
|
}
|
||||||
|
|
||||||
function buildAppEnv(): NodeJS.ProcessEnv {
|
function buildAppEnv(): NodeJS.ProcessEnv {
|
||||||
const env: Record<string, string | undefined> = {
|
const env: Record<string, string | undefined> = {
|
||||||
...process.env,
|
...process.env,
|
||||||
@@ -648,8 +692,30 @@ function buildAppEnv(): NodeJS.ProcessEnv {
|
|||||||
return env;
|
return env;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function maybeCaptureAppArgs(appArgs: string[]): boolean {
|
||||||
|
const capturePath = process.env.SUBMINER_TEST_CAPTURE?.trim();
|
||||||
|
if (!capturePath) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
fs.writeFileSync(capturePath, `${appArgs.join('\n')}${appArgs.length > 0 ? '\n' : ''}`, 'utf8');
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
function resolveAppSpawnTarget(appPath: string, appArgs: string[]): SpawnTarget {
|
||||||
|
if (process.platform !== 'win32') {
|
||||||
|
return { command: appPath, args: appArgs };
|
||||||
|
}
|
||||||
|
return resolveCommandInvocation(appPath, appArgs);
|
||||||
|
}
|
||||||
|
|
||||||
export function runAppCommandWithInherit(appPath: string, appArgs: string[]): never {
|
export function runAppCommandWithInherit(appPath: string, appArgs: string[]): never {
|
||||||
const result = spawnSync(appPath, appArgs, {
|
if (maybeCaptureAppArgs(appArgs)) {
|
||||||
|
process.exit(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
const target = resolveAppSpawnTarget(appPath, appArgs);
|
||||||
|
const result = spawnSync(target.command, target.args, {
|
||||||
stdio: 'inherit',
|
stdio: 'inherit',
|
||||||
env: buildAppEnv(),
|
env: buildAppEnv(),
|
||||||
});
|
});
|
||||||
@@ -668,7 +734,16 @@ export function runAppCommandCaptureOutput(
|
|||||||
stderr: string;
|
stderr: string;
|
||||||
error?: Error;
|
error?: Error;
|
||||||
} {
|
} {
|
||||||
const result = spawnSync(appPath, appArgs, {
|
if (maybeCaptureAppArgs(appArgs)) {
|
||||||
|
return {
|
||||||
|
status: 0,
|
||||||
|
stdout: '',
|
||||||
|
stderr: '',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const target = resolveAppSpawnTarget(appPath, appArgs);
|
||||||
|
const result = spawnSync(target.command, target.args, {
|
||||||
env: buildAppEnv(),
|
env: buildAppEnv(),
|
||||||
encoding: 'utf8',
|
encoding: 'utf8',
|
||||||
});
|
});
|
||||||
@@ -687,8 +762,17 @@ export function runAppCommandWithInheritLogged(
|
|||||||
logLevel: LogLevel,
|
logLevel: LogLevel,
|
||||||
label: string,
|
label: string,
|
||||||
): never {
|
): never {
|
||||||
log('debug', logLevel, `${label}: launching app with args: ${appArgs.join(' ')}`);
|
if (maybeCaptureAppArgs(appArgs)) {
|
||||||
const result = spawnSync(appPath, appArgs, {
|
process.exit(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
const target = resolveAppSpawnTarget(appPath, appArgs);
|
||||||
|
log(
|
||||||
|
'debug',
|
||||||
|
logLevel,
|
||||||
|
`${label}: launching app with args: ${[target.command, ...target.args].join(' ')}`,
|
||||||
|
);
|
||||||
|
const result = spawnSync(target.command, target.args, {
|
||||||
stdio: 'inherit',
|
stdio: 'inherit',
|
||||||
env: buildAppEnv(),
|
env: buildAppEnv(),
|
||||||
});
|
});
|
||||||
@@ -702,7 +786,11 @@ export function runAppCommandWithInheritLogged(
|
|||||||
export function launchAppStartDetached(appPath: string, logLevel: LogLevel): void {
|
export function launchAppStartDetached(appPath: string, logLevel: LogLevel): void {
|
||||||
const startArgs = ['--start'];
|
const startArgs = ['--start'];
|
||||||
if (logLevel !== 'info') startArgs.push('--log-level', logLevel);
|
if (logLevel !== 'info') startArgs.push('--log-level', logLevel);
|
||||||
const proc = spawn(appPath, startArgs, {
|
if (maybeCaptureAppArgs(startArgs)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const target = resolveAppSpawnTarget(appPath, startArgs);
|
||||||
|
const proc = spawn(target.command, target.args, {
|
||||||
stdio: 'ignore',
|
stdio: 'ignore',
|
||||||
detached: true,
|
detached: true,
|
||||||
env: buildAppEnv(),
|
env: buildAppEnv(),
|
||||||
@@ -732,7 +820,8 @@ export function launchMpvIdleDetached(
|
|||||||
);
|
);
|
||||||
mpvArgs.push(`--log-file=${getMpvLogPath()}`);
|
mpvArgs.push(`--log-file=${getMpvLogPath()}`);
|
||||||
mpvArgs.push(`--input-ipc-server=${socketPath}`);
|
mpvArgs.push(`--input-ipc-server=${socketPath}`);
|
||||||
const proc = spawn('mpv', mpvArgs, {
|
const mpvTarget = resolveCommandInvocation('mpv', mpvArgs);
|
||||||
|
const proc = spawn(mpvTarget.command, mpvTarget.args, {
|
||||||
stdio: 'ignore',
|
stdio: 'ignore',
|
||||||
detached: true,
|
detached: true,
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -7,22 +7,26 @@ test('waitForSetupCompletion resolves completed and cancelled states', async ()
|
|||||||
const sequence: Array<SetupState | null> = [
|
const sequence: Array<SetupState | null> = [
|
||||||
null,
|
null,
|
||||||
{
|
{
|
||||||
version: 1,
|
version: 2,
|
||||||
status: 'in_progress',
|
status: 'in_progress',
|
||||||
completedAt: null,
|
completedAt: null,
|
||||||
completionSource: null,
|
completionSource: null,
|
||||||
lastSeenYomitanDictionaryCount: 0,
|
lastSeenYomitanDictionaryCount: 0,
|
||||||
pluginInstallStatus: 'unknown',
|
pluginInstallStatus: 'unknown',
|
||||||
pluginInstallPathSummary: null,
|
pluginInstallPathSummary: null,
|
||||||
|
windowsMpvShortcutPreferences: { startMenuEnabled: true, desktopEnabled: true },
|
||||||
|
windowsMpvShortcutLastStatus: 'unknown',
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
version: 1,
|
version: 2,
|
||||||
status: 'completed',
|
status: 'completed',
|
||||||
completedAt: '2026-03-07T00:00:00.000Z',
|
completedAt: '2026-03-07T00:00:00.000Z',
|
||||||
completionSource: 'user',
|
completionSource: 'user',
|
||||||
lastSeenYomitanDictionaryCount: 1,
|
lastSeenYomitanDictionaryCount: 1,
|
||||||
pluginInstallStatus: 'skipped',
|
pluginInstallStatus: 'skipped',
|
||||||
pluginInstallPathSummary: null,
|
pluginInstallPathSummary: null,
|
||||||
|
windowsMpvShortcutPreferences: { startMenuEnabled: true, desktopEnabled: true },
|
||||||
|
windowsMpvShortcutLastStatus: 'skipped',
|
||||||
},
|
},
|
||||||
];
|
];
|
||||||
|
|
||||||
@@ -50,23 +54,27 @@ test('ensureLauncherSetupReady launches setup app and resumes only after complet
|
|||||||
if (reads === 1) return null;
|
if (reads === 1) return null;
|
||||||
if (reads === 2) {
|
if (reads === 2) {
|
||||||
return {
|
return {
|
||||||
version: 1,
|
version: 2,
|
||||||
status: 'in_progress',
|
status: 'in_progress',
|
||||||
completedAt: null,
|
completedAt: null,
|
||||||
completionSource: null,
|
completionSource: null,
|
||||||
lastSeenYomitanDictionaryCount: 0,
|
lastSeenYomitanDictionaryCount: 0,
|
||||||
pluginInstallStatus: 'unknown',
|
pluginInstallStatus: 'unknown',
|
||||||
pluginInstallPathSummary: null,
|
pluginInstallPathSummary: null,
|
||||||
|
windowsMpvShortcutPreferences: { startMenuEnabled: true, desktopEnabled: true },
|
||||||
|
windowsMpvShortcutLastStatus: 'unknown',
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
return {
|
return {
|
||||||
version: 1,
|
version: 2,
|
||||||
status: 'completed',
|
status: 'completed',
|
||||||
completedAt: '2026-03-07T00:00:00.000Z',
|
completedAt: '2026-03-07T00:00:00.000Z',
|
||||||
completionSource: 'user',
|
completionSource: 'user',
|
||||||
lastSeenYomitanDictionaryCount: 1,
|
lastSeenYomitanDictionaryCount: 1,
|
||||||
pluginInstallStatus: 'installed',
|
pluginInstallStatus: 'installed',
|
||||||
pluginInstallPathSummary: '/tmp/mpv',
|
pluginInstallPathSummary: '/tmp/mpv',
|
||||||
|
windowsMpvShortcutPreferences: { startMenuEnabled: true, desktopEnabled: true },
|
||||||
|
windowsMpvShortcutLastStatus: 'installed',
|
||||||
};
|
};
|
||||||
},
|
},
|
||||||
launchSetupApp: () => {
|
launchSetupApp: () => {
|
||||||
@@ -88,13 +96,15 @@ test('ensureLauncherSetupReady launches setup app and resumes only after complet
|
|||||||
test('ensureLauncherSetupReady fails on timeout/cancelled state', async () => {
|
test('ensureLauncherSetupReady fails on timeout/cancelled state', async () => {
|
||||||
const result = await ensureLauncherSetupReady({
|
const result = await ensureLauncherSetupReady({
|
||||||
readSetupState: () => ({
|
readSetupState: () => ({
|
||||||
version: 1,
|
version: 2,
|
||||||
status: 'cancelled',
|
status: 'cancelled',
|
||||||
completedAt: null,
|
completedAt: null,
|
||||||
completionSource: null,
|
completionSource: null,
|
||||||
lastSeenYomitanDictionaryCount: 0,
|
lastSeenYomitanDictionaryCount: 0,
|
||||||
pluginInstallStatus: 'unknown',
|
pluginInstallStatus: 'unknown',
|
||||||
pluginInstallPathSummary: null,
|
pluginInstallPathSummary: null,
|
||||||
|
windowsMpvShortcutPreferences: { startMenuEnabled: true, desktopEnabled: true },
|
||||||
|
windowsMpvShortcutLastStatus: 'unknown',
|
||||||
}),
|
}),
|
||||||
launchSetupApp: () => undefined,
|
launchSetupApp: () => undefined,
|
||||||
sleep: async () => undefined,
|
sleep: async () => undefined,
|
||||||
|
|||||||
@@ -295,7 +295,7 @@ test('launcher mpv status returns ready when socket is connectable', async () =>
|
|||||||
});
|
});
|
||||||
|
|
||||||
test(
|
test(
|
||||||
'launcher start-overlay run forwards socket/backend and stops overlay after mpv exits',
|
'launcher start-overlay run forwards socket/backend and keeps background app alive after mpv exits',
|
||||||
{ timeout: LONG_SMOKE_TEST_TIMEOUT_MS },
|
{ timeout: LONG_SMOKE_TEST_TIMEOUT_MS },
|
||||||
async () => {
|
async () => {
|
||||||
await withSmokeCase('overlay-start-stop', async (smokeCase) => {
|
await withSmokeCase('overlay-start-stop', async (smokeCase) => {
|
||||||
@@ -310,7 +310,6 @@ test(
|
|||||||
const appStartPath = path.join(smokeCase.artifactsDir, 'fake-app-start.log');
|
const appStartPath = path.join(smokeCase.artifactsDir, 'fake-app-start.log');
|
||||||
const appStopPath = path.join(smokeCase.artifactsDir, 'fake-app-stop.log');
|
const appStopPath = path.join(smokeCase.artifactsDir, 'fake-app-stop.log');
|
||||||
await waitForJsonLines(appStartPath, 1);
|
await waitForJsonLines(appStartPath, 1);
|
||||||
await waitForJsonLines(appStopPath, 1);
|
|
||||||
|
|
||||||
const appStartEntries = readJsonLines(appStartPath);
|
const appStartEntries = readJsonLines(appStartPath);
|
||||||
const appStopEntries = readJsonLines(appStopPath);
|
const appStopEntries = readJsonLines(appStopPath);
|
||||||
@@ -325,7 +324,7 @@ test(
|
|||||||
assert.match(result.stdout, /Starting SubMiner overlay/i);
|
assert.match(result.stdout, /Starting SubMiner overlay/i);
|
||||||
|
|
||||||
assert.equal(appStartEntries.length, 1);
|
assert.equal(appStartEntries.length, 1);
|
||||||
assert.equal(appStopEntries.length, 1);
|
assert.equal(appStopEntries.length, 0);
|
||||||
assert.equal(mpvEntries.length >= 1, true);
|
assert.equal(mpvEntries.length >= 1, true);
|
||||||
|
|
||||||
const appStartArgs = appStartEntries[0]?.argv;
|
const appStartArgs = appStartEntries[0]?.argv;
|
||||||
@@ -337,9 +336,6 @@ test(
|
|||||||
assert.equal((appStartArgs as string[]).includes(smokeCase.socketPath), true);
|
assert.equal((appStartArgs as string[]).includes(smokeCase.socketPath), true);
|
||||||
assert.equal(appStartEntries[0]?.subminerMpvLog, smokeCase.mpvOverlayLogPath);
|
assert.equal(appStartEntries[0]?.subminerMpvLog, smokeCase.mpvOverlayLogPath);
|
||||||
|
|
||||||
const appStopArgs = appStopEntries[0]?.argv;
|
|
||||||
assert.deepEqual(appStopArgs, ['--stop']);
|
|
||||||
|
|
||||||
const mpvFirstArgs = mpvEntries[0]?.argv;
|
const mpvFirstArgs = mpvEntries[0]?.argv;
|
||||||
assert.equal(Array.isArray(mpvFirstArgs), true);
|
assert.equal(Array.isArray(mpvFirstArgs), true);
|
||||||
assert.equal(
|
assert.equal(
|
||||||
|
|||||||
@@ -3,7 +3,14 @@ import os from 'node:os';
|
|||||||
export { VIDEO_EXTENSIONS } from '../src/shared/video-extensions.js';
|
export { VIDEO_EXTENSIONS } from '../src/shared/video-extensions.js';
|
||||||
|
|
||||||
export const ROFI_THEME_FILE = 'subminer.rasi';
|
export const ROFI_THEME_FILE = 'subminer.rasi';
|
||||||
export const DEFAULT_SOCKET_PATH = '/tmp/subminer-socket';
|
export function getDefaultSocketPath(platform: NodeJS.Platform = process.platform): string {
|
||||||
|
if (platform === 'win32') {
|
||||||
|
return '\\\\.\\pipe\\subminer-socket';
|
||||||
|
}
|
||||||
|
return '/tmp/subminer-socket';
|
||||||
|
}
|
||||||
|
|
||||||
|
export const DEFAULT_SOCKET_PATH = getDefaultSocketPath();
|
||||||
export const DEFAULT_YOUTUBE_PRIMARY_SUB_LANGS = ['ja', 'jpn'];
|
export const DEFAULT_YOUTUBE_PRIMARY_SUB_LANGS = ['ja', 'jpn'];
|
||||||
export const DEFAULT_YOUTUBE_SECONDARY_SUB_LANGS = ['en', 'eng'];
|
export const DEFAULT_YOUTUBE_SECONDARY_SUB_LANGS = ['en', 'eng'];
|
||||||
export const YOUTUBE_SUB_EXTENSIONS = new Set(['.srt', '.vtt', '.ass']);
|
export const YOUTUBE_SUB_EXTENSIONS = new Set(['.srt', '.vtt', '.ass']);
|
||||||
@@ -22,13 +29,21 @@ export const DEFAULT_YOUTUBE_SUBGEN_OUT_DIR = path.join(
|
|||||||
'subminer',
|
'subminer',
|
||||||
'youtube-subs',
|
'youtube-subs',
|
||||||
);
|
);
|
||||||
export const DEFAULT_MPV_LOG_FILE = path.join(
|
export function getDefaultMpvLogFile(options?: {
|
||||||
os.homedir(),
|
platform?: NodeJS.Platform;
|
||||||
'.config',
|
homeDir?: string;
|
||||||
'SubMiner',
|
appDataDir?: string;
|
||||||
'logs',
|
}): string {
|
||||||
`SubMiner-${new Date().toISOString().slice(0, 10)}.log`,
|
const platform = options?.platform ?? process.platform;
|
||||||
);
|
const homeDir = options?.homeDir ?? os.homedir();
|
||||||
|
const baseDir =
|
||||||
|
platform === 'win32'
|
||||||
|
? path.join(options?.appDataDir?.trim() || path.join(homeDir, 'AppData', 'Roaming'), 'SubMiner')
|
||||||
|
: path.join(homeDir, '.config', 'SubMiner');
|
||||||
|
return path.join(baseDir, 'logs', `SubMiner-${new Date().toISOString().slice(0, 10)}.log`);
|
||||||
|
}
|
||||||
|
|
||||||
|
export const DEFAULT_MPV_LOG_FILE = getDefaultMpvLogFile();
|
||||||
export const DEFAULT_YOUTUBE_YTDL_FORMAT = 'bestvideo*+bestaudio/best';
|
export const DEFAULT_YOUTUBE_YTDL_FORMAT = 'bestvideo*+bestaudio/best';
|
||||||
export const DEFAULT_JIMAKU_API_BASE_URL = 'https://jimaku.cc';
|
export const DEFAULT_JIMAKU_API_BASE_URL = 'https://jimaku.cc';
|
||||||
export const DEFAULT_MPV_SUBMINER_ARGS = [
|
export const DEFAULT_MPV_SUBMINER_ARGS = [
|
||||||
@@ -42,26 +57,38 @@ export const DEFAULT_MPV_SUBMINER_ARGS = [
|
|||||||
] as const;
|
] as const;
|
||||||
|
|
||||||
export type LogLevel = 'debug' | 'info' | 'warn' | 'error';
|
export type LogLevel = 'debug' | 'info' | 'warn' | 'error';
|
||||||
export type YoutubeSubgenMode = 'automatic' | 'preprocess' | 'off';
|
|
||||||
export type Backend = 'auto' | 'hyprland' | 'x11' | 'macos';
|
export type Backend = 'auto' | 'hyprland' | 'x11' | 'macos';
|
||||||
export type JimakuLanguagePreference = 'ja' | 'en' | 'none';
|
export type JimakuLanguagePreference = 'ja' | 'en' | 'none';
|
||||||
|
|
||||||
|
export interface LauncherAiConfig {
|
||||||
|
enabled?: boolean;
|
||||||
|
apiKey?: string;
|
||||||
|
apiKeyCommand?: string;
|
||||||
|
baseUrl?: string;
|
||||||
|
model?: string;
|
||||||
|
systemPrompt?: string;
|
||||||
|
requestTimeoutMs?: number;
|
||||||
|
}
|
||||||
|
|
||||||
export interface Args {
|
export interface Args {
|
||||||
backend: Backend;
|
backend: Backend;
|
||||||
directory: string;
|
directory: string;
|
||||||
recursive: boolean;
|
recursive: boolean;
|
||||||
profile: string;
|
profile: string;
|
||||||
startOverlay: boolean;
|
startOverlay: boolean;
|
||||||
youtubeSubgenMode: YoutubeSubgenMode;
|
|
||||||
whisperBin: string;
|
whisperBin: string;
|
||||||
whisperModel: string;
|
whisperModel: string;
|
||||||
|
whisperVadModel: string;
|
||||||
|
whisperThreads: number;
|
||||||
youtubeSubgenOutDir: string;
|
youtubeSubgenOutDir: string;
|
||||||
youtubeSubgenAudioFormat: string;
|
youtubeSubgenAudioFormat: string;
|
||||||
youtubeSubgenKeepTemp: boolean;
|
youtubeSubgenKeepTemp: boolean;
|
||||||
|
youtubeFixWithAi: boolean;
|
||||||
youtubePrimarySubLangs: string[];
|
youtubePrimarySubLangs: string[];
|
||||||
youtubeSecondarySubLangs: string[];
|
youtubeSecondarySubLangs: string[];
|
||||||
youtubeAudioLangs: string[];
|
youtubeAudioLangs: string[];
|
||||||
youtubeWhisperSourceLanguage: string;
|
youtubeWhisperSourceLanguage: string;
|
||||||
|
aiConfig: LauncherAiConfig;
|
||||||
useTexthooker: boolean;
|
useTexthooker: boolean;
|
||||||
autoStartOverlay: boolean;
|
autoStartOverlay: boolean;
|
||||||
texthookerOnly: boolean;
|
texthookerOnly: boolean;
|
||||||
@@ -96,9 +123,12 @@ export interface Args {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export interface LauncherYoutubeSubgenConfig {
|
export interface LauncherYoutubeSubgenConfig {
|
||||||
mode?: YoutubeSubgenMode;
|
|
||||||
whisperBin?: string;
|
whisperBin?: string;
|
||||||
whisperModel?: string;
|
whisperModel?: string;
|
||||||
|
whisperVadModel?: string;
|
||||||
|
whisperThreads?: number;
|
||||||
|
fixWithAi?: boolean;
|
||||||
|
ai?: LauncherAiConfig;
|
||||||
primarySubLanguages?: string[];
|
primarySubLanguages?: string[];
|
||||||
secondarySubLanguages?: string[];
|
secondarySubLanguages?: string[];
|
||||||
jimakuApiKey?: string;
|
jimakuApiKey?: string;
|
||||||
@@ -144,13 +174,15 @@ export interface SubtitleCandidate {
|
|||||||
lang: 'primary' | 'secondary';
|
lang: 'primary' | 'secondary';
|
||||||
ext: string;
|
ext: string;
|
||||||
size: number;
|
size: number;
|
||||||
source: 'manual' | 'auto' | 'whisper' | 'whisper-translate';
|
source: 'manual' | 'whisper' | 'whisper-fixed' | 'whisper-translate' | 'whisper-translate-fixed';
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface YoutubeSubgenOutputs {
|
export interface YoutubeSubgenOutputs {
|
||||||
basename: string;
|
basename: string;
|
||||||
primaryPath?: string;
|
primaryPath?: string;
|
||||||
secondaryPath?: string;
|
secondaryPath?: string;
|
||||||
|
primaryNative?: boolean;
|
||||||
|
secondaryNative?: boolean;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface MpvTrack {
|
export interface MpvTrack {
|
||||||
|
|||||||
191
launcher/util.ts
191
launcher/util.ts
@@ -18,14 +18,139 @@ export function isExecutable(filePath: string): boolean {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export function commandExists(command: string): boolean {
|
function isRunnableFile(filePath: string): boolean {
|
||||||
const pathEnv = process.env.PATH ?? '';
|
try {
|
||||||
|
if (!fs.statSync(filePath).isFile()) return false;
|
||||||
|
return process.platform === 'win32' ? true : isExecutable(filePath);
|
||||||
|
} catch {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function isPathLikeCommand(command: string): boolean {
|
||||||
|
return (
|
||||||
|
command.includes('/') ||
|
||||||
|
command.includes('\\') ||
|
||||||
|
/^[A-Za-z]:[\\/]/.test(command) ||
|
||||||
|
command.startsWith('.')
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
function getWindowsPathExts(): string[] {
|
||||||
|
const raw = process.env.PATHEXT ?? '.COM;.EXE;.BAT;.CMD';
|
||||||
|
return raw
|
||||||
|
.split(';')
|
||||||
|
.map((entry) => entry.trim())
|
||||||
|
.filter((entry) => entry.length > 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getPathEnv(): string {
|
||||||
|
const pathKey = Object.keys(process.env).find((key) => key.toLowerCase() === 'path');
|
||||||
|
return pathKey ? (process.env[pathKey] ?? '') : '';
|
||||||
|
}
|
||||||
|
|
||||||
|
function resolveExecutablePath(command: string): string | null {
|
||||||
|
const tryCandidate = (candidate: string): string | null =>
|
||||||
|
isRunnableFile(candidate) ? candidate : null;
|
||||||
|
|
||||||
|
const resolveWindowsCandidate = (candidate: string): string | null => {
|
||||||
|
const direct = tryCandidate(candidate);
|
||||||
|
if (direct) return direct;
|
||||||
|
if (path.extname(candidate)) return null;
|
||||||
|
for (const ext of getWindowsPathExts()) {
|
||||||
|
const withExt = tryCandidate(`${candidate}${ext}`);
|
||||||
|
if (withExt) return withExt;
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
};
|
||||||
|
|
||||||
|
if (isPathLikeCommand(command)) {
|
||||||
|
const resolved = path.resolve(resolvePathMaybe(command));
|
||||||
|
return process.platform === 'win32' ? resolveWindowsCandidate(resolved) : tryCandidate(resolved);
|
||||||
|
}
|
||||||
|
|
||||||
|
const pathEnv = getPathEnv();
|
||||||
for (const dir of pathEnv.split(path.delimiter)) {
|
for (const dir of pathEnv.split(path.delimiter)) {
|
||||||
if (!dir) continue;
|
if (!dir) continue;
|
||||||
const full = path.join(dir, command);
|
const candidate = path.join(dir, command);
|
||||||
if (isExecutable(full)) return true;
|
const resolved =
|
||||||
|
process.platform === 'win32' ? resolveWindowsCandidate(candidate) : tryCandidate(candidate);
|
||||||
|
if (resolved) return resolved;
|
||||||
}
|
}
|
||||||
return false;
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
function normalizeWindowsBashArg(value: string): string {
|
||||||
|
const normalized = value.replace(/\\/g, '/');
|
||||||
|
const driveMatch = normalized.match(/^([A-Za-z]):\/(.*)$/);
|
||||||
|
if (!driveMatch) {
|
||||||
|
return normalized;
|
||||||
|
}
|
||||||
|
|
||||||
|
const [, driveLetter, remainder] = driveMatch;
|
||||||
|
return `/mnt/${driveLetter!.toLowerCase()}/${remainder}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
function resolveGitBashExecutable(): string | null {
|
||||||
|
const directCandidates = [
|
||||||
|
'C:\\Program Files\\Git\\bin\\bash.exe',
|
||||||
|
'C:\\Program Files\\Git\\usr\\bin\\bash.exe',
|
||||||
|
];
|
||||||
|
for (const candidate of directCandidates) {
|
||||||
|
if (isRunnableFile(candidate)) return candidate;
|
||||||
|
}
|
||||||
|
|
||||||
|
const gitExecutable = resolveExecutablePath('git');
|
||||||
|
if (!gitExecutable) return null;
|
||||||
|
const gitDir = path.dirname(gitExecutable);
|
||||||
|
const inferredCandidates = [
|
||||||
|
path.resolve(gitDir, '..', 'bin', 'bash.exe'),
|
||||||
|
path.resolve(gitDir, '..', 'usr', 'bin', 'bash.exe'),
|
||||||
|
];
|
||||||
|
for (const candidate of inferredCandidates) {
|
||||||
|
if (isRunnableFile(candidate)) return candidate;
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
function resolveWindowsBashTarget(): {
|
||||||
|
command: string;
|
||||||
|
flavor: 'git' | 'wsl';
|
||||||
|
} {
|
||||||
|
const gitBash = resolveGitBashExecutable();
|
||||||
|
if (gitBash) {
|
||||||
|
return { command: gitBash, flavor: 'git' };
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
command: resolveExecutablePath('bash') ?? 'bash',
|
||||||
|
flavor: 'wsl',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function normalizeWindowsShellArg(value: string, flavor: 'git' | 'wsl'): string {
|
||||||
|
if (!isPathLikeCommand(value)) {
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
return flavor === 'git' ? value.replace(/\\/g, '/') : normalizeWindowsBashArg(value);
|
||||||
|
}
|
||||||
|
|
||||||
|
function readShebang(filePath: string): string {
|
||||||
|
try {
|
||||||
|
const fd = fs.openSync(filePath, 'r');
|
||||||
|
try {
|
||||||
|
const buffer = Buffer.alloc(160);
|
||||||
|
const bytesRead = fs.readSync(fd, buffer, 0, buffer.length, 0);
|
||||||
|
return buffer.toString('utf8', 0, bytesRead).split(/\r?\n/, 1)[0] ?? '';
|
||||||
|
} finally {
|
||||||
|
fs.closeSync(fd);
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
return '';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function commandExists(command: string): boolean {
|
||||||
|
return resolveExecutablePath(command) !== null;
|
||||||
}
|
}
|
||||||
|
|
||||||
export function resolvePathMaybe(input: string): string {
|
export function resolvePathMaybe(input: string): string {
|
||||||
@@ -116,6 +241,51 @@ export function inferWhisperLanguage(langCodes: string[], fallback: string): str
|
|||||||
return fallback;
|
return fallback;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export function resolveCommandInvocation(
|
||||||
|
executable: string,
|
||||||
|
args: string[],
|
||||||
|
): { command: string; args: string[] } {
|
||||||
|
if (process.platform !== 'win32') {
|
||||||
|
return { command: executable, args };
|
||||||
|
}
|
||||||
|
|
||||||
|
const resolvedExecutable = resolveExecutablePath(executable) ?? executable;
|
||||||
|
const extension = path.extname(resolvedExecutable).toLowerCase();
|
||||||
|
if (extension === '.ps1') {
|
||||||
|
return {
|
||||||
|
command: 'powershell.exe',
|
||||||
|
args: ['-NoProfile', '-ExecutionPolicy', 'Bypass', '-File', resolvedExecutable, ...args],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (extension === '.sh') {
|
||||||
|
const bashTarget = resolveWindowsBashTarget();
|
||||||
|
return {
|
||||||
|
command: bashTarget.command,
|
||||||
|
args: [
|
||||||
|
normalizeWindowsShellArg(resolvedExecutable, bashTarget.flavor),
|
||||||
|
...args.map((arg) => normalizeWindowsShellArg(arg, bashTarget.flavor)),
|
||||||
|
],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!extension) {
|
||||||
|
const shebang = readShebang(resolvedExecutable);
|
||||||
|
if (/^#!.*\b(?:sh|bash)\b/i.test(shebang)) {
|
||||||
|
const bashTarget = resolveWindowsBashTarget();
|
||||||
|
return {
|
||||||
|
command: bashTarget.command,
|
||||||
|
args: [
|
||||||
|
normalizeWindowsShellArg(resolvedExecutable, bashTarget.flavor),
|
||||||
|
...args.map((arg) => normalizeWindowsShellArg(arg, bashTarget.flavor)),
|
||||||
|
],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { command: resolvedExecutable, args };
|
||||||
|
}
|
||||||
|
|
||||||
export function runExternalCommand(
|
export function runExternalCommand(
|
||||||
executable: string,
|
executable: string,
|
||||||
args: string[],
|
args: string[],
|
||||||
@@ -129,8 +299,13 @@ export function runExternalCommand(
|
|||||||
const streamOutput = opts.streamOutput === true;
|
const streamOutput = opts.streamOutput === true;
|
||||||
|
|
||||||
return new Promise((resolve, reject) => {
|
return new Promise((resolve, reject) => {
|
||||||
log('debug', configuredLogLevel, `[${commandLabel}] spawn: ${executable} ${args.join(' ')}`);
|
const target = resolveCommandInvocation(executable, args);
|
||||||
const child = spawn(executable, args, {
|
log(
|
||||||
|
'debug',
|
||||||
|
configuredLogLevel,
|
||||||
|
`[${commandLabel}] spawn: ${target.command} ${target.args.join(' ')}`,
|
||||||
|
);
|
||||||
|
const child = spawn(target.command, target.args, {
|
||||||
stdio: ['ignore', 'pipe', 'pipe'],
|
stdio: ['ignore', 'pipe', 'pipe'],
|
||||||
env: { ...process.env, ...opts.env },
|
env: { ...process.env, ...opts.env },
|
||||||
});
|
});
|
||||||
@@ -201,7 +376,7 @@ export function runExternalCommand(
|
|||||||
`[${commandLabel}] exit code ${code ?? 1}`,
|
`[${commandLabel}] exit code ${code ?? 1}`,
|
||||||
);
|
);
|
||||||
if (code !== 0 && !allowFailure) {
|
if (code !== 0 && !allowFailure) {
|
||||||
const commandString = `${executable} ${args.join(' ')}`;
|
const commandString = `${target.command} ${target.args.join(' ')}`;
|
||||||
reject(
|
reject(
|
||||||
new Error(`Command failed (${commandString}): ${stderr.trim() || `exit code ${code}`}`),
|
new Error(`Command failed (${commandString}): ${stderr.trim() || `exit code ${code}`}`),
|
||||||
);
|
);
|
||||||
|
|||||||
@@ -1,467 +1 @@
|
|||||||
import fs from 'node:fs';
|
export { generateYoutubeSubtitles, resolveWhisperBinary } from './youtube/orchestrator.js';
|
||||||
import path from 'node:path';
|
|
||||||
import os from 'node:os';
|
|
||||||
import type { Args, SubtitleCandidate, YoutubeSubgenOutputs } from './types.js';
|
|
||||||
import { YOUTUBE_SUB_EXTENSIONS, YOUTUBE_AUDIO_EXTENSIONS } from './types.js';
|
|
||||||
import { log } from './log.js';
|
|
||||||
import {
|
|
||||||
resolvePathMaybe,
|
|
||||||
uniqueNormalizedLangCodes,
|
|
||||||
escapeRegExp,
|
|
||||||
normalizeBasename,
|
|
||||||
runExternalCommand,
|
|
||||||
commandExists,
|
|
||||||
} from './util.js';
|
|
||||||
import { state } from './mpv.js';
|
|
||||||
|
|
||||||
function toYtdlpLangPattern(langCodes: string[]): string {
|
|
||||||
return langCodes.map((lang) => `${lang}.*`).join(',');
|
|
||||||
}
|
|
||||||
|
|
||||||
function filenameHasLanguageTag(filenameLower: string, langCode: string): boolean {
|
|
||||||
const escaped = escapeRegExp(langCode);
|
|
||||||
const pattern = new RegExp(`(^|[._-])${escaped}([._-]|$)`);
|
|
||||||
return pattern.test(filenameLower);
|
|
||||||
}
|
|
||||||
|
|
||||||
function classifyLanguage(
|
|
||||||
filename: string,
|
|
||||||
primaryLangCodes: string[],
|
|
||||||
secondaryLangCodes: string[],
|
|
||||||
): 'primary' | 'secondary' | null {
|
|
||||||
const lower = filename.toLowerCase();
|
|
||||||
const primary = primaryLangCodes.some((code) => filenameHasLanguageTag(lower, code));
|
|
||||||
const secondary = secondaryLangCodes.some((code) => filenameHasLanguageTag(lower, code));
|
|
||||||
if (primary && !secondary) return 'primary';
|
|
||||||
if (secondary && !primary) return 'secondary';
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
function preferredLangLabel(langCodes: string[], fallback: string): string {
|
|
||||||
return uniqueNormalizedLangCodes(langCodes)[0] || fallback;
|
|
||||||
}
|
|
||||||
|
|
||||||
function sourceTag(source: SubtitleCandidate['source']): string {
|
|
||||||
if (source === 'manual' || source === 'auto') return `ytdlp-${source}`;
|
|
||||||
if (source === 'whisper-translate') return 'whisper-translate';
|
|
||||||
return 'whisper';
|
|
||||||
}
|
|
||||||
|
|
||||||
function pickBestCandidate(candidates: SubtitleCandidate[]): SubtitleCandidate | null {
|
|
||||||
if (candidates.length === 0) return null;
|
|
||||||
const scored = [...candidates].sort((a, b) => {
|
|
||||||
const sourceA = a.source === 'manual' ? 1 : 0;
|
|
||||||
const sourceB = b.source === 'manual' ? 1 : 0;
|
|
||||||
if (sourceA !== sourceB) return sourceB - sourceA;
|
|
||||||
const srtA = a.ext === '.srt' ? 1 : 0;
|
|
||||||
const srtB = b.ext === '.srt' ? 1 : 0;
|
|
||||||
if (srtA !== srtB) return srtB - srtA;
|
|
||||||
return b.size - a.size;
|
|
||||||
});
|
|
||||||
return scored[0] ?? null;
|
|
||||||
}
|
|
||||||
|
|
||||||
function scanSubtitleCandidates(
|
|
||||||
tempDir: string,
|
|
||||||
knownSet: Set<string>,
|
|
||||||
source: 'manual' | 'auto',
|
|
||||||
primaryLangCodes: string[],
|
|
||||||
secondaryLangCodes: string[],
|
|
||||||
): SubtitleCandidate[] {
|
|
||||||
const entries = fs.readdirSync(tempDir);
|
|
||||||
const out: SubtitleCandidate[] = [];
|
|
||||||
for (const name of entries) {
|
|
||||||
const fullPath = path.join(tempDir, name);
|
|
||||||
if (knownSet.has(fullPath)) continue;
|
|
||||||
let stat: fs.Stats;
|
|
||||||
try {
|
|
||||||
stat = fs.statSync(fullPath);
|
|
||||||
} catch {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
if (!stat.isFile()) continue;
|
|
||||||
const ext = path.extname(fullPath).toLowerCase();
|
|
||||||
if (!YOUTUBE_SUB_EXTENSIONS.has(ext)) continue;
|
|
||||||
const lang = classifyLanguage(name, primaryLangCodes, secondaryLangCodes);
|
|
||||||
if (!lang) continue;
|
|
||||||
out.push({ path: fullPath, lang, ext, size: stat.size, source });
|
|
||||||
}
|
|
||||||
return out;
|
|
||||||
}
|
|
||||||
|
|
||||||
async function convertToSrt(
|
|
||||||
inputPath: string,
|
|
||||||
tempDir: string,
|
|
||||||
langLabel: string,
|
|
||||||
): Promise<string> {
|
|
||||||
if (path.extname(inputPath).toLowerCase() === '.srt') return inputPath;
|
|
||||||
const outputPath = path.join(tempDir, `converted.${langLabel}.srt`);
|
|
||||||
await runExternalCommand('ffmpeg', ['-y', '-loglevel', 'error', '-i', inputPath, outputPath]);
|
|
||||||
return outputPath;
|
|
||||||
}
|
|
||||||
|
|
||||||
function findAudioFile(tempDir: string, preferredExt: string): string | null {
|
|
||||||
const entries = fs.readdirSync(tempDir);
|
|
||||||
const audioFiles: Array<{ path: string; ext: string; mtimeMs: number }> = [];
|
|
||||||
for (const name of entries) {
|
|
||||||
const fullPath = path.join(tempDir, name);
|
|
||||||
let stat: fs.Stats;
|
|
||||||
try {
|
|
||||||
stat = fs.statSync(fullPath);
|
|
||||||
} catch {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
if (!stat.isFile()) continue;
|
|
||||||
const ext = path.extname(name).toLowerCase();
|
|
||||||
if (!YOUTUBE_AUDIO_EXTENSIONS.has(ext)) continue;
|
|
||||||
audioFiles.push({ path: fullPath, ext, mtimeMs: stat.mtimeMs });
|
|
||||||
}
|
|
||||||
if (audioFiles.length === 0) return null;
|
|
||||||
const preferred = audioFiles.find((entry) => entry.ext === `.${preferredExt.toLowerCase()}`);
|
|
||||||
if (preferred) return preferred.path;
|
|
||||||
audioFiles.sort((a, b) => b.mtimeMs - a.mtimeMs);
|
|
||||||
return audioFiles[0]?.path ?? null;
|
|
||||||
}
|
|
||||||
|
|
||||||
async function runWhisper(
|
|
||||||
whisperBin: string,
|
|
||||||
modelPath: string,
|
|
||||||
audioPath: string,
|
|
||||||
language: string,
|
|
||||||
translate: boolean,
|
|
||||||
outputPrefix: string,
|
|
||||||
): Promise<string> {
|
|
||||||
const args = [
|
|
||||||
'-m',
|
|
||||||
modelPath,
|
|
||||||
'-f',
|
|
||||||
audioPath,
|
|
||||||
'--output-srt',
|
|
||||||
'--output-file',
|
|
||||||
outputPrefix,
|
|
||||||
'--language',
|
|
||||||
language,
|
|
||||||
];
|
|
||||||
if (translate) args.push('--translate');
|
|
||||||
await runExternalCommand(whisperBin, args, {
|
|
||||||
commandLabel: 'whisper',
|
|
||||||
streamOutput: true,
|
|
||||||
});
|
|
||||||
const outputPath = `${outputPrefix}.srt`;
|
|
||||||
if (!fs.existsSync(outputPath)) {
|
|
||||||
throw new Error(`whisper output not found: ${outputPath}`);
|
|
||||||
}
|
|
||||||
return outputPath;
|
|
||||||
}
|
|
||||||
|
|
||||||
async function convertAudioForWhisper(inputPath: string, tempDir: string): Promise<string> {
|
|
||||||
const wavPath = path.join(tempDir, 'whisper-input.wav');
|
|
||||||
await runExternalCommand('ffmpeg', [
|
|
||||||
'-y',
|
|
||||||
'-loglevel',
|
|
||||||
'error',
|
|
||||||
'-i',
|
|
||||||
inputPath,
|
|
||||||
'-ar',
|
|
||||||
'16000',
|
|
||||||
'-ac',
|
|
||||||
'1',
|
|
||||||
'-c:a',
|
|
||||||
'pcm_s16le',
|
|
||||||
wavPath,
|
|
||||||
]);
|
|
||||||
if (!fs.existsSync(wavPath)) {
|
|
||||||
throw new Error(`Failed to prepare whisper audio input: ${wavPath}`);
|
|
||||||
}
|
|
||||||
return wavPath;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function resolveWhisperBinary(args: Args): string | null {
|
|
||||||
const explicit = args.whisperBin.trim();
|
|
||||||
if (explicit) return resolvePathMaybe(explicit);
|
|
||||||
if (commandExists('whisper-cli')) return 'whisper-cli';
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function generateYoutubeSubtitles(
|
|
||||||
target: string,
|
|
||||||
args: Args,
|
|
||||||
onReady?: (lang: 'primary' | 'secondary', pathToLoad: string) => Promise<void>,
|
|
||||||
): Promise<YoutubeSubgenOutputs> {
|
|
||||||
const outDir = path.resolve(resolvePathMaybe(args.youtubeSubgenOutDir));
|
|
||||||
fs.mkdirSync(outDir, { recursive: true });
|
|
||||||
|
|
||||||
const primaryLangCodes = uniqueNormalizedLangCodes(args.youtubePrimarySubLangs);
|
|
||||||
const secondaryLangCodes = uniqueNormalizedLangCodes(args.youtubeSecondarySubLangs);
|
|
||||||
const primaryLabel = preferredLangLabel(primaryLangCodes, 'primary');
|
|
||||||
const secondaryLabel = preferredLangLabel(secondaryLangCodes, 'secondary');
|
|
||||||
const secondaryCanUseWhisperTranslate =
|
|
||||||
secondaryLangCodes.includes('en') || secondaryLangCodes.includes('eng');
|
|
||||||
const ytdlpManualLangs = toYtdlpLangPattern([...primaryLangCodes, ...secondaryLangCodes]);
|
|
||||||
|
|
||||||
const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-yt-subgen-'));
|
|
||||||
const knownFiles = new Set<string>();
|
|
||||||
let keepTemp = args.youtubeSubgenKeepTemp;
|
|
||||||
|
|
||||||
const publishTrack = async (
|
|
||||||
lang: 'primary' | 'secondary',
|
|
||||||
source: SubtitleCandidate['source'],
|
|
||||||
selectedPath: string,
|
|
||||||
basename: string,
|
|
||||||
): Promise<string> => {
|
|
||||||
const langLabel = lang === 'primary' ? primaryLabel : secondaryLabel;
|
|
||||||
const taggedPath = path.join(outDir, `${basename}.${langLabel}.${sourceTag(source)}.srt`);
|
|
||||||
const aliasPath = path.join(outDir, `${basename}.${langLabel}.srt`);
|
|
||||||
fs.copyFileSync(selectedPath, taggedPath);
|
|
||||||
fs.copyFileSync(taggedPath, aliasPath);
|
|
||||||
log('info', args.logLevel, `Generated subtitle (${langLabel}, ${source}) -> ${aliasPath}`);
|
|
||||||
if (onReady) await onReady(lang, aliasPath);
|
|
||||||
return aliasPath;
|
|
||||||
};
|
|
||||||
|
|
||||||
try {
|
|
||||||
log('debug', args.logLevel, `YouTube subtitle temp dir: ${tempDir}`);
|
|
||||||
const meta = await runExternalCommand(
|
|
||||||
'yt-dlp',
|
|
||||||
['--dump-single-json', '--no-warnings', target],
|
|
||||||
{
|
|
||||||
captureStdout: true,
|
|
||||||
logLevel: args.logLevel,
|
|
||||||
commandLabel: 'yt-dlp:meta',
|
|
||||||
},
|
|
||||||
state.youtubeSubgenChildren,
|
|
||||||
);
|
|
||||||
const metadata = JSON.parse(meta.stdout) as { id?: string };
|
|
||||||
const videoId = metadata.id || `${Date.now()}`;
|
|
||||||
const basename = normalizeBasename(videoId, videoId);
|
|
||||||
|
|
||||||
await runExternalCommand(
|
|
||||||
'yt-dlp',
|
|
||||||
[
|
|
||||||
'--skip-download',
|
|
||||||
'--no-warnings',
|
|
||||||
'--write-subs',
|
|
||||||
'--sub-format',
|
|
||||||
'srt/vtt/best',
|
|
||||||
'--sub-langs',
|
|
||||||
ytdlpManualLangs,
|
|
||||||
'-o',
|
|
||||||
path.join(tempDir, '%(id)s.%(ext)s'),
|
|
||||||
target,
|
|
||||||
],
|
|
||||||
{
|
|
||||||
allowFailure: true,
|
|
||||||
logLevel: args.logLevel,
|
|
||||||
commandLabel: 'yt-dlp:manual-subs',
|
|
||||||
streamOutput: true,
|
|
||||||
},
|
|
||||||
state.youtubeSubgenChildren,
|
|
||||||
);
|
|
||||||
|
|
||||||
const manualSubs = scanSubtitleCandidates(
|
|
||||||
tempDir,
|
|
||||||
knownFiles,
|
|
||||||
'manual',
|
|
||||||
primaryLangCodes,
|
|
||||||
secondaryLangCodes,
|
|
||||||
);
|
|
||||||
for (const sub of manualSubs) knownFiles.add(sub.path);
|
|
||||||
let primaryCandidates = manualSubs.filter((entry) => entry.lang === 'primary');
|
|
||||||
let secondaryCandidates = manualSubs.filter((entry) => entry.lang === 'secondary');
|
|
||||||
|
|
||||||
const missingAuto: string[] = [];
|
|
||||||
if (primaryCandidates.length === 0) missingAuto.push(toYtdlpLangPattern(primaryLangCodes));
|
|
||||||
if (secondaryCandidates.length === 0) missingAuto.push(toYtdlpLangPattern(secondaryLangCodes));
|
|
||||||
|
|
||||||
if (missingAuto.length > 0) {
|
|
||||||
await runExternalCommand(
|
|
||||||
'yt-dlp',
|
|
||||||
[
|
|
||||||
'--skip-download',
|
|
||||||
'--no-warnings',
|
|
||||||
'--write-auto-subs',
|
|
||||||
'--sub-format',
|
|
||||||
'srt/vtt/best',
|
|
||||||
'--sub-langs',
|
|
||||||
missingAuto.join(','),
|
|
||||||
'-o',
|
|
||||||
path.join(tempDir, '%(id)s.%(ext)s'),
|
|
||||||
target,
|
|
||||||
],
|
|
||||||
{
|
|
||||||
allowFailure: true,
|
|
||||||
logLevel: args.logLevel,
|
|
||||||
commandLabel: 'yt-dlp:auto-subs',
|
|
||||||
streamOutput: true,
|
|
||||||
},
|
|
||||||
state.youtubeSubgenChildren,
|
|
||||||
);
|
|
||||||
|
|
||||||
const autoSubs = scanSubtitleCandidates(
|
|
||||||
tempDir,
|
|
||||||
knownFiles,
|
|
||||||
'auto',
|
|
||||||
primaryLangCodes,
|
|
||||||
secondaryLangCodes,
|
|
||||||
);
|
|
||||||
for (const sub of autoSubs) knownFiles.add(sub.path);
|
|
||||||
primaryCandidates = primaryCandidates.concat(
|
|
||||||
autoSubs.filter((entry) => entry.lang === 'primary'),
|
|
||||||
);
|
|
||||||
secondaryCandidates = secondaryCandidates.concat(
|
|
||||||
autoSubs.filter((entry) => entry.lang === 'secondary'),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
let primaryAlias = '';
|
|
||||||
let secondaryAlias = '';
|
|
||||||
const selectedPrimary = pickBestCandidate(primaryCandidates);
|
|
||||||
const selectedSecondary = pickBestCandidate(secondaryCandidates);
|
|
||||||
|
|
||||||
if (selectedPrimary) {
|
|
||||||
const srt = await convertToSrt(selectedPrimary.path, tempDir, primaryLabel);
|
|
||||||
primaryAlias = await publishTrack('primary', selectedPrimary.source, srt, basename);
|
|
||||||
}
|
|
||||||
if (selectedSecondary) {
|
|
||||||
const srt = await convertToSrt(selectedSecondary.path, tempDir, secondaryLabel);
|
|
||||||
secondaryAlias = await publishTrack('secondary', selectedSecondary.source, srt, basename);
|
|
||||||
}
|
|
||||||
|
|
||||||
const needsPrimaryWhisper = !selectedPrimary;
|
|
||||||
const needsSecondaryWhisper = !selectedSecondary && secondaryCanUseWhisperTranslate;
|
|
||||||
if (needsPrimaryWhisper || needsSecondaryWhisper) {
|
|
||||||
const whisperBin = resolveWhisperBinary(args);
|
|
||||||
const modelPath = args.whisperModel.trim()
|
|
||||||
? path.resolve(resolvePathMaybe(args.whisperModel.trim()))
|
|
||||||
: '';
|
|
||||||
const hasWhisperFallback = !!whisperBin && !!modelPath && fs.existsSync(modelPath);
|
|
||||||
|
|
||||||
if (!hasWhisperFallback) {
|
|
||||||
log(
|
|
||||||
'warn',
|
|
||||||
args.logLevel,
|
|
||||||
'Whisper fallback is not configured; continuing with available subtitle tracks.',
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
try {
|
|
||||||
await runExternalCommand(
|
|
||||||
'yt-dlp',
|
|
||||||
[
|
|
||||||
'-f',
|
|
||||||
'bestaudio/best',
|
|
||||||
'--extract-audio',
|
|
||||||
'--audio-format',
|
|
||||||
args.youtubeSubgenAudioFormat,
|
|
||||||
'--no-warnings',
|
|
||||||
'-o',
|
|
||||||
path.join(tempDir, '%(id)s.%(ext)s'),
|
|
||||||
target,
|
|
||||||
],
|
|
||||||
{
|
|
||||||
logLevel: args.logLevel,
|
|
||||||
commandLabel: 'yt-dlp:audio',
|
|
||||||
streamOutput: true,
|
|
||||||
},
|
|
||||||
state.youtubeSubgenChildren,
|
|
||||||
);
|
|
||||||
const audioPath = findAudioFile(tempDir, args.youtubeSubgenAudioFormat);
|
|
||||||
if (!audioPath) {
|
|
||||||
throw new Error('Audio extraction succeeded, but no audio file was found.');
|
|
||||||
}
|
|
||||||
const whisperAudioPath = await convertAudioForWhisper(audioPath, tempDir);
|
|
||||||
|
|
||||||
if (needsPrimaryWhisper) {
|
|
||||||
try {
|
|
||||||
const primaryPrefix = path.join(tempDir, `${basename}.${primaryLabel}`);
|
|
||||||
const primarySrt = await runWhisper(
|
|
||||||
whisperBin!,
|
|
||||||
modelPath,
|
|
||||||
whisperAudioPath,
|
|
||||||
args.youtubeWhisperSourceLanguage,
|
|
||||||
false,
|
|
||||||
primaryPrefix,
|
|
||||||
);
|
|
||||||
primaryAlias = await publishTrack('primary', 'whisper', primarySrt, basename);
|
|
||||||
} catch (error) {
|
|
||||||
log(
|
|
||||||
'warn',
|
|
||||||
args.logLevel,
|
|
||||||
`Failed to generate primary subtitle via whisper fallback: ${(error as Error).message}`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (needsSecondaryWhisper) {
|
|
||||||
try {
|
|
||||||
const secondaryPrefix = path.join(tempDir, `${basename}.${secondaryLabel}`);
|
|
||||||
const secondarySrt = await runWhisper(
|
|
||||||
whisperBin!,
|
|
||||||
modelPath,
|
|
||||||
whisperAudioPath,
|
|
||||||
args.youtubeWhisperSourceLanguage,
|
|
||||||
true,
|
|
||||||
secondaryPrefix,
|
|
||||||
);
|
|
||||||
secondaryAlias = await publishTrack(
|
|
||||||
'secondary',
|
|
||||||
'whisper-translate',
|
|
||||||
secondarySrt,
|
|
||||||
basename,
|
|
||||||
);
|
|
||||||
} catch (error) {
|
|
||||||
log(
|
|
||||||
'warn',
|
|
||||||
args.logLevel,
|
|
||||||
`Failed to generate secondary subtitle via whisper fallback: ${(error as Error).message}`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
log(
|
|
||||||
'warn',
|
|
||||||
args.logLevel,
|
|
||||||
`Whisper fallback pipeline failed: ${(error as Error).message}`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!secondaryCanUseWhisperTranslate && !selectedSecondary) {
|
|
||||||
log(
|
|
||||||
'warn',
|
|
||||||
args.logLevel,
|
|
||||||
`Secondary subtitle language (${secondaryLabel}) has no whisper translate fallback; relying on yt-dlp subtitles only.`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!primaryAlias && !secondaryAlias) {
|
|
||||||
throw new Error('Failed to generate any subtitle tracks.');
|
|
||||||
}
|
|
||||||
if (!primaryAlias || !secondaryAlias) {
|
|
||||||
log(
|
|
||||||
'warn',
|
|
||||||
args.logLevel,
|
|
||||||
`Generated partial subtitle result: primary=${primaryAlias ? 'ok' : 'missing'}, secondary=${secondaryAlias ? 'ok' : 'missing'}`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
basename,
|
|
||||||
primaryPath: primaryAlias || undefined,
|
|
||||||
secondaryPath: secondaryAlias || undefined,
|
|
||||||
};
|
|
||||||
} catch (error) {
|
|
||||||
keepTemp = true;
|
|
||||||
throw error;
|
|
||||||
} finally {
|
|
||||||
if (keepTemp) {
|
|
||||||
log('warn', args.logLevel, `Keeping subtitle temp dir: ${tempDir}`);
|
|
||||||
} else {
|
|
||||||
try {
|
|
||||||
fs.rmSync(tempDir, { recursive: true, force: true });
|
|
||||||
} catch {
|
|
||||||
// ignore cleanup failures
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|||||||
84
launcher/youtube/audio-extraction.ts
Normal file
84
launcher/youtube/audio-extraction.ts
Normal file
@@ -0,0 +1,84 @@
|
|||||||
|
import fs from 'node:fs';
|
||||||
|
import path from 'node:path';
|
||||||
|
|
||||||
|
import type { Args } from '../types.js';
|
||||||
|
import { YOUTUBE_AUDIO_EXTENSIONS } from '../types.js';
|
||||||
|
import { runExternalCommand } from '../util.js';
|
||||||
|
|
||||||
|
export function findAudioFile(tempDir: string, preferredExt: string): string | null {
|
||||||
|
const entries = fs.readdirSync(tempDir);
|
||||||
|
const audioFiles: Array<{ path: string; ext: string; mtimeMs: number }> = [];
|
||||||
|
for (const name of entries) {
|
||||||
|
const fullPath = path.join(tempDir, name);
|
||||||
|
let stat: fs.Stats;
|
||||||
|
try {
|
||||||
|
stat = fs.statSync(fullPath);
|
||||||
|
} catch {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (!stat.isFile()) continue;
|
||||||
|
const ext = path.extname(name).toLowerCase();
|
||||||
|
if (!YOUTUBE_AUDIO_EXTENSIONS.has(ext)) continue;
|
||||||
|
audioFiles.push({ path: fullPath, ext, mtimeMs: stat.mtimeMs });
|
||||||
|
}
|
||||||
|
if (audioFiles.length === 0) return null;
|
||||||
|
const preferred = audioFiles.find((entry) => entry.ext === `.${preferredExt.toLowerCase()}`);
|
||||||
|
if (preferred) return preferred.path;
|
||||||
|
audioFiles.sort((a, b) => b.mtimeMs - a.mtimeMs);
|
||||||
|
return audioFiles[0]?.path ?? null;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function convertAudioForWhisper(inputPath: string, tempDir: string): Promise<string> {
|
||||||
|
const wavPath = path.join(tempDir, 'whisper-input.wav');
|
||||||
|
await runExternalCommand('ffmpeg', [
|
||||||
|
'-y',
|
||||||
|
'-loglevel',
|
||||||
|
'error',
|
||||||
|
'-i',
|
||||||
|
inputPath,
|
||||||
|
'-ar',
|
||||||
|
'16000',
|
||||||
|
'-ac',
|
||||||
|
'1',
|
||||||
|
'-c:a',
|
||||||
|
'pcm_s16le',
|
||||||
|
wavPath,
|
||||||
|
]);
|
||||||
|
if (!fs.existsSync(wavPath)) {
|
||||||
|
throw new Error(`Failed to prepare whisper audio input: ${wavPath}`);
|
||||||
|
}
|
||||||
|
return wavPath;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function downloadYoutubeAudio(
|
||||||
|
target: string,
|
||||||
|
args: Args,
|
||||||
|
tempDir: string,
|
||||||
|
childTracker?: Set<ReturnType<typeof import('node:child_process').spawn>>,
|
||||||
|
): Promise<string> {
|
||||||
|
await runExternalCommand(
|
||||||
|
'yt-dlp',
|
||||||
|
[
|
||||||
|
'-f',
|
||||||
|
'bestaudio/best',
|
||||||
|
'--extract-audio',
|
||||||
|
'--audio-format',
|
||||||
|
args.youtubeSubgenAudioFormat,
|
||||||
|
'--no-warnings',
|
||||||
|
'-o',
|
||||||
|
path.join(tempDir, '%(id)s.%(ext)s'),
|
||||||
|
target,
|
||||||
|
],
|
||||||
|
{
|
||||||
|
logLevel: args.logLevel,
|
||||||
|
commandLabel: 'yt-dlp:audio',
|
||||||
|
streamOutput: true,
|
||||||
|
},
|
||||||
|
childTracker,
|
||||||
|
);
|
||||||
|
const audioPath = findAudioFile(tempDir, args.youtubeSubgenAudioFormat);
|
||||||
|
if (!audioPath) {
|
||||||
|
throw new Error('Audio extraction succeeded, but no audio file was found.');
|
||||||
|
}
|
||||||
|
return audioPath;
|
||||||
|
}
|
||||||
99
launcher/youtube/manual-subs.ts
Normal file
99
launcher/youtube/manual-subs.ts
Normal file
@@ -0,0 +1,99 @@
|
|||||||
|
import fs from 'node:fs';
|
||||||
|
import path from 'node:path';
|
||||||
|
|
||||||
|
import type { SubtitleCandidate } from '../types.js';
|
||||||
|
import { YOUTUBE_SUB_EXTENSIONS } from '../types.js';
|
||||||
|
import { escapeRegExp, runExternalCommand } from '../util.js';
|
||||||
|
|
||||||
|
function filenameHasLanguageTag(filenameLower: string, langCode: string): boolean {
|
||||||
|
const escaped = escapeRegExp(langCode);
|
||||||
|
const pattern = new RegExp(`(^|[._-])${escaped}([._-]|$)`);
|
||||||
|
return pattern.test(filenameLower);
|
||||||
|
}
|
||||||
|
|
||||||
|
function classifyLanguage(
|
||||||
|
filename: string,
|
||||||
|
primaryLangCodes: string[],
|
||||||
|
secondaryLangCodes: string[],
|
||||||
|
): 'primary' | 'secondary' | null {
|
||||||
|
const lower = filename.toLowerCase();
|
||||||
|
const primary = primaryLangCodes.some((code) => filenameHasLanguageTag(lower, code));
|
||||||
|
const secondary = secondaryLangCodes.some((code) => filenameHasLanguageTag(lower, code));
|
||||||
|
if (primary && !secondary) return 'primary';
|
||||||
|
if (secondary && !primary) return 'secondary';
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function toYtdlpLangPattern(langCodes: string[]): string {
|
||||||
|
return langCodes.map((lang) => `${lang}.*`).join(',');
|
||||||
|
}
|
||||||
|
|
||||||
|
export function scanSubtitleCandidates(
|
||||||
|
tempDir: string,
|
||||||
|
knownSet: Set<string>,
|
||||||
|
source: SubtitleCandidate['source'],
|
||||||
|
primaryLangCodes: string[],
|
||||||
|
secondaryLangCodes: string[],
|
||||||
|
): SubtitleCandidate[] {
|
||||||
|
const entries = fs.readdirSync(tempDir);
|
||||||
|
const out: SubtitleCandidate[] = [];
|
||||||
|
for (const name of entries) {
|
||||||
|
const fullPath = path.join(tempDir, name);
|
||||||
|
if (knownSet.has(fullPath)) continue;
|
||||||
|
let stat: fs.Stats;
|
||||||
|
try {
|
||||||
|
stat = fs.statSync(fullPath);
|
||||||
|
} catch {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (!stat.isFile()) continue;
|
||||||
|
const ext = path.extname(fullPath).toLowerCase();
|
||||||
|
if (!YOUTUBE_SUB_EXTENSIONS.has(ext)) continue;
|
||||||
|
const lang = classifyLanguage(name, primaryLangCodes, secondaryLangCodes);
|
||||||
|
if (!lang) continue;
|
||||||
|
out.push({ path: fullPath, lang, ext, size: stat.size, source });
|
||||||
|
}
|
||||||
|
return out;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function pickBestCandidate(candidates: SubtitleCandidate[]): SubtitleCandidate | null {
|
||||||
|
if (candidates.length === 0) return null;
|
||||||
|
const scored = [...candidates].sort((a, b) => {
|
||||||
|
const srtA = a.ext === '.srt' ? 1 : 0;
|
||||||
|
const srtB = b.ext === '.srt' ? 1 : 0;
|
||||||
|
if (srtA !== srtB) return srtB - srtA;
|
||||||
|
return b.size - a.size;
|
||||||
|
});
|
||||||
|
return scored[0] ?? null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Asks yt-dlp to download only manually-authored subtitles (no media) for
 * `target` into `tempDir`, restricted to languages matching `langPattern`.
 *
 * Runs with `allowFailure: true`, so a video without matching subtitles does
 * not reject — callers detect success by scanning `tempDir` afterwards.
 */
export async function downloadManualSubtitles(
  target: string,
  tempDir: string,
  langPattern: string,
  logLevel: import('../types.js').LogLevel,
  childTracker?: Set<ReturnType<typeof import('node:child_process').spawn>>,
): Promise<void> {
  await runExternalCommand(
    'yt-dlp',
    [
      '--skip-download',
      '--no-warnings',
      '--write-subs', // manual subtitles only; auto-generated captions are intentionally not requested
      '--sub-format',
      'srt/vtt/best',
      '--sub-langs',
      langPattern,
      '-o',
      path.join(tempDir, '%(id)s.%(ext)s'),
      target,
    ],
    {
      allowFailure: true, // a video with no matching subtitles is an expected outcome
      logLevel,
      commandLabel: 'yt-dlp:manual-subs',
      streamOutput: true,
    },
    childTracker,
  );
}
|
||||||
58
launcher/youtube/orchestrator.test.ts
Normal file
58
launcher/youtube/orchestrator.test.ts
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
import test from 'node:test';
|
||||||
|
import assert from 'node:assert/strict';
|
||||||
|
|
||||||
|
import { planYoutubeSubtitleGeneration } from './orchestrator';
|
||||||
|
|
||||||
|
// Policy invariants: manual subtitles are always fetched, YouTube auto-subs
// never are, and whisper only fills in tracks that have no manual source.
test('planYoutubeSubtitleGeneration prefers manual subtitles and never schedules auto-subs', () => {
  assert.deepEqual(
    planYoutubeSubtitleGeneration({
      hasPrimaryManualSubtitle: true,
      hasSecondaryManualSubtitle: false,
      secondaryCanTranslate: true,
    }),
    {
      fetchManualSubtitles: true,
      fetchAutoSubtitles: false,
      publishPrimaryManualSubtitle: false,
      publishSecondaryManualSubtitle: false,
      generatePrimarySubtitle: false,
      generateSecondarySubtitle: true,
    },
  );
});

// Only the track with no manual source (primary here) gets generated.
test('planYoutubeSubtitleGeneration generates only missing tracks', () => {
  assert.deepEqual(
    planYoutubeSubtitleGeneration({
      hasPrimaryManualSubtitle: false,
      hasSecondaryManualSubtitle: true,
      secondaryCanTranslate: true,
    }),
    {
      fetchManualSubtitles: true,
      fetchAutoSubtitles: false,
      publishPrimaryManualSubtitle: false,
      publishSecondaryManualSubtitle: false,
      generatePrimarySubtitle: true,
      generateSecondarySubtitle: false,
    },
  );
});

// Both manual tracks present on the video: nothing is generated at all.
test('planYoutubeSubtitleGeneration reuses manual tracks already present on the YouTube video', () => {
  assert.deepEqual(
    planYoutubeSubtitleGeneration({
      hasPrimaryManualSubtitle: true,
      hasSecondaryManualSubtitle: true,
      secondaryCanTranslate: true,
    }),
    {
      fetchManualSubtitles: true,
      fetchAutoSubtitles: false,
      publishPrimaryManualSubtitle: false,
      publishSecondaryManualSubtitle: false,
      generatePrimarySubtitle: false,
      generateSecondarySubtitle: false,
    },
  );
});
|
||||||
367
launcher/youtube/orchestrator.ts
Normal file
367
launcher/youtube/orchestrator.ts
Normal file
@@ -0,0 +1,367 @@
|
|||||||
|
import fs from 'node:fs';
|
||||||
|
import os from 'node:os';
|
||||||
|
import path from 'node:path';
|
||||||
|
|
||||||
|
import type { Args, SubtitleCandidate, YoutubeSubgenOutputs } from '../types.js';
|
||||||
|
import { log } from '../log.js';
|
||||||
|
import {
|
||||||
|
commandExists,
|
||||||
|
normalizeBasename,
|
||||||
|
resolvePathMaybe,
|
||||||
|
runExternalCommand,
|
||||||
|
uniqueNormalizedLangCodes,
|
||||||
|
} from '../util.js';
|
||||||
|
import { state } from '../mpv.js';
|
||||||
|
import { downloadYoutubeAudio, convertAudioForWhisper } from './audio-extraction.js';
|
||||||
|
import {
|
||||||
|
downloadManualSubtitles,
|
||||||
|
pickBestCandidate,
|
||||||
|
scanSubtitleCandidates,
|
||||||
|
toYtdlpLangPattern,
|
||||||
|
} from './manual-subs.js';
|
||||||
|
import { runLoggedYoutubePhase } from './progress.js';
|
||||||
|
import { fixSubtitleWithAi } from './subtitle-fix-ai.js';
|
||||||
|
import { runWhisper } from './whisper.js';
|
||||||
|
|
||||||
|
export interface YoutubeSubtitleGenerationPlan {
|
||||||
|
fetchManualSubtitles: true;
|
||||||
|
fetchAutoSubtitles: false;
|
||||||
|
publishPrimaryManualSubtitle: false;
|
||||||
|
publishSecondaryManualSubtitle: false;
|
||||||
|
generatePrimarySubtitle: boolean;
|
||||||
|
generateSecondarySubtitle: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function planYoutubeSubtitleGeneration(input: {
|
||||||
|
hasPrimaryManualSubtitle: boolean;
|
||||||
|
hasSecondaryManualSubtitle: boolean;
|
||||||
|
secondaryCanTranslate: boolean;
|
||||||
|
}): YoutubeSubtitleGenerationPlan {
|
||||||
|
return {
|
||||||
|
fetchManualSubtitles: true,
|
||||||
|
fetchAutoSubtitles: false,
|
||||||
|
publishPrimaryManualSubtitle: false,
|
||||||
|
publishSecondaryManualSubtitle: false,
|
||||||
|
generatePrimarySubtitle: !input.hasPrimaryManualSubtitle,
|
||||||
|
generateSecondarySubtitle: !input.hasSecondaryManualSubtitle && input.secondaryCanTranslate,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function preferredLangLabel(langCodes: string[], fallback: string): string {
|
||||||
|
return uniqueNormalizedLangCodes(langCodes)[0] || fallback;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Maps a subtitle source to the tag embedded in published filenames.
 * Currently the source identifier is used verbatim; this hook exists so a
 * future rename does not ripple through the publish logic.
 */
function sourceTag(source: SubtitleCandidate['source']): string {
  return source;
}
|
||||||
|
|
||||||
|
export function resolveWhisperBinary(args: Args): string | null {
|
||||||
|
const explicit = args.whisperBin.trim();
|
||||||
|
if (explicit) return resolvePathMaybe(explicit);
|
||||||
|
if (commandExists('whisper-cli')) return 'whisper-cli';
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Optionally runs the AI subtitle-fix pass over the SRT at `selectedPath`.
 *
 * Returns `selectedPath` unchanged when AI fixing is disabled (either the
 * YouTube fix flag is off or the AI config is not enabled) or when the fixer
 * produced no content. Otherwise writes the fixed SRT next to the original as
 * `<name>.fixed.srt` and returns that new path.
 *
 * NOTE(review): the `.fixed.srt` rename relies on `selectedPath` ending in
 * `.srt`; a non-.srt path would be overwritten in place — confirm callers only
 * pass `.srt` files.
 */
async function maybeFixSubtitleWithAi(
  selectedPath: string,
  args: Args,
  expectedLanguage?: string,
): Promise<string> {
  if (!args.youtubeFixWithAi || args.aiConfig.enabled !== true) {
    return selectedPath;
  }
  // Wrap the fix in a logged phase so start/finish/failure and elapsed time
  // show up in the launcher log.
  const fixedContent = await runLoggedYoutubePhase(
    {
      startMessage: `Starting AI subtitle fix: ${path.basename(selectedPath)}`,
      finishMessage: `Finished AI subtitle fix: ${path.basename(selectedPath)}`,
      failureMessage: `AI subtitle fix failed: ${path.basename(selectedPath)}`,
      log: (level, message) => log(level, args.logLevel, message),
    },
    async () => {
      const originalContent = fs.readFileSync(selectedPath, 'utf8');
      return fixSubtitleWithAi(
        originalContent,
        args.aiConfig,
        (message) => {
          log('warn', args.logLevel, message);
        },
        expectedLanguage,
      );
    },
  );
  // A falsy result means the fixer declined or returned nothing usable; keep
  // the original file.
  if (!fixedContent) {
    return selectedPath;
  }

  const fixedPath = selectedPath.replace(/\.srt$/i, '.fixed.srt');
  fs.writeFileSync(fixedPath, fixedContent, 'utf8');
  return fixedPath;
}
|
||||||
|
|
||||||
|
/**
 * End-to-end YouTube subtitle pipeline for `target`.
 *
 * 1. Probes video metadata (for the video id used as output basename).
 * 2. Downloads manually-authored subtitle tracks for the configured primary
 *    and secondary languages; auto-generated captions are never fetched.
 * 3. For tracks without a manual source, falls back to whisper transcription
 *    (primary) / whisper translation (secondary; English targets only),
 *    optionally post-processed by the AI subtitle fixer, and publishes the
 *    results into `args.youtubeSubgenOutDir`.
 *
 * `onReady` is invoked as each generated track is published so the caller can
 * load it immediately. Manual tracks are used natively and not copied out
 * (reported via the `primaryNative`/`secondaryNative` result flags).
 *
 * The temp working dir is removed on success and kept (with a warning) on
 * failure or when `args.youtubeSubgenKeepTemp` is set.
 *
 * @throws when no subtitle track could be produced at all.
 */
export async function generateYoutubeSubtitles(
  target: string,
  args: Args,
  onReady?: (lang: 'primary' | 'secondary', pathToLoad: string) => Promise<void>,
): Promise<YoutubeSubgenOutputs> {
  const outDir = path.resolve(resolvePathMaybe(args.youtubeSubgenOutDir));
  fs.mkdirSync(outDir, { recursive: true });

  const primaryLangCodes = uniqueNormalizedLangCodes(args.youtubePrimarySubLangs);
  const secondaryLangCodes = uniqueNormalizedLangCodes(args.youtubeSecondarySubLangs);
  const primaryLabel = preferredLangLabel(primaryLangCodes, 'primary');
  const secondaryLabel = preferredLangLabel(secondaryLangCodes, 'secondary');
  // Whisper's translate mode targets English, so only an English secondary
  // has a generation fallback.
  const secondaryCanUseWhisperTranslate =
    secondaryLangCodes.includes('en') || secondaryLangCodes.includes('eng');
  const manualLangs = toYtdlpLangPattern([...primaryLangCodes, ...secondaryLangCodes]);

  const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-yt-subgen-'));
  const knownFiles = new Set<string>();
  let keepTemp = args.youtubeSubgenKeepTemp;

  // Copies a finished track into outDir under both a source-tagged name and a
  // plain `<basename>.<lang>.srt` alias, then notifies the caller.
  const publishTrack = async (
    lang: 'primary' | 'secondary',
    source: SubtitleCandidate['source'],
    selectedPath: string,
    basename: string,
  ): Promise<string> => {
    const langLabel = lang === 'primary' ? primaryLabel : secondaryLabel;
    const taggedPath = path.join(outDir, `${basename}.${langLabel}.${sourceTag(source)}.srt`);
    const aliasPath = path.join(outDir, `${basename}.${langLabel}.srt`);
    fs.copyFileSync(selectedPath, taggedPath);
    fs.copyFileSync(taggedPath, aliasPath);
    log('info', args.logLevel, `Generated subtitle (${langLabel}, ${source}) -> ${aliasPath}`);
    if (onReady) await onReady(lang, aliasPath);
    return aliasPath;
  };

  try {
    // Metadata probe: only the video id is needed, for stable output filenames.
    const meta = await runLoggedYoutubePhase(
      {
        startMessage: 'Starting YouTube metadata probe',
        finishMessage: 'Finished YouTube metadata probe',
        failureMessage: 'YouTube metadata probe failed',
        log: (level, message) => log(level, args.logLevel, message),
      },
      () =>
        runExternalCommand(
          'yt-dlp',
          ['--dump-single-json', '--no-warnings', target],
          {
            captureStdout: true,
            logLevel: args.logLevel,
            commandLabel: 'yt-dlp:meta',
          },
          state.youtubeSubgenChildren,
        ),
    );
    const metadata = JSON.parse(meta.stdout) as { id?: string };
    const videoId = metadata.id || `${Date.now()}`; // timestamp fallback when the id is missing
    const basename = normalizeBasename(videoId, videoId);

    await runLoggedYoutubePhase(
      {
        startMessage: `Starting manual subtitle probe (${manualLangs || 'requested langs'})`,
        finishMessage: 'Finished manual subtitle probe',
        failureMessage: 'Manual subtitle probe failed',
        log: (level, message) => log(level, args.logLevel, message),
      },
      () =>
        downloadManualSubtitles(
          target,
          tempDir,
          manualLangs,
          args.logLevel,
          state.youtubeSubgenChildren,
        ),
    );

    const manualSubs = scanSubtitleCandidates(
      tempDir,
      knownFiles,
      'manual',
      primaryLangCodes,
      secondaryLangCodes,
    );
    for (const sub of manualSubs) knownFiles.add(sub.path);
    const selectedPrimary = pickBestCandidate(
      manualSubs.filter((entry) => entry.lang === 'primary'),
    );
    const selectedSecondary = pickBestCandidate(
      manualSubs.filter((entry) => entry.lang === 'secondary'),
    );

    const plan = planYoutubeSubtitleGeneration({
      hasPrimaryManualSubtitle: Boolean(selectedPrimary),
      hasSecondaryManualSubtitle: Boolean(selectedSecondary),
      secondaryCanTranslate: secondaryCanUseWhisperTranslate,
    });

    // Paths of tracks published by this run ('' = not published).
    let primaryAlias = '';
    let secondaryAlias = '';

    if (selectedPrimary) {
      log(
        'info',
        args.logLevel,
        `Using native YouTube subtitle track for primary (${primaryLabel}); skipping external subtitle copy.`,
      );
    }
    if (selectedSecondary) {
      log(
        'info',
        args.logLevel,
        `Using native YouTube subtitle track for secondary (${secondaryLabel}); skipping external subtitle copy.`,
      );
    }

    if (plan.generatePrimarySubtitle || plan.generateSecondarySubtitle) {
      const whisperBin = resolveWhisperBinary(args);
      const modelPath = args.whisperModel.trim()
        ? path.resolve(resolvePathMaybe(args.whisperModel.trim()))
        : '';
      const hasWhisperFallback = !!whisperBin && !!modelPath && fs.existsSync(modelPath);

      if (!hasWhisperFallback) {
        log(
          'warn',
          args.logLevel,
          'Whisper fallback is not configured; continuing with available subtitle tracks.',
        );
      } else {
        // Audio is extracted and converted once, then shared by both whisper runs.
        const audioPath = await runLoggedYoutubePhase(
          {
            startMessage: 'Starting fallback audio extraction for subtitle generation',
            finishMessage: 'Finished fallback audio extraction',
            failureMessage: 'Fallback audio extraction failed',
            log: (level, message) => log(level, args.logLevel, message),
          },
          () =>
            downloadYoutubeAudio(target, args, tempDir, state.youtubeSubgenChildren),
        );
        const whisperAudioPath = await runLoggedYoutubePhase(
          {
            startMessage: 'Starting ffmpeg audio prep for whisper',
            finishMessage: 'Finished ffmpeg audio prep for whisper',
            failureMessage: 'ffmpeg audio prep for whisper failed',
            log: (level, message) => log(level, args.logLevel, message),
          },
          () => convertAudioForWhisper(audioPath, tempDir),
        );

        // Primary: plain transcription in the source language. A failure is
        // downgraded to a warning so the secondary track can still be tried.
        if (plan.generatePrimarySubtitle) {
          try {
            const primaryPrefix = path.join(tempDir, `${basename}.${primaryLabel}`);
            const primarySrt = await runLoggedYoutubePhase(
              {
                startMessage: `Starting whisper primary subtitle generation (${primaryLabel})`,
                finishMessage: `Finished whisper primary subtitle generation (${primaryLabel})`,
                failureMessage: `Whisper primary subtitle generation failed (${primaryLabel})`,
                log: (level, message) => log(level, args.logLevel, message),
              },
              () =>
                runWhisper(whisperBin!, args, {
                  modelPath,
                  audioPath: whisperAudioPath,
                  language: args.youtubeWhisperSourceLanguage,
                  translate: false,
                  outputPrefix: primaryPrefix,
                }),
            );
            const fixedPrimary = await maybeFixSubtitleWithAi(
              primarySrt,
              args,
              args.youtubeWhisperSourceLanguage,
            );
            primaryAlias = await publishTrack(
              'primary',
              fixedPrimary === primarySrt ? 'whisper' : 'whisper-fixed',
              fixedPrimary,
              basename,
            );
          } catch (error) {
            log(
              'warn',
              args.logLevel,
              `Failed to generate primary subtitle via whisper fallback: ${(error as Error).message}`,
            );
          }
        }

        // Secondary: whisper translate mode (output is English).
        if (plan.generateSecondarySubtitle) {
          try {
            const secondaryPrefix = path.join(tempDir, `${basename}.${secondaryLabel}`);
            const secondarySrt = await runLoggedYoutubePhase(
              {
                startMessage: `Starting whisper secondary subtitle generation (${secondaryLabel})`,
                finishMessage: `Finished whisper secondary subtitle generation (${secondaryLabel})`,
                failureMessage: `Whisper secondary subtitle generation failed (${secondaryLabel})`,
                log: (level, message) => log(level, args.logLevel, message),
              },
              () =>
                runWhisper(whisperBin!, args, {
                  modelPath,
                  audioPath: whisperAudioPath,
                  language: args.youtubeWhisperSourceLanguage,
                  translate: true,
                  outputPrefix: secondaryPrefix,
                }),
            );
            const fixedSecondary = await maybeFixSubtitleWithAi(secondarySrt, args);
            secondaryAlias = await publishTrack(
              'secondary',
              fixedSecondary === secondarySrt ? 'whisper-translate' : 'whisper-translate-fixed',
              fixedSecondary,
              basename,
            );
          } catch (error) {
            log(
              'warn',
              args.logLevel,
              `Failed to generate secondary subtitle via whisper fallback: ${(error as Error).message}`,
            );
          }
        }
      }
    }

    if (!secondaryCanUseWhisperTranslate && !selectedSecondary) {
      log(
        'warn',
        args.logLevel,
        `Secondary subtitle language (${secondaryLabel}) has no whisper translate fallback; relying on manual subtitles only.`,
      );
    }

    if (!primaryAlias && !secondaryAlias && !selectedPrimary && !selectedSecondary) {
      throw new Error('Failed to generate any subtitle tracks.');
    }
    if ((!primaryAlias && !selectedPrimary) || (!secondaryAlias && !selectedSecondary)) {
      log(
        'warn',
        args.logLevel,
        `Generated partial subtitle result: primary=${primaryAlias || selectedPrimary ? 'ok' : 'missing'}, secondary=${secondaryAlias || selectedSecondary ? 'ok' : 'missing'}`,
      );
    }

    return {
      basename,
      primaryPath: primaryAlias || undefined,
      secondaryPath: secondaryAlias || undefined,
      primaryNative: Boolean(selectedPrimary),
      secondaryNative: Boolean(selectedSecondary),
    };
  } catch (error) {
    keepTemp = true; // preserve artifacts for post-mortem debugging
    throw error;
  } finally {
    if (keepTemp) {
      log('warn', args.logLevel, `Keeping subtitle temp dir: ${tempDir}`);
    } else {
      try {
        fs.rmSync(tempDir, { recursive: true, force: true });
      } catch {
        // ignore cleanup failures
      }
    }
  }
}
|
||||||
55
launcher/youtube/progress.test.ts
Normal file
55
launcher/youtube/progress.test.ts
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
import test from 'node:test';
|
||||||
|
import assert from 'node:assert/strict';
|
||||||
|
|
||||||
|
import { runLoggedYoutubePhase } from './progress';
|
||||||
|
|
||||||
|
// Success path: one info entry at start, and one at finish carrying the
// elapsed time computed from the injected clock (2500 - 1000 = 1.5s).
test('runLoggedYoutubePhase logs start and finish with elapsed time', async () => {
  const entries: Array<{ level: 'info' | 'warn'; message: string }> = [];
  let nowMs = 1_000;

  const result = await runLoggedYoutubePhase(
    {
      startMessage: 'Starting subtitle probe',
      finishMessage: 'Finished subtitle probe',
      log: (level, message) => entries.push({ level, message }),
      now: () => nowMs,
    },
    async () => {
      nowMs = 2_500; // advance the fake clock inside the phase body
      return 'ok';
    },
  );

  assert.equal(result, 'ok');
  assert.deepEqual(entries, [
    { level: 'info', message: 'Starting subtitle probe' },
    { level: 'info', message: 'Finished subtitle probe (1.5s)' },
  ]);
});

// Failure path: the failureMessage is logged as a warning with elapsed time
// and the original error propagates to the caller.
test('runLoggedYoutubePhase logs failure with elapsed time and rethrows', async () => {
  const entries: Array<{ level: 'info' | 'warn'; message: string }> = [];
  let nowMs = 5_000;

  await assert.rejects(
    runLoggedYoutubePhase(
      {
        startMessage: 'Starting whisper primary',
        finishMessage: 'Finished whisper primary',
        failureMessage: 'Failed whisper primary',
        log: (level, message) => entries.push({ level, message }),
        now: () => nowMs,
      },
      async () => {
        nowMs = 8_200; // 3.2s elapsed before the failure
        throw new Error('boom');
      },
    ),
    /boom/,
  );

  assert.deepEqual(entries, [
    { level: 'info', message: 'Starting whisper primary' },
    { level: 'warn', message: 'Failed whisper primary after 3.2s: boom' },
  ]);
});
|
||||||
33
launcher/youtube/progress.ts
Normal file
33
launcher/youtube/progress.ts
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
type PhaseLogLevel = 'info' | 'warn';
|
||||||
|
|
||||||
|
export interface RunLoggedYoutubePhaseOptions {
|
||||||
|
startMessage: string;
|
||||||
|
finishMessage: string;
|
||||||
|
failureMessage?: string;
|
||||||
|
log: (level: PhaseLogLevel, message: string) => void;
|
||||||
|
now?: () => number;
|
||||||
|
}
|
||||||
|
|
||||||
|
function formatElapsedMs(elapsedMs: number): string {
|
||||||
|
const seconds = Math.max(0, elapsedMs) / 1000;
|
||||||
|
return `${seconds.toFixed(1)}s`;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function runLoggedYoutubePhase<T>(
|
||||||
|
options: RunLoggedYoutubePhaseOptions,
|
||||||
|
run: () => Promise<T>,
|
||||||
|
): Promise<T> {
|
||||||
|
const now = options.now ?? Date.now;
|
||||||
|
const startedAt = now();
|
||||||
|
options.log('info', options.startMessage);
|
||||||
|
try {
|
||||||
|
const result = await run();
|
||||||
|
options.log('info', `${options.finishMessage} (${formatElapsedMs(now() - startedAt)})`);
|
||||||
|
return result;
|
||||||
|
} catch (error) {
|
||||||
|
const prefix = options.failureMessage ?? options.finishMessage;
|
||||||
|
const message = error instanceof Error ? error.message : String(error);
|
||||||
|
options.log('warn', `${prefix} after ${formatElapsedMs(now() - startedAt)}: ${message}`);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
32
launcher/youtube/srt.test.ts
Normal file
32
launcher/youtube/srt.test.ts
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
import test from 'node:test';
|
||||||
|
import assert from 'node:assert/strict';
|
||||||
|
|
||||||
|
import { parseSrt, stringifySrt } from './srt';
|
||||||
|
|
||||||
|
// parseSrt must surface index, timing, and text for each cue block.
test('parseSrt reads cue numbering timing and text', () => {
  const cues = parseSrt(`1
00:00:01,000 --> 00:00:02,000
こんにちは

2
00:00:02,500 --> 00:00:03,000
世界
`);

  assert.equal(cues.length, 2);
  assert.equal(cues[0]?.start, '00:00:01,000');
  assert.equal(cues[0]?.end, '00:00:02,000');
  assert.equal(cues[0]?.text, 'こんにちは');
  assert.equal(cues[1]?.text, '世界');
});

// stringifySrt output must itself be well-formed, re-parseable SRT.
test('stringifySrt preserves parseable cue structure', () => {
  const roundTrip = stringifySrt(
    parseSrt(`1
00:00:01,000 --> 00:00:02,000
こんにちは
`),
  );

  assert.match(roundTrip, /1\n00:00:01,000 --> 00:00:02,000\nこんにちは/);
});
|
||||||
40
launcher/youtube/srt.ts
Normal file
40
launcher/youtube/srt.ts
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
export interface SrtCue {
|
||||||
|
index: number;
|
||||||
|
start: string;
|
||||||
|
end: string;
|
||||||
|
text: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
const TIMING_LINE_PATTERN =
|
||||||
|
/^(?<start>\d{2}:\d{2}:\d{2},\d{3}) --> (?<end>\d{2}:\d{2}:\d{2},\d{3})$/;
|
||||||
|
|
||||||
|
export function parseSrt(content: string): SrtCue[] {
|
||||||
|
const normalized = content.replace(/\r\n/g, '\n').trim();
|
||||||
|
if (!normalized) return [];
|
||||||
|
|
||||||
|
return normalized
|
||||||
|
.split(/\n{2,}/)
|
||||||
|
.map((block) => {
|
||||||
|
const lines = block.split('\n');
|
||||||
|
const index = Number.parseInt(lines[0] || '', 10);
|
||||||
|
const timingLine = lines[1] || '';
|
||||||
|
const timingMatch = TIMING_LINE_PATTERN.exec(timingLine);
|
||||||
|
if (!Number.isInteger(index) || !timingMatch?.groups) {
|
||||||
|
throw new Error(`Invalid SRT cue block: ${block}`);
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
index,
|
||||||
|
start: timingMatch.groups.start!,
|
||||||
|
end: timingMatch.groups.end!,
|
||||||
|
text: lines.slice(2).join('\n').trim(),
|
||||||
|
} satisfies SrtCue;
|
||||||
|
})
|
||||||
|
.filter((cue) => cue.text.length > 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function stringifySrt(cues: SrtCue[]): string {
|
||||||
|
return cues
|
||||||
|
.map((cue, idx) => `${idx + 1}\n${cue.start} --> ${cue.end}\n${cue.text.trim()}\n`)
|
||||||
|
.join('\n')
|
||||||
|
.trimEnd();
|
||||||
|
}
|
||||||
126
launcher/youtube/subtitle-fix-ai.test.ts
Normal file
126
launcher/youtube/subtitle-fix-ai.test.ts
Normal file
@@ -0,0 +1,126 @@
|
|||||||
|
import test from 'node:test';
|
||||||
|
import assert from 'node:assert/strict';
|
||||||
|
|
||||||
|
import { applyFixedCueBatch, parseAiSubtitleFixResponse } from './subtitle-fix-ai';
|
||||||
|
import { parseSrt } from './srt';
|
||||||
|
|
||||||
|
// Content-only fixes with unchanged timing are merged onto the originals.
test('applyFixedCueBatch accepts content-only fixes with identical timing', () => {
  const original = parseSrt(`1
00:00:01,000 --> 00:00:02,000
こんいちは

2
00:00:03,000 --> 00:00:04,000
世界
`);
  const fixed = parseSrt(`1
00:00:01,000 --> 00:00:02,000
こんにちは

2
00:00:03,000 --> 00:00:04,000
世界
`);

  const merged = applyFixedCueBatch(original, fixed);
  assert.equal(merged[0]?.text, 'こんにちは');
});

// Any timestamp drift in the fixed batch must be rejected.
test('applyFixedCueBatch rejects changed timestamps', () => {
  const original = parseSrt(`1
00:00:01,000 --> 00:00:02,000
こんいちは
`);
  const fixed = parseSrt(`1
00:00:01,100 --> 00:00:02,000
こんにちは
`);

  assert.throws(() => applyFixedCueBatch(original, fixed), /timestamps/i);
});

// Responses wrapped in markdown code fences are unwrapped before parsing.
test('parseAiSubtitleFixResponse accepts valid SRT wrapped in markdown fences', () => {
  const original = parseSrt(`1
00:00:01,000 --> 00:00:02,000
こんいちは

2
00:00:03,000 --> 00:00:04,000
世界
`);

  const parsed = parseAiSubtitleFixResponse(
    original,
    '```srt\n1\n00:00:01,000 --> 00:00:02,000\nこんにちは\n\n2\n00:00:03,000 --> 00:00:04,000\n世界\n```',
  );

  assert.equal(parsed[0]?.text, 'こんにちは');
  assert.equal(parsed[1]?.text, '世界');
});

// Text-only output (one paragraph per cue) is re-attached to original timing.
test('parseAiSubtitleFixResponse accepts text-only one-block-per-cue output', () => {
  const original = parseSrt(`1
00:00:01,000 --> 00:00:02,000
こんいちは

2
00:00:03,000 --> 00:00:04,000
世界
`);

  const parsed = parseAiSubtitleFixResponse(
    original,
    `こんにちは

世界`,
  );

  assert.equal(parsed[0]?.start, '00:00:01,000');
  assert.equal(parsed[0]?.text, 'こんにちは');
  assert.equal(parsed[1]?.end, '00:00:04,000');
  assert.equal(parsed[1]?.text, '世界');
});

// Text-only output whose cue count cannot be reconciled is rejected.
test('parseAiSubtitleFixResponse rejects unrecoverable text-only output', () => {
  const original = parseSrt(`1
00:00:01,000 --> 00:00:02,000
こんいちは

2
00:00:03,000 --> 00:00:04,000
世界
`);

  assert.throws(
    () => parseAiSubtitleFixResponse(original, 'こんにちは\n世界\n余分です'),
    /cue block|cue count/i,
  );
});

// A response in the wrong language (English for a 'ja' track) is rejected.
test('parseAiSubtitleFixResponse rejects language drift for primary Japanese subtitles', () => {
  const original = parseSrt(`1
00:00:01,000 --> 00:00:02,000
こんにちは

2
00:00:03,000 --> 00:00:04,000
今日はいい天気ですね
`);

  assert.throws(
    () =>
      parseAiSubtitleFixResponse(
        original,
        `1
00:00:01,000 --> 00:00:02,000
Hello

2
00:00:03,000 --> 00:00:04,000
The weather is nice today
`,
        'ja',
      ),
    /language/i,
  );
});
|
||||||
213
launcher/youtube/subtitle-fix-ai.ts
Normal file
213
launcher/youtube/subtitle-fix-ai.ts
Normal file
@@ -0,0 +1,213 @@
|
|||||||
|
import type { LauncherAiConfig } from '../types.js';
|
||||||
|
import { requestAiChatCompletion, resolveAiApiKey } from '../../src/ai/client.js';
|
||||||
|
import { parseSrt, stringifySrt, type SrtCue } from './srt.js';
|
||||||
|
|
||||||
|
// System prompt used when no custom subtitle-fix prompt is configured.
const DEFAULT_SUBTITLE_FIX_PROMPT =
  'Fix transcription mistakes only. Preserve cue numbering, timestamps, and valid SRT formatting exactly. Return only corrected SRT.';

// Captures the trailing portion of a response that starts at the first
// well-formed SRT cue ("<number>\n<timing line>…").
const SRT_BLOCK_PATTERN =
  /(?:^|\n)(\d+\n\d{2}:\d{2}:\d{2},\d{3} --> \d{2}:\d{2}:\d{2},\d{3}[\s\S]*)$/;
// Extracts the body of a response wrapped entirely in a ``` fenced code block.
const CODE_FENCE_PATTERN = /^```(?:\w+)?\s*\n([\s\S]*?)\n```$/;
// Counts Japanese script characters; global+unicode so it works with matchAll.
const JAPANESE_CHAR_PATTERN = /[\p{Script=Hiragana}\p{Script=Katakana}\p{Script=Han}]/gu;
// Counts Latin letters; global+unicode so it works with matchAll.
const LATIN_LETTER_PATTERN = /\p{Script=Latin}/gu;
|
||||||
|
|
||||||
|
export function applyFixedCueBatch(original: SrtCue[], fixed: SrtCue[]): SrtCue[] {
|
||||||
|
if (original.length !== fixed.length) {
|
||||||
|
throw new Error('Fixed subtitle batch must preserve cue count.');
|
||||||
|
}
|
||||||
|
|
||||||
|
return original.map((cue, index) => {
|
||||||
|
const nextCue = fixed[index];
|
||||||
|
if (!nextCue) {
|
||||||
|
throw new Error('Missing fixed subtitle cue.');
|
||||||
|
}
|
||||||
|
if (cue.start !== nextCue.start || cue.end !== nextCue.end) {
|
||||||
|
throw new Error('Fixed subtitle batch must preserve cue timestamps.');
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
...cue,
|
||||||
|
text: nextCue.text,
|
||||||
|
};
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function chunkCues(cues: SrtCue[], size: number): SrtCue[][] {
|
||||||
|
const chunks: SrtCue[][] = [];
|
||||||
|
for (let index = 0; index < cues.length; index += size) {
|
||||||
|
chunks.push(cues.slice(index, index + size));
|
||||||
|
}
|
||||||
|
return chunks;
|
||||||
|
}
|
||||||
|
|
||||||
|
function normalizeAiSubtitleFixCandidates(content: string): string[] {
|
||||||
|
const trimmed = content.replace(/\r\n/g, '\n').trim();
|
||||||
|
if (!trimmed) {
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
|
||||||
|
const candidates = new Set<string>([trimmed]);
|
||||||
|
const fenced = CODE_FENCE_PATTERN.exec(trimmed)?.[1]?.trim();
|
||||||
|
if (fenced) {
|
||||||
|
candidates.add(fenced);
|
||||||
|
}
|
||||||
|
|
||||||
|
const srtBlock = SRT_BLOCK_PATTERN.exec(trimmed)?.[1]?.trim();
|
||||||
|
if (srtBlock) {
|
||||||
|
candidates.add(srtBlock);
|
||||||
|
}
|
||||||
|
|
||||||
|
return [...candidates];
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseTextOnlyCueBatch(original: SrtCue[], content: string): SrtCue[] {
|
||||||
|
const paragraphBlocks = content
|
||||||
|
.split(/\n{2,}/)
|
||||||
|
.map((block) => block.trim())
|
||||||
|
.filter((block) => block.length > 0);
|
||||||
|
if (paragraphBlocks.length === original.length) {
|
||||||
|
return original.map((cue, index) => ({
|
||||||
|
...cue,
|
||||||
|
text: paragraphBlocks[index]!,
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
const lineBlocks = content
|
||||||
|
.split('\n')
|
||||||
|
.map((line) => line.trim())
|
||||||
|
.filter((line) => line.length > 0);
|
||||||
|
if (lineBlocks.length === original.length) {
|
||||||
|
return original.map((cue, index) => ({
|
||||||
|
...cue,
|
||||||
|
text: lineBlocks[index]!,
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new Error('Fixed subtitle batch must preserve cue count.');
|
||||||
|
}
|
||||||
|
|
||||||
|
function countPatternMatches(content: string, pattern: RegExp): number {
|
||||||
|
pattern.lastIndex = 0;
|
||||||
|
return [...content.matchAll(pattern)].length;
|
||||||
|
}
|
||||||
|
|
||||||
|
function isJapaneseLanguageCode(language: string | undefined): boolean {
|
||||||
|
if (!language) return false;
|
||||||
|
const normalized = language.trim().toLowerCase();
|
||||||
|
return normalized === 'ja' || normalized === 'jp' || normalized === 'jpn';
|
||||||
|
}
|
||||||
|
|
||||||
|
function validateExpectedLanguage(
|
||||||
|
original: SrtCue[],
|
||||||
|
fixed: SrtCue[],
|
||||||
|
expectedLanguage: string | undefined,
|
||||||
|
): void {
|
||||||
|
if (!isJapaneseLanguageCode(expectedLanguage)) return;
|
||||||
|
|
||||||
|
const originalText = original.map((cue) => cue.text).join('\n');
|
||||||
|
const fixedText = fixed.map((cue) => cue.text).join('\n');
|
||||||
|
const originalJapaneseChars = countPatternMatches(originalText, JAPANESE_CHAR_PATTERN);
|
||||||
|
if (originalJapaneseChars < 4) return;
|
||||||
|
|
||||||
|
const fixedJapaneseChars = countPatternMatches(fixedText, JAPANESE_CHAR_PATTERN);
|
||||||
|
const fixedLatinLetters = countPatternMatches(fixedText, LATIN_LETTER_PATTERN);
|
||||||
|
if (fixedJapaneseChars === 0 && fixedLatinLetters >= 4) {
|
||||||
|
throw new Error('Fixed subtitle batch changed language away from expected Japanese.');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function parseAiSubtitleFixResponse(
|
||||||
|
original: SrtCue[],
|
||||||
|
content: string,
|
||||||
|
expectedLanguage?: string,
|
||||||
|
): SrtCue[] {
|
||||||
|
const candidates = normalizeAiSubtitleFixCandidates(content);
|
||||||
|
let lastError: Error | null = null;
|
||||||
|
|
||||||
|
for (const candidate of candidates) {
|
||||||
|
try {
|
||||||
|
const parsed = parseSrt(candidate);
|
||||||
|
validateExpectedLanguage(original, parsed, expectedLanguage);
|
||||||
|
return parsed;
|
||||||
|
} catch (error) {
|
||||||
|
lastError = error as Error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const candidate of candidates) {
|
||||||
|
try {
|
||||||
|
const parsed = parseTextOnlyCueBatch(original, candidate);
|
||||||
|
validateExpectedLanguage(original, parsed, expectedLanguage);
|
||||||
|
return parsed;
|
||||||
|
} catch (error) {
|
||||||
|
lastError = error as Error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
throw lastError ?? new Error('AI subtitle fix returned empty content.');
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function fixSubtitleWithAi(
|
||||||
|
subtitleContent: string,
|
||||||
|
aiConfig: LauncherAiConfig,
|
||||||
|
logWarning: (message: string) => void,
|
||||||
|
expectedLanguage?: string,
|
||||||
|
): Promise<string | null> {
|
||||||
|
if (aiConfig.enabled !== true) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const apiKey = await resolveAiApiKey(aiConfig);
|
||||||
|
if (!apiKey) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const cues = parseSrt(subtitleContent);
|
||||||
|
if (cues.length === 0) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const fixedChunks: SrtCue[] = [];
|
||||||
|
for (const chunk of chunkCues(cues, 25)) {
|
||||||
|
const fixedContent = await requestAiChatCompletion(
|
||||||
|
{
|
||||||
|
apiKey,
|
||||||
|
baseUrl: aiConfig.baseUrl,
|
||||||
|
model: aiConfig.model,
|
||||||
|
timeoutMs: aiConfig.requestTimeoutMs,
|
||||||
|
messages: [
|
||||||
|
{
|
||||||
|
role: 'system',
|
||||||
|
content: aiConfig.systemPrompt?.trim() || DEFAULT_SUBTITLE_FIX_PROMPT,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
role: 'user',
|
||||||
|
content: stringifySrt(chunk),
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
logWarning,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
if (!fixedContent) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
let parsedFixed: SrtCue[];
|
||||||
|
try {
|
||||||
|
parsedFixed = parseAiSubtitleFixResponse(chunk, fixedContent, expectedLanguage);
|
||||||
|
} catch (error) {
|
||||||
|
logWarning(`AI subtitle fix returned invalid SRT: ${(error as Error).message}`);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
fixedChunks.push(...applyFixedCueBatch(chunk, parsedFixed));
|
||||||
|
} catch (error) {
|
||||||
|
logWarning(`AI subtitle fix validation failed: ${(error as Error).message}`);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return stringifySrt(fixedChunks);
|
||||||
|
}
|
||||||
47
launcher/youtube/whisper.test.ts
Normal file
47
launcher/youtube/whisper.test.ts
Normal file
@@ -0,0 +1,47 @@
|
|||||||
|
import test from 'node:test';
|
||||||
|
import assert from 'node:assert/strict';
|
||||||
|
|
||||||
|
import { buildWhisperArgs } from './whisper';
|
||||||
|
|
||||||
|
test('buildWhisperArgs includes threads and optional VAD flags', () => {
|
||||||
|
assert.deepEqual(
|
||||||
|
buildWhisperArgs({
|
||||||
|
modelPath: '/models/ggml-large-v2.bin',
|
||||||
|
audioPath: '/tmp/input.wav',
|
||||||
|
outputPrefix: '/tmp/output',
|
||||||
|
language: 'ja',
|
||||||
|
translate: false,
|
||||||
|
threads: 8,
|
||||||
|
vadModelPath: '/models/vad.bin',
|
||||||
|
}),
|
||||||
|
[
|
||||||
|
'-m',
|
||||||
|
'/models/ggml-large-v2.bin',
|
||||||
|
'-f',
|
||||||
|
'/tmp/input.wav',
|
||||||
|
'--output-srt',
|
||||||
|
'--output-file',
|
||||||
|
'/tmp/output',
|
||||||
|
'--language',
|
||||||
|
'ja',
|
||||||
|
'--threads',
|
||||||
|
'8',
|
||||||
|
'-vm',
|
||||||
|
'/models/vad.bin',
|
||||||
|
'--vad',
|
||||||
|
],
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('buildWhisperArgs includes translate flag when requested', () => {
|
||||||
|
assert.ok(
|
||||||
|
buildWhisperArgs({
|
||||||
|
modelPath: '/models/base.bin',
|
||||||
|
audioPath: '/tmp/input.wav',
|
||||||
|
outputPrefix: '/tmp/output',
|
||||||
|
language: 'ja',
|
||||||
|
translate: true,
|
||||||
|
threads: 4,
|
||||||
|
}).includes('--translate'),
|
||||||
|
);
|
||||||
|
});
|
||||||
60
launcher/youtube/whisper.ts
Normal file
60
launcher/youtube/whisper.ts
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
import fs from 'node:fs';
|
||||||
|
|
||||||
|
import type { Args } from '../types.js';
|
||||||
|
import { runExternalCommand } from '../util.js';
|
||||||
|
|
||||||
|
/** Options describing a single whisper.cpp transcription invocation. */
export interface BuildWhisperArgsOptions {
  /** Path to the ggml model file, passed via `-m`. */
  modelPath: string;
  /** Path to the input audio file, passed via `-f`. */
  audioPath: string;
  /** Output path prefix; whisper appends `.srt` to it. */
  outputPrefix: string;
  /** Source language code forwarded to `--language`. */
  language: string;
  /** When true, also pass `--translate`. */
  translate: boolean;
  /** Worker thread count forwarded to `--threads`. */
  threads: number;
  /** Optional VAD model path; when set, enables `-vm <path> --vad`. */
  vadModelPath?: string;
}
|
||||||
|
|
||||||
|
export function buildWhisperArgs(options: BuildWhisperArgsOptions): string[] {
|
||||||
|
const args = [
|
||||||
|
'-m',
|
||||||
|
options.modelPath,
|
||||||
|
'-f',
|
||||||
|
options.audioPath,
|
||||||
|
'--output-srt',
|
||||||
|
'--output-file',
|
||||||
|
options.outputPrefix,
|
||||||
|
'--language',
|
||||||
|
options.language,
|
||||||
|
'--threads',
|
||||||
|
String(options.threads),
|
||||||
|
];
|
||||||
|
if (options.translate) args.push('--translate');
|
||||||
|
if (options.vadModelPath) {
|
||||||
|
args.push('-vm', options.vadModelPath, '--vad');
|
||||||
|
}
|
||||||
|
return args;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function runWhisper(
|
||||||
|
whisperBin: string,
|
||||||
|
args: Args,
|
||||||
|
options: Omit<BuildWhisperArgsOptions, 'threads' | 'vadModelPath'>,
|
||||||
|
): Promise<string> {
|
||||||
|
const vadModelPath =
|
||||||
|
args.whisperVadModel.trim() && fs.existsSync(args.whisperVadModel.trim())
|
||||||
|
? args.whisperVadModel.trim()
|
||||||
|
: undefined;
|
||||||
|
const whisperArgs = buildWhisperArgs({
|
||||||
|
...options,
|
||||||
|
threads: args.whisperThreads,
|
||||||
|
vadModelPath,
|
||||||
|
});
|
||||||
|
await runExternalCommand(whisperBin, whisperArgs, {
|
||||||
|
commandLabel: 'whisper',
|
||||||
|
streamOutput: true,
|
||||||
|
});
|
||||||
|
const outputPath = `${options.outputPrefix}.srt`;
|
||||||
|
if (!fs.existsSync(outputPath)) {
|
||||||
|
throw new Error(`whisper output not found: ${outputPath}`);
|
||||||
|
}
|
||||||
|
return outputPath;
|
||||||
|
}
|
||||||
67
package.json
67
package.json
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "subminer",
|
"name": "subminer",
|
||||||
"version": "0.4.1",
|
"version": "0.5.5",
|
||||||
"description": "All-in-one sentence mining overlay with AnkiConnect and dictionary integration",
|
"description": "All-in-one sentence mining overlay with AnkiConnect and dictionary integration",
|
||||||
"packageManager": "bun@1.3.5",
|
"packageManager": "bun@1.3.5",
|
||||||
"main": "dist/main-entry.js",
|
"main": "dist/main-entry.js",
|
||||||
@@ -11,9 +11,15 @@
|
|||||||
"get-frequency:electron": "bun run build:yomitan && bun build scripts/get_frequency.ts --format=cjs --target=node --outfile dist/scripts/get_frequency.js --external electron && electron dist/scripts/get_frequency.js --pretty --color-top-x 10000 --yomitan-user-data ~/.config/SubMiner --colorized-line",
|
"get-frequency:electron": "bun run build:yomitan && bun build scripts/get_frequency.ts --format=cjs --target=node --outfile dist/scripts/get_frequency.js --external electron && electron dist/scripts/get_frequency.js --pretty --color-top-x 10000 --yomitan-user-data ~/.config/SubMiner --colorized-line",
|
||||||
"test-yomitan-parser": "bun run scripts/test-yomitan-parser.ts",
|
"test-yomitan-parser": "bun run scripts/test-yomitan-parser.ts",
|
||||||
"test-yomitan-parser:electron": "bun run build:yomitan && bun build scripts/test-yomitan-parser.ts --format=cjs --target=node --outfile dist/scripts/test-yomitan-parser.js --external electron && electron dist/scripts/test-yomitan-parser.js",
|
"test-yomitan-parser:electron": "bun run build:yomitan && bun build scripts/test-yomitan-parser.ts --format=cjs --target=node --outfile dist/scripts/test-yomitan-parser.js --external electron && electron dist/scripts/test-yomitan-parser.js",
|
||||||
"build:yomitan": "node scripts/build-yomitan.mjs",
|
"build:yomitan": "bun scripts/build-yomitan.mjs",
|
||||||
"build": "bun run build:yomitan && tsc -p tsconfig.json && bun run build:renderer && cp src/renderer/index.html src/renderer/style.css dist/renderer/ && cp -r src/renderer/fonts dist/renderer/ && bash scripts/build-macos-helper.sh",
|
"build:assets": "bun scripts/prepare-build-assets.mjs",
|
||||||
|
"build": "bun run build:yomitan && tsc -p tsconfig.json && bun run build:renderer && bun run build:assets",
|
||||||
"build:renderer": "esbuild src/renderer/renderer.ts --bundle --platform=browser --format=esm --target=es2022 --outfile=dist/renderer/renderer.js --sourcemap",
|
"build:renderer": "esbuild src/renderer/renderer.ts --bundle --platform=browser --format=esm --target=es2022 --outfile=dist/renderer/renderer.js --sourcemap",
|
||||||
|
"changelog:build": "bun run scripts/build-changelog.ts build",
|
||||||
|
"changelog:check": "bun run scripts/build-changelog.ts check",
|
||||||
|
"changelog:lint": "bun run scripts/build-changelog.ts lint",
|
||||||
|
"changelog:pr-check": "bun run scripts/build-changelog.ts pr-check",
|
||||||
|
"changelog:release-notes": "bun run scripts/build-changelog.ts release-notes",
|
||||||
"format": "prettier --write .",
|
"format": "prettier --write .",
|
||||||
"format:check": "prettier --check .",
|
"format:check": "prettier --check .",
|
||||||
"format:src": "bash scripts/prettier-scope.sh --write",
|
"format:src": "bash scripts/prettier-scope.sh --write",
|
||||||
@@ -21,38 +27,41 @@
|
|||||||
"test:config:src": "bun test src/config/config.test.ts src/config/path-resolution.test.ts src/config/resolve/anki-connect.test.ts src/config/resolve/subtitle-style.test.ts src/config/resolve/jellyfin.test.ts src/config/definitions/domain-registry.test.ts src/generate-config-example.test.ts",
|
"test:config:src": "bun test src/config/config.test.ts src/config/path-resolution.test.ts src/config/resolve/anki-connect.test.ts src/config/resolve/subtitle-style.test.ts src/config/resolve/jellyfin.test.ts src/config/definitions/domain-registry.test.ts src/generate-config-example.test.ts",
|
||||||
"test:config:dist": "bun test dist/config/config.test.js dist/config/path-resolution.test.js dist/config/resolve/anki-connect.test.js dist/config/resolve/subtitle-style.test.js dist/config/resolve/jellyfin.test.js dist/config/definitions/domain-registry.test.js dist/generate-config-example.test.js",
|
"test:config:dist": "bun test dist/config/config.test.js dist/config/path-resolution.test.js dist/config/resolve/anki-connect.test.js dist/config/resolve/subtitle-style.test.js dist/config/resolve/jellyfin.test.js dist/config/definitions/domain-registry.test.js dist/generate-config-example.test.js",
|
||||||
"test:config:smoke:dist": "bun test dist/config/path-resolution.test.js",
|
"test:config:smoke:dist": "bun test dist/config/path-resolution.test.js",
|
||||||
"test:plugin:src": "lua scripts/test-plugin-start-gate.lua",
|
"test:plugin:src": "lua scripts/test-plugin-start-gate.lua && lua scripts/test-plugin-binary-windows.lua",
|
||||||
"test:launcher:smoke:src": "bun test launcher/smoke.e2e.test.ts",
|
"test:launcher:smoke:src": "bun test launcher/smoke.e2e.test.ts",
|
||||||
"test:launcher:src": "bun test launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/smoke.e2e.test.ts && bun run test:plugin:src",
|
"test:launcher:src": "bun test launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/mpv.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/smoke.e2e.test.ts && bun run test:plugin:src",
|
||||||
"test:core:src": "bun test src/cli/args.test.ts src/cli/help.test.ts src/shared/setup-state.test.ts src/core/services/cli-command.test.ts src/core/services/field-grouping-overlay.test.ts src/core/services/numeric-shortcut-session.test.ts src/core/services/secondary-subtitle.test.ts src/core/services/mpv-render-metrics.test.ts src/core/services/overlay-content-measurement.test.ts src/core/services/mpv-control.test.ts src/core/services/mpv.test.ts src/core/services/runtime-options-ipc.test.ts src/core/services/runtime-config.test.ts src/core/services/yomitan-extension-paths.test.ts src/core/services/config-hot-reload.test.ts src/core/services/discord-presence.test.ts src/core/services/tokenizer.test.ts src/core/services/tokenizer/annotation-stage.test.ts src/core/services/tokenizer/parser-selection-stage.test.ts src/core/services/tokenizer/parser-enrichment-stage.test.ts src/core/services/subsync.test.ts src/core/services/overlay-bridge.test.ts src/core/services/overlay-shortcut-handler.test.ts src/core/services/mining.test.ts src/core/services/anki-jimaku.test.ts src/core/services/jimaku-download-path.test.ts src/core/services/jellyfin.test.ts src/core/services/jellyfin-remote.test.ts src/core/services/immersion-tracker-service.test.ts src/core/services/overlay-runtime-init.test.ts src/core/services/app-ready.test.ts src/core/services/startup-bootstrap.test.ts src/core/services/subtitle-processing-controller.test.ts src/core/services/anilist/anilist-update-queue.test.ts src/core/utils/shortcut-config.test.ts src/main/runtime/first-run-setup-plugin.test.ts src/main/runtime/first-run-setup-service.test.ts src/main/runtime/first-run-setup-window.test.ts src/main/runtime/tray-runtime.test.ts src/main/runtime/tray-main-actions.test.ts src/main/runtime/tray-main-deps.test.ts src/main/runtime/tray-runtime-handlers.test.ts src/main/runtime/cli-command-context-main-deps.test.ts src/main/runtime/app-ready-main-deps.test.ts src/renderer/error-recovery.test.ts 
src/renderer/subtitle-render.test.ts src/renderer/handlers/mouse.test.ts src/renderer/handlers/keyboard.test.ts src/renderer/modals/jimaku.test.ts src/subsync/utils.test.ts src/main/anilist-url-guard.test.ts src/window-trackers/x11-tracker.test.ts launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/setup-gate.test.ts",
|
"test:core:src": "bun test src/cli/args.test.ts src/cli/help.test.ts src/shared/setup-state.test.ts src/core/services/cli-command.test.ts src/core/services/field-grouping-overlay.test.ts src/core/services/numeric-shortcut-session.test.ts src/core/services/secondary-subtitle.test.ts src/core/services/mpv-render-metrics.test.ts src/core/services/overlay-content-measurement.test.ts src/core/services/mpv-control.test.ts src/core/services/mpv.test.ts src/core/services/runtime-options-ipc.test.ts src/core/services/runtime-config.test.ts src/core/services/yomitan-extension-paths.test.ts src/core/services/config-hot-reload.test.ts src/core/services/discord-presence.test.ts src/core/services/tokenizer.test.ts src/core/services/tokenizer/annotation-stage.test.ts src/core/services/tokenizer/parser-selection-stage.test.ts src/core/services/tokenizer/parser-enrichment-stage.test.ts src/core/services/subsync.test.ts src/core/services/overlay-bridge.test.ts src/core/services/overlay-shortcut-handler.test.ts src/core/services/mining.test.ts src/core/services/anki-jimaku.test.ts src/core/services/jimaku-download-path.test.ts src/core/services/jellyfin.test.ts src/core/services/jellyfin-remote.test.ts src/core/services/immersion-tracker-service.test.ts src/core/services/overlay-runtime-init.test.ts src/core/services/app-ready.test.ts src/core/services/startup-bootstrap.test.ts src/core/services/subtitle-processing-controller.test.ts src/core/services/anilist/anilist-update-queue.test.ts src/core/utils/shortcut-config.test.ts src/main/runtime/first-run-setup-plugin.test.ts src/main/runtime/first-run-setup-service.test.ts src/main/runtime/first-run-setup-window.test.ts src/main/runtime/tray-runtime.test.ts src/main/runtime/tray-main-actions.test.ts src/main/runtime/tray-main-deps.test.ts src/main/runtime/tray-runtime-handlers.test.ts src/main/runtime/cli-command-context-main-deps.test.ts src/main/runtime/app-ready-main-deps.test.ts src/renderer/error-recovery.test.ts 
src/renderer/subtitle-render.test.ts src/renderer/handlers/mouse.test.ts src/renderer/handlers/keyboard.test.ts src/renderer/modals/jimaku.test.ts src/subsync/utils.test.ts src/main/anilist-url-guard.test.ts src/window-trackers/hyprland-tracker.test.ts src/window-trackers/x11-tracker.test.ts src/window-trackers/windows-helper.test.ts src/window-trackers/windows-tracker.test.ts launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/setup-gate.test.ts",
|
||||||
"test:core:dist": "bun test dist/cli/args.test.js dist/cli/help.test.js dist/core/services/cli-command.test.js dist/core/services/ipc.test.js dist/core/services/anki-jimaku-ipc.test.js dist/core/services/field-grouping-overlay.test.js dist/core/services/numeric-shortcut-session.test.js dist/core/services/secondary-subtitle.test.js dist/core/services/mpv-render-metrics.test.js dist/core/services/overlay-content-measurement.test.js dist/core/services/mpv-control.test.js dist/core/services/mpv.test.js dist/core/services/runtime-options-ipc.test.js dist/core/services/runtime-config.test.js dist/core/services/yomitan-extension-paths.test.js dist/core/services/config-hot-reload.test.js dist/core/services/discord-presence.test.js dist/core/services/tokenizer.test.js dist/core/services/tokenizer/annotation-stage.test.js dist/core/services/tokenizer/parser-selection-stage.test.js dist/core/services/tokenizer/parser-enrichment-stage.test.js dist/core/services/subsync.test.js dist/core/services/overlay-bridge.test.js dist/core/services/overlay-manager.test.js dist/core/services/overlay-shortcut-handler.test.js dist/core/services/mining.test.js dist/core/services/anki-jimaku.test.js dist/core/services/jimaku-download-path.test.js dist/core/services/jellyfin.test.js dist/core/services/jellyfin-remote.test.js dist/core/services/immersion-tracker-service.test.js dist/core/services/overlay-runtime-init.test.js dist/core/services/app-ready.test.js dist/core/services/startup-bootstrap.test.js dist/core/services/subtitle-processing-controller.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/anilist/anilist-update-queue.test.js dist/renderer/error-recovery.test.js dist/renderer/subtitle-render.test.js dist/renderer/handlers/mouse.test.js dist/renderer/handlers/keyboard.test.js dist/renderer/modals/jimaku.test.js dist/subsync/utils.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/x11-tracker.test.js",
|
"test:core:dist": "bun test dist/cli/args.test.js dist/cli/help.test.js dist/core/services/cli-command.test.js dist/core/services/ipc.test.js dist/core/services/anki-jimaku-ipc.test.js dist/core/services/field-grouping-overlay.test.js dist/core/services/numeric-shortcut-session.test.js dist/core/services/secondary-subtitle.test.js dist/core/services/mpv-render-metrics.test.js dist/core/services/overlay-content-measurement.test.js dist/core/services/mpv-control.test.js dist/core/services/mpv.test.js dist/core/services/runtime-options-ipc.test.js dist/core/services/runtime-config.test.js dist/core/services/yomitan-extension-paths.test.js dist/core/services/config-hot-reload.test.js dist/core/services/discord-presence.test.js dist/core/services/tokenizer.test.js dist/core/services/tokenizer/annotation-stage.test.js dist/core/services/tokenizer/parser-selection-stage.test.js dist/core/services/tokenizer/parser-enrichment-stage.test.js dist/core/services/subsync.test.js dist/core/services/overlay-bridge.test.js dist/core/services/overlay-manager.test.js dist/core/services/overlay-shortcut-handler.test.js dist/core/services/mining.test.js dist/core/services/anki-jimaku.test.js dist/core/services/jimaku-download-path.test.js dist/core/services/jellyfin.test.js dist/core/services/jellyfin-remote.test.js dist/core/services/immersion-tracker-service.test.js dist/core/services/overlay-runtime-init.test.js dist/core/services/app-ready.test.js dist/core/services/startup-bootstrap.test.js dist/core/services/subtitle-processing-controller.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/anilist/anilist-update-queue.test.js dist/renderer/error-recovery.test.js dist/renderer/subtitle-render.test.js dist/renderer/handlers/mouse.test.js dist/renderer/handlers/keyboard.test.js dist/renderer/modals/jimaku.test.js dist/subsync/utils.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/hyprland-tracker.test.js 
dist/window-trackers/x11-tracker.test.js dist/window-trackers/windows-helper.test.js dist/window-trackers/windows-tracker.test.js",
|
||||||
"test:core:smoke:dist": "bun test dist/cli/help.test.js dist/core/services/runtime-config.test.js dist/core/services/ipc.test.js dist/core/services/overlay-manager.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/startup-bootstrap.test.js dist/renderer/error-recovery.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/x11-tracker.test.js",
|
"test:core:smoke:dist": "bun test dist/cli/help.test.js dist/core/services/runtime-config.test.js dist/core/services/ipc.test.js dist/core/services/overlay-manager.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/startup-bootstrap.test.js dist/renderer/error-recovery.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/x11-tracker.test.js",
|
||||||
"test:smoke:dist": "bun run test:config:smoke:dist && bun run test:core:smoke:dist",
|
"test:smoke:dist": "bun run test:config:smoke:dist && bun run test:core:smoke:dist",
|
||||||
"test:subtitle:src": "bun test src/core/services/subsync.test.ts src/subsync/utils.test.ts",
|
"test:subtitle:src": "bun test src/core/services/subsync.test.ts src/subsync/utils.test.ts",
|
||||||
"test:immersion:sqlite:src": "bun test src/core/services/immersion-tracker-service.test.ts src/core/services/immersion-tracker/storage-session.test.ts",
|
"test:immersion:sqlite:src": "bun test src/core/services/immersion-tracker-service.test.ts src/core/services/immersion-tracker/storage-session.test.ts",
|
||||||
"test:immersion:sqlite:dist": "node --experimental-sqlite --test dist/core/services/immersion-tracker-service.test.js dist/core/services/immersion-tracker/storage-session.test.js",
|
"test:immersion:sqlite:dist": "bun test dist/core/services/immersion-tracker-service.test.js dist/core/services/immersion-tracker/storage-session.test.js",
|
||||||
"test:immersion:sqlite": "bun run tsc && bun run test:immersion:sqlite:dist",
|
"test:immersion:sqlite": "bun run tsc && bun run test:immersion:sqlite:dist",
|
||||||
"test:src": "node scripts/run-test-lane.mjs bun-src-full",
|
"test:src": "bun scripts/run-test-lane.mjs bun-src-full",
|
||||||
"test:launcher:unit:src": "node scripts/run-test-lane.mjs bun-launcher-unit",
|
"test:launcher:unit:src": "bun scripts/run-test-lane.mjs bun-launcher-unit",
|
||||||
"test:launcher:env:src": "bun run test:launcher:smoke:src && bun run test:plugin:src",
|
"test:launcher:env:src": "bun run test:launcher:smoke:src && bun run test:plugin:src",
|
||||||
"test:env": "bun run test:launcher:env:src && bun run test:immersion:sqlite:src",
|
"test:env": "bun run test:launcher:env:src && bun run test:immersion:sqlite:src",
|
||||||
"test:node:compat": "bun run tsc && node --experimental-sqlite --test dist/core/services/ipc.test.js dist/core/services/anki-jimaku-ipc.test.js dist/core/services/overlay-manager.test.js dist/main/config-validation.test.js dist/main/runtime/registry.test.js dist/main/runtime/startup-config.test.js",
|
"test:runtime:compat": "bun run tsc && bun test dist/core/services/ipc.test.js dist/core/services/anki-jimaku-ipc.test.js dist/core/services/overlay-manager.test.js dist/main/config-validation.test.js dist/main/runtime/registry.test.js dist/main/runtime/startup-config.test.js",
|
||||||
|
"test:node:compat": "bun run test:runtime:compat",
|
||||||
"test:full": "bun run test:src && bun run test:launcher:unit:src && bun run test:node:compat",
|
"test:full": "bun run test:src && bun run test:launcher:unit:src && bun run test:node:compat",
|
||||||
"test": "bun run test:fast",
|
"test": "bun run test:fast",
|
||||||
"test:config": "bun run test:config:src",
|
"test:config": "bun run test:config:src",
|
||||||
"test:launcher": "bun run test:launcher:src",
|
"test:launcher": "bun run test:launcher:src",
|
||||||
"test:core": "bun run test:core:src",
|
"test:core": "bun run test:core:src",
|
||||||
"test:subtitle": "bun run test:subtitle:src",
|
"test:subtitle": "bun run test:subtitle:src",
|
||||||
"test:fast": "bun run test:config:src && bun run test:core:src && bun test src/main-entry-runtime.test.ts src/anki-integration/anki-connect-proxy.test.ts src/release-workflow.test.ts && bun run tsc && node --experimental-sqlite --test dist/main/runtime/registry.test.js",
|
"test:fast": "bun run test:config:src && bun run test:core:src && bun test src/main-entry-runtime.test.ts src/anki-integration/anki-connect-proxy.test.ts src/release-workflow.test.ts src/ci-workflow.test.ts scripts/build-changelog.test.ts && bun run tsc && bun test dist/main/runtime/registry.test.js",
|
||||||
"generate:config-example": "bun run build && bun dist/generate-config-example.js",
|
"generate:config-example": "bun run build && bun dist/generate-config-example.js",
|
||||||
"start": "bun run build && electron . --start",
|
"start": "bun run build && electron . --start",
|
||||||
"dev": "bun run build && electron . --start --dev",
|
"dev": "bun run build && electron . --start --dev",
|
||||||
"stop": "electron . --stop",
|
"stop": "electron . --stop",
|
||||||
"toggle": "electron . --toggle",
|
"toggle": "electron . --toggle",
|
||||||
"build:appimage": "bun run build && electron-builder --linux AppImage",
|
"build:appimage": "bun run build && electron-builder --linux AppImage --publish never",
|
||||||
"build:mac": "bun run build && electron-builder --mac dmg zip",
|
"build:mac": "bun run build && electron-builder --mac dmg zip --publish never",
|
||||||
"build:mac:unsigned": "bun run build && env -u APPLE_ID -u APPLE_APP_SPECIFIC_PASSWORD -u APPLE_TEAM_ID -u CSC_LINK -u CSC_KEY_PASSWORD CSC_IDENTITY_AUTO_DISCOVERY=false electron-builder --mac dmg zip",
|
"build:mac:unsigned": "bun run build && env -u APPLE_ID -u APPLE_APP_SPECIFIC_PASSWORD -u APPLE_TEAM_ID -u CSC_LINK -u CSC_KEY_PASSWORD CSC_IDENTITY_AUTO_DISCOVERY=false electron-builder --mac dmg zip --publish never",
|
||||||
"build:mac:zip": "bun run build && electron-builder --mac zip"
|
"build:mac:zip": "bun run build && electron-builder --mac zip --publish never",
|
||||||
|
"build:win": "bun run build && electron-builder --win nsis zip --publish never",
|
||||||
|
"build:win:unsigned": "bun run build && node scripts/build-win-unsigned.mjs"
|
||||||
},
|
},
|
||||||
"keywords": [
|
"keywords": [
|
||||||
"anki",
|
"anki",
|
||||||
@@ -71,6 +80,7 @@
|
|||||||
"commander": "^14.0.3",
|
"commander": "^14.0.3",
|
||||||
"discord-rpc": "^4.0.1",
|
"discord-rpc": "^4.0.1",
|
||||||
"jsonc-parser": "^3.3.1",
|
"jsonc-parser": "^3.3.1",
|
||||||
|
"libsql": "^0.5.22",
|
||||||
"ws": "^8.19.0"
|
"ws": "^8.19.0"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
@@ -109,7 +119,26 @@
|
|||||||
"icon": "assets/SubMiner.png",
|
"icon": "assets/SubMiner.png",
|
||||||
"hardenedRuntime": true,
|
"hardenedRuntime": true,
|
||||||
"entitlements": "build/entitlements.mac.plist",
|
"entitlements": "build/entitlements.mac.plist",
|
||||||
"entitlementsInherit": "build/entitlements.mac.plist"
|
"entitlementsInherit": "build/entitlements.mac.plist",
|
||||||
|
"extraResources": [
|
||||||
|
{
|
||||||
|
"from": "dist/scripts/get-mpv-window-macos",
|
||||||
|
"to": "scripts/get-mpv-window-macos"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"win": {
|
||||||
|
"target": [
|
||||||
|
"nsis",
|
||||||
|
"zip"
|
||||||
|
],
|
||||||
|
"icon": "assets/SubMiner.png"
|
||||||
|
},
|
||||||
|
"nsis": {
|
||||||
|
"oneClick": false,
|
||||||
|
"perMachine": false,
|
||||||
|
"allowToChangeInstallationDirectory": true,
|
||||||
|
"include": "build/installer.nsh"
|
||||||
},
|
},
|
||||||
"files": [
|
"files": [
|
||||||
"dist/**/*",
|
"dist/**/*",
|
||||||
@@ -140,8 +169,8 @@
|
|||||||
"to": "plugin/subminer.conf"
|
"to": "plugin/subminer.conf"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"from": "dist/scripts/get-mpv-window-macos",
|
"from": "dist/scripts/get-mpv-window-windows.ps1",
|
||||||
"to": "scripts/get-mpv-window-macos"
|
"to": "scripts/get-mpv-window-windows.ps1"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4,10 +4,12 @@
|
|||||||
# Path to SubMiner binary (leave empty for auto-detection)
|
# Path to SubMiner binary (leave empty for auto-detection)
|
||||||
# Auto-detection searches common locations, including:
|
# Auto-detection searches common locations, including:
|
||||||
# - macOS: /Applications/SubMiner.app/Contents/MacOS/SubMiner, ~/Applications/SubMiner.app/Contents/MacOS/SubMiner
|
# - macOS: /Applications/SubMiner.app/Contents/MacOS/SubMiner, ~/Applications/SubMiner.app/Contents/MacOS/SubMiner
|
||||||
|
# - Windows: %LOCALAPPDATA%\Programs\SubMiner\SubMiner.exe, %ProgramFiles%\SubMiner\SubMiner.exe
|
||||||
# - Linux: ~/.local/bin/SubMiner.AppImage, /opt/SubMiner/SubMiner.AppImage, /usr/local/bin/SubMiner, /usr/bin/SubMiner
|
# - Linux: ~/.local/bin/SubMiner.AppImage, /opt/SubMiner/SubMiner.AppImage, /usr/local/bin/SubMiner, /usr/bin/SubMiner
|
||||||
binary_path=
|
binary_path=
|
||||||
|
|
||||||
# Path to mpv IPC socket (must match input-ipc-server in mpv.conf)
|
# Path to mpv IPC socket (must match input-ipc-server in mpv.conf)
|
||||||
|
# Windows installs rewrite this to \\.\pipe\subminer-socket during installation.
|
||||||
socket_path=/tmp/subminer-socket
|
socket_path=/tmp/subminer-socket
|
||||||
|
|
||||||
# Enable texthooker WebSocket server
|
# Enable texthooker WebSocket server
|
||||||
@@ -64,7 +66,7 @@ aniskip_show_button=yes
|
|||||||
aniskip_button_text=You can skip by pressing %s
|
aniskip_button_text=You can skip by pressing %s
|
||||||
|
|
||||||
# Keybinding to execute intro skip when button is visible.
|
# Keybinding to execute intro skip when button is visible.
|
||||||
aniskip_button_key=y-k
|
aniskip_button_key=TAB
|
||||||
|
|
||||||
# OSD hint duration in seconds (shown during first 3s of intro).
|
# OSD hint duration in seconds (shown during first 3s of intro).
|
||||||
aniskip_button_duration=3
|
aniskip_button_duration=3
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
local M = {}
|
local M = {}
|
||||||
local matcher = require("aniskip_match")
|
local matcher = require("aniskip_match")
|
||||||
|
local DEFAULT_ANISKIP_BUTTON_KEY = "TAB"
|
||||||
|
|
||||||
function M.create(ctx)
|
function M.create(ctx)
|
||||||
local mp = ctx.mp
|
local mp = ctx.mp
|
||||||
@@ -31,6 +32,18 @@ function M.create(ctx)
|
|||||||
return encoded:gsub(" ", "%%20")
|
return encoded:gsub(" ", "%%20")
|
||||||
end
|
end
|
||||||
|
|
||||||
|
local function is_remote_media_path()
|
||||||
|
local media_path = mp.get_property("path")
|
||||||
|
if type(media_path) ~= "string" then
|
||||||
|
return false
|
||||||
|
end
|
||||||
|
local trimmed = media_path:match("^%s*(.-)%s*$") or ""
|
||||||
|
if trimmed == "" then
|
||||||
|
return false
|
||||||
|
end
|
||||||
|
return trimmed:match("^%a[%w+.-]*://") ~= nil
|
||||||
|
end
|
||||||
|
|
||||||
local function parse_json_payload(text)
|
local function parse_json_payload(text)
|
||||||
if type(text) ~= "string" then
|
if type(text) ~= "string" then
|
||||||
return nil
|
return nil
|
||||||
@@ -452,7 +465,7 @@ function M.create(ctx)
|
|||||||
local intro_start = state.aniskip.intro_start or -1
|
local intro_start = state.aniskip.intro_start or -1
|
||||||
local hint_window_end = intro_start + 3
|
local hint_window_end = intro_start + 3
|
||||||
if in_intro and not state.aniskip.prompt_shown and now >= intro_start and now < hint_window_end then
|
if in_intro and not state.aniskip.prompt_shown and now >= intro_start and now < hint_window_end then
|
||||||
local key = opts.aniskip_button_key ~= "" and opts.aniskip_button_key or "y-k"
|
local key = opts.aniskip_button_key ~= "" and opts.aniskip_button_key or DEFAULT_ANISKIP_BUTTON_KEY
|
||||||
local message = string.format(opts.aniskip_button_text, key)
|
local message = string.format(opts.aniskip_button_text, key)
|
||||||
mp.osd_message(message, tonumber(opts.aniskip_button_duration) or 3)
|
mp.osd_message(message, tonumber(opts.aniskip_button_duration) or 3)
|
||||||
state.aniskip.prompt_shown = true
|
state.aniskip.prompt_shown = true
|
||||||
@@ -523,6 +536,10 @@ function M.create(ctx)
|
|||||||
end
|
end
|
||||||
|
|
||||||
local function should_fetch_aniskip_async(trigger_source, callback)
|
local function should_fetch_aniskip_async(trigger_source, callback)
|
||||||
|
if is_remote_media_path() then
|
||||||
|
callback(false, "remote-url")
|
||||||
|
return
|
||||||
|
end
|
||||||
if trigger_source == "script-message" or trigger_source == "overlay-start" then
|
if trigger_source == "script-message" or trigger_source == "overlay-start" then
|
||||||
callback(true, trigger_source)
|
callback(true, trigger_source)
|
||||||
return
|
return
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
local M = {}
|
local M = {}
|
||||||
|
|
||||||
function M.create(ctx)
|
function M.create(ctx)
|
||||||
|
local mp = ctx.mp
|
||||||
local utils = ctx.utils
|
local utils = ctx.utils
|
||||||
local opts = ctx.opts
|
local opts = ctx.opts
|
||||||
local state = ctx.state
|
local state = ctx.state
|
||||||
@@ -26,6 +27,13 @@ function M.create(ctx)
|
|||||||
end
|
end
|
||||||
|
|
||||||
local function binary_candidates_from_app_path(app_path)
|
local function binary_candidates_from_app_path(app_path)
|
||||||
|
if environment.is_windows() then
|
||||||
|
return {
|
||||||
|
utils.join_path(app_path, "SubMiner.exe"),
|
||||||
|
utils.join_path(app_path, "subminer.exe"),
|
||||||
|
}
|
||||||
|
end
|
||||||
|
|
||||||
return {
|
return {
|
||||||
utils.join_path(app_path, "Contents", "MacOS", "SubMiner"),
|
utils.join_path(app_path, "Contents", "MacOS", "SubMiner"),
|
||||||
utils.join_path(app_path, "Contents", "MacOS", "subminer"),
|
utils.join_path(app_path, "Contents", "MacOS", "subminer"),
|
||||||
@@ -43,6 +51,11 @@ function M.create(ctx)
|
|||||||
return true
|
return true
|
||||||
end
|
end
|
||||||
|
|
||||||
|
local function directory_exists(path)
|
||||||
|
local info = utils.file_info(path)
|
||||||
|
return info ~= nil and info.is_dir == true
|
||||||
|
end
|
||||||
|
|
||||||
local function resolve_binary_candidate(candidate)
|
local function resolve_binary_candidate(candidate)
|
||||||
local normalized = normalize_binary_path_candidate(candidate)
|
local normalized = normalize_binary_path_candidate(candidate)
|
||||||
if not normalized then
|
if not normalized then
|
||||||
@@ -53,6 +66,25 @@ function M.create(ctx)
|
|||||||
return normalized
|
return normalized
|
||||||
end
|
end
|
||||||
|
|
||||||
|
if environment.is_windows() then
|
||||||
|
if not normalized:lower():match("%.exe$") then
|
||||||
|
local with_exe = normalized .. ".exe"
|
||||||
|
if file_exists(with_exe) then
|
||||||
|
return with_exe
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
if directory_exists(normalized) then
|
||||||
|
for _, path in ipairs(binary_candidates_from_app_path(normalized)) do
|
||||||
|
if file_exists(path) then
|
||||||
|
return path
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
return nil
|
||||||
|
end
|
||||||
|
|
||||||
if not normalized:lower():find("%.app") then
|
if not normalized:lower():find("%.app") then
|
||||||
return nil
|
return nil
|
||||||
end
|
end
|
||||||
@@ -75,12 +107,8 @@ function M.create(ctx)
|
|||||||
end
|
end
|
||||||
|
|
||||||
local function find_binary_override()
|
local function find_binary_override()
|
||||||
local candidates = {
|
for _, env_name in ipairs({ "SUBMINER_APPIMAGE_PATH", "SUBMINER_BINARY_PATH" }) do
|
||||||
resolve_binary_candidate(os.getenv("SUBMINER_APPIMAGE_PATH")),
|
local path = resolve_binary_candidate(os.getenv(env_name))
|
||||||
resolve_binary_candidate(os.getenv("SUBMINER_BINARY_PATH")),
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, path in ipairs(candidates) do
|
|
||||||
if path and path ~= "" then
|
if path and path ~= "" then
|
||||||
return path
|
return path
|
||||||
end
|
end
|
||||||
@@ -89,6 +117,109 @@ function M.create(ctx)
|
|||||||
return nil
|
return nil
|
||||||
end
|
end
|
||||||
|
|
||||||
|
local function add_search_path(search_paths, candidate)
|
||||||
|
if type(candidate) == "string" and candidate ~= "" then
|
||||||
|
search_paths[#search_paths + 1] = candidate
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
local function trim_subprocess_stdout(value)
|
||||||
|
if type(value) ~= "string" then
|
||||||
|
return nil
|
||||||
|
end
|
||||||
|
local trimmed = value:match("^%s*(.-)%s*$") or ""
|
||||||
|
if trimmed == "" then
|
||||||
|
return nil
|
||||||
|
end
|
||||||
|
return trimmed
|
||||||
|
end
|
||||||
|
|
||||||
|
local function find_windows_binary_via_system_lookup()
|
||||||
|
if not environment.is_windows() then
|
||||||
|
return nil
|
||||||
|
end
|
||||||
|
if not mp or type(mp.command_native) ~= "function" then
|
||||||
|
return nil
|
||||||
|
end
|
||||||
|
|
||||||
|
local script = [=[
|
||||||
|
function Emit-FirstExistingPath {
|
||||||
|
param([string[]]$Candidates)
|
||||||
|
|
||||||
|
foreach ($candidate in $Candidates) {
|
||||||
|
if ([string]::IsNullOrWhiteSpace($candidate)) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if (Test-Path -LiteralPath $candidate -PathType Leaf) {
|
||||||
|
Write-Output $candidate
|
||||||
|
exit 0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
$runningProcess = Get-CimInstance Win32_Process |
|
||||||
|
Where-Object { $_.Name -ieq 'SubMiner.exe' -or $_.Name -ieq 'subminer.exe' } |
|
||||||
|
Select-Object -First 1 -Property ExecutablePath, CommandLine
|
||||||
|
if ($null -ne $runningProcess) {
|
||||||
|
Emit-FirstExistingPath @($runningProcess.ExecutablePath)
|
||||||
|
}
|
||||||
|
|
||||||
|
$localAppData = [Environment]::GetFolderPath('LocalApplicationData')
|
||||||
|
$programFiles = [Environment]::GetFolderPath('ProgramFiles')
|
||||||
|
$programFilesX86 = ${env:ProgramFiles(x86)}
|
||||||
|
|
||||||
|
Emit-FirstExistingPath @(
|
||||||
|
$(if (-not [string]::IsNullOrWhiteSpace($localAppData)) { Join-Path $localAppData 'Programs\SubMiner\SubMiner.exe' } else { $null }),
|
||||||
|
$(if (-not [string]::IsNullOrWhiteSpace($programFiles)) { Join-Path $programFiles 'SubMiner\SubMiner.exe' } else { $null }),
|
||||||
|
$(if (-not [string]::IsNullOrWhiteSpace($programFilesX86)) { Join-Path $programFilesX86 'SubMiner\SubMiner.exe' } else { $null }),
|
||||||
|
'C:\SubMiner\SubMiner.exe'
|
||||||
|
)
|
||||||
|
|
||||||
|
foreach ($registryPath in @(
|
||||||
|
'HKCU:\Software\Microsoft\Windows\CurrentVersion\App Paths\SubMiner.exe',
|
||||||
|
'HKLM:\Software\Microsoft\Windows\CurrentVersion\App Paths\SubMiner.exe',
|
||||||
|
'HKLM:\Software\WOW6432Node\Microsoft\Windows\CurrentVersion\App Paths\SubMiner.exe'
|
||||||
|
)) {
|
||||||
|
try {
|
||||||
|
$appPath = (Get-ItemProperty -Path $registryPath -ErrorAction Stop).'(default)'
|
||||||
|
Emit-FirstExistingPath @($appPath)
|
||||||
|
} catch {
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
$commandPath = Get-Command SubMiner.exe -ErrorAction Stop | Select-Object -First 1 -ExpandProperty Source
|
||||||
|
Emit-FirstExistingPath @($commandPath)
|
||||||
|
} catch {
|
||||||
|
}
|
||||||
|
]=]
|
||||||
|
|
||||||
|
local result = mp.command_native({
|
||||||
|
name = "subprocess",
|
||||||
|
args = {
|
||||||
|
"powershell.exe",
|
||||||
|
"-NoProfile",
|
||||||
|
"-ExecutionPolicy",
|
||||||
|
"Bypass",
|
||||||
|
"-Command",
|
||||||
|
script,
|
||||||
|
},
|
||||||
|
playback_only = false,
|
||||||
|
capture_stdout = true,
|
||||||
|
capture_stderr = false,
|
||||||
|
})
|
||||||
|
if not result or result.status ~= 0 then
|
||||||
|
return nil
|
||||||
|
end
|
||||||
|
|
||||||
|
local candidate = trim_subprocess_stdout(result.stdout)
|
||||||
|
if not candidate then
|
||||||
|
return nil
|
||||||
|
end
|
||||||
|
|
||||||
|
return resolve_binary_candidate(candidate)
|
||||||
|
end
|
||||||
|
|
||||||
local function find_binary()
|
local function find_binary()
|
||||||
local override = find_binary_override()
|
local override = find_binary_override()
|
||||||
if override then
|
if override then
|
||||||
@@ -100,17 +231,34 @@ function M.create(ctx)
|
|||||||
return configured
|
return configured
|
||||||
end
|
end
|
||||||
|
|
||||||
local search_paths = {
|
local system_lookup_binary = find_windows_binary_via_system_lookup()
|
||||||
"/Applications/SubMiner.app/Contents/MacOS/SubMiner",
|
if system_lookup_binary then
|
||||||
utils.join_path(os.getenv("HOME") or "", "Applications/SubMiner.app/Contents/MacOS/SubMiner"),
|
subminer_log("info", "binary", "Found Windows binary via system lookup at: " .. system_lookup_binary)
|
||||||
"C:\\Program Files\\SubMiner\\SubMiner.exe",
|
return system_lookup_binary
|
||||||
"C:\\Program Files (x86)\\SubMiner\\SubMiner.exe",
|
end
|
||||||
"C:\\SubMiner\\SubMiner.exe",
|
|
||||||
utils.join_path(os.getenv("HOME") or "", ".local/bin/SubMiner.AppImage"),
|
local home = os.getenv("HOME") or os.getenv("USERPROFILE") or ""
|
||||||
"/opt/SubMiner/SubMiner.AppImage",
|
local app_data = os.getenv("APPDATA") or ""
|
||||||
"/usr/local/bin/SubMiner",
|
local app_data_local = app_data ~= "" and app_data:gsub("[/\\][Rr][Oo][Aa][Mm][Ii][Nn][Gg]$", "\\Local") or ""
|
||||||
"/usr/bin/SubMiner",
|
local local_app_data = os.getenv("LOCALAPPDATA") or utils.join_path(home, "AppData", "Local")
|
||||||
}
|
local program_files = os.getenv("ProgramFiles") or "C:\\Program Files"
|
||||||
|
local program_files_x86 = os.getenv("ProgramFiles(x86)") or "C:\\Program Files (x86)"
|
||||||
|
local search_paths = {}
|
||||||
|
|
||||||
|
if environment.is_windows() then
|
||||||
|
add_search_path(search_paths, utils.join_path(app_data_local, "Programs", "SubMiner", "SubMiner.exe"))
|
||||||
|
add_search_path(search_paths, utils.join_path(local_app_data, "Programs", "SubMiner", "SubMiner.exe"))
|
||||||
|
add_search_path(search_paths, utils.join_path(program_files, "SubMiner", "SubMiner.exe"))
|
||||||
|
add_search_path(search_paths, utils.join_path(program_files_x86, "SubMiner", "SubMiner.exe"))
|
||||||
|
add_search_path(search_paths, "C:\\SubMiner\\SubMiner.exe")
|
||||||
|
else
|
||||||
|
add_search_path(search_paths, "/Applications/SubMiner.app/Contents/MacOS/SubMiner")
|
||||||
|
add_search_path(search_paths, utils.join_path(home, "Applications", "SubMiner.app", "Contents", "MacOS", "SubMiner"))
|
||||||
|
add_search_path(search_paths, utils.join_path(home, ".local", "bin", "SubMiner.AppImage"))
|
||||||
|
add_search_path(search_paths, "/opt/SubMiner/SubMiner.AppImage")
|
||||||
|
add_search_path(search_paths, "/usr/local/bin/SubMiner")
|
||||||
|
add_search_path(search_paths, "/usr/bin/SubMiner")
|
||||||
|
end
|
||||||
|
|
||||||
for _, path in ipairs(search_paths) do
|
for _, path in ipairs(search_paths) do
|
||||||
if file_exists(path) then
|
if file_exists(path) then
|
||||||
|
|||||||
@@ -1,6 +1,12 @@
|
|||||||
local M = {}
|
local M = {}
|
||||||
|
local BOOTSTRAP_GUARD_KEY = "__subminer_plugin_bootstrapped"
|
||||||
|
|
||||||
function M.init()
|
function M.init()
|
||||||
|
if rawget(_G, BOOTSTRAP_GUARD_KEY) == true then
|
||||||
|
return
|
||||||
|
end
|
||||||
|
rawset(_G, BOOTSTRAP_GUARD_KEY, true)
|
||||||
|
|
||||||
local input = require("mp.input")
|
local input = require("mp.input")
|
||||||
local mp = require("mp")
|
local mp = require("mp")
|
||||||
local msg = require("mp.msg")
|
local msg = require("mp.msg")
|
||||||
|
|||||||
@@ -61,10 +61,9 @@ function M.create(ctx)
|
|||||||
aniskip.clear_aniskip_state()
|
aniskip.clear_aniskip_state()
|
||||||
hover.clear_hover_overlay()
|
hover.clear_hover_overlay()
|
||||||
process.disarm_auto_play_ready_gate()
|
process.disarm_auto_play_ready_gate()
|
||||||
if state.overlay_running or state.texthooker_running then
|
if state.overlay_running then
|
||||||
subminer_log("info", "lifecycle", "mpv shutting down, stopping SubMiner process")
|
subminer_log("info", "lifecycle", "mpv shutting down, hiding SubMiner overlay")
|
||||||
show_osd("Shutting down...")
|
process.hide_visible_overlay()
|
||||||
process.stop_overlay()
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
@@ -77,6 +76,9 @@ function M.create(ctx)
|
|||||||
mp.register_event("end-file", function()
|
mp.register_event("end-file", function()
|
||||||
process.disarm_auto_play_ready_gate()
|
process.disarm_auto_play_ready_gate()
|
||||||
hover.clear_hover_overlay()
|
hover.clear_hover_overlay()
|
||||||
|
if state.overlay_running then
|
||||||
|
process.hide_visible_overlay()
|
||||||
|
end
|
||||||
end)
|
end)
|
||||||
mp.register_event("shutdown", function()
|
mp.register_event("shutdown", function()
|
||||||
hover.clear_hover_overlay()
|
hover.clear_hover_overlay()
|
||||||
|
|||||||
@@ -22,4 +22,9 @@ if not package.path:find(module_patterns, 1, true) then
|
|||||||
package.path = module_patterns .. package.path
|
package.path = module_patterns .. package.path
|
||||||
end
|
end
|
||||||
|
|
||||||
require("init").init()
|
local init_module = assert(loadfile(script_dir .. "/init.lua"))()
|
||||||
|
if type(init_module) == "table" and type(init_module.init) == "function" then
|
||||||
|
init_module.init()
|
||||||
|
elseif type(init_module) == "function" then
|
||||||
|
init_module()
|
||||||
|
end
|
||||||
|
|||||||
@@ -1,4 +1,27 @@
|
|||||||
local M = {}
|
local M = {}
|
||||||
|
local DEFAULT_ANISKIP_BUTTON_KEY = "TAB"
|
||||||
|
|
||||||
|
local function normalize_socket_path_option(socket_path, default_socket_path)
|
||||||
|
if type(default_socket_path) ~= "string" then
|
||||||
|
return socket_path
|
||||||
|
end
|
||||||
|
|
||||||
|
local trimmed_default = default_socket_path:match("^%s*(.-)%s*$")
|
||||||
|
local trimmed_socket = type(socket_path) == "string" and socket_path:match("^%s*(.-)%s*$") or socket_path
|
||||||
|
if trimmed_default ~= "\\\\.\\pipe\\subminer-socket" then
|
||||||
|
return trimmed_socket
|
||||||
|
end
|
||||||
|
if type(trimmed_socket) ~= "string" or trimmed_socket == "" then
|
||||||
|
return trimmed_default
|
||||||
|
end
|
||||||
|
if trimmed_socket == "/tmp/subminer-socket" or trimmed_socket == "\\tmp\\subminer-socket" then
|
||||||
|
return trimmed_default
|
||||||
|
end
|
||||||
|
if trimmed_socket == "\\\\.\\pipe\\tmp\\subminer-socket" then
|
||||||
|
return trimmed_default
|
||||||
|
end
|
||||||
|
return trimmed_socket
|
||||||
|
end
|
||||||
|
|
||||||
function M.load(options_lib, default_socket_path)
|
function M.load(options_lib, default_socket_path)
|
||||||
local opts = {
|
local opts = {
|
||||||
@@ -20,11 +43,12 @@ function M.load(options_lib, default_socket_path)
|
|||||||
aniskip_payload = "",
|
aniskip_payload = "",
|
||||||
aniskip_show_button = true,
|
aniskip_show_button = true,
|
||||||
aniskip_button_text = "You can skip by pressing %s",
|
aniskip_button_text = "You can skip by pressing %s",
|
||||||
aniskip_button_key = "y-k",
|
aniskip_button_key = DEFAULT_ANISKIP_BUTTON_KEY,
|
||||||
aniskip_button_duration = 3,
|
aniskip_button_duration = 3,
|
||||||
}
|
}
|
||||||
|
|
||||||
options_lib.read_options(opts, "subminer")
|
options_lib.read_options(opts, "subminer")
|
||||||
|
opts.socket_path = normalize_socket_path_option(opts.socket_path, default_socket_path)
|
||||||
return opts
|
return opts
|
||||||
end
|
end
|
||||||
|
|
||||||
|
|||||||
@@ -411,6 +411,28 @@ function M.create(ctx)
|
|||||||
show_osd("Stopped")
|
show_osd("Stopped")
|
||||||
end
|
end
|
||||||
|
|
||||||
|
local function hide_visible_overlay()
|
||||||
|
if not binary.ensure_binary_available() then
|
||||||
|
subminer_log("error", "binary", "SubMiner binary not found")
|
||||||
|
return
|
||||||
|
end
|
||||||
|
|
||||||
|
run_control_command_async("hide-visible-overlay", nil, function(ok, result)
|
||||||
|
if ok then
|
||||||
|
subminer_log("info", "process", "Visible overlay hidden")
|
||||||
|
else
|
||||||
|
subminer_log(
|
||||||
|
"warn",
|
||||||
|
"process",
|
||||||
|
"Hide-visible-overlay command returned non-zero status: "
|
||||||
|
.. tostring(result and result.status or "unknown")
|
||||||
|
)
|
||||||
|
end
|
||||||
|
end)
|
||||||
|
|
||||||
|
disarm_auto_play_ready_gate()
|
||||||
|
end
|
||||||
|
|
||||||
local function toggle_overlay()
|
local function toggle_overlay()
|
||||||
if not binary.ensure_binary_available() then
|
if not binary.ensure_binary_available() then
|
||||||
subminer_log("error", "binary", "SubMiner binary not found")
|
subminer_log("error", "binary", "SubMiner binary not found")
|
||||||
@@ -511,6 +533,7 @@ function M.create(ctx)
|
|||||||
start_overlay = start_overlay,
|
start_overlay = start_overlay,
|
||||||
start_overlay_from_script_message = start_overlay_from_script_message,
|
start_overlay_from_script_message = start_overlay_from_script_message,
|
||||||
stop_overlay = stop_overlay,
|
stop_overlay = stop_overlay,
|
||||||
|
hide_visible_overlay = hide_visible_overlay,
|
||||||
toggle_overlay = toggle_overlay,
|
toggle_overlay = toggle_overlay,
|
||||||
open_options = open_options,
|
open_options = open_options,
|
||||||
restart_overlay = restart_overlay,
|
restart_overlay = restart_overlay,
|
||||||
|
|||||||
@@ -1,4 +1,6 @@
|
|||||||
local M = {}
|
local M = {}
|
||||||
|
local DEFAULT_ANISKIP_BUTTON_KEY = "TAB"
|
||||||
|
local LEGACY_ANISKIP_BUTTON_KEY = "y-k"
|
||||||
|
|
||||||
function M.create(ctx)
|
function M.create(ctx)
|
||||||
local mp = ctx.mp
|
local mp = ctx.mp
|
||||||
@@ -89,8 +91,11 @@ function M.create(ctx)
|
|||||||
aniskip.skip_intro_now()
|
aniskip.skip_intro_now()
|
||||||
end)
|
end)
|
||||||
end
|
end
|
||||||
if opts.aniskip_button_key ~= "y-k" then
|
if
|
||||||
mp.add_key_binding("y-k", "subminer-skip-intro-fallback", function()
|
opts.aniskip_button_key ~= LEGACY_ANISKIP_BUTTON_KEY
|
||||||
|
and opts.aniskip_button_key ~= DEFAULT_ANISKIP_BUTTON_KEY
|
||||||
|
then
|
||||||
|
mp.add_key_binding(LEGACY_ANISKIP_BUTTON_KEY, "subminer-skip-intro-fallback", function()
|
||||||
aniskip.skip_intro_now()
|
aniskip.skip_intro_now()
|
||||||
end)
|
end)
|
||||||
end
|
end
|
||||||
|
|||||||
225
scripts/build-changelog.test.ts
Normal file
225
scripts/build-changelog.test.ts
Normal file
@@ -0,0 +1,225 @@
|
|||||||
|
import assert from 'node:assert/strict';
|
||||||
|
import fs from 'node:fs';
|
||||||
|
import path from 'node:path';
|
||||||
|
import test from 'node:test';
|
||||||
|
|
||||||
|
async function loadModule() {
|
||||||
|
return import('./build-changelog');
|
||||||
|
}
|
||||||
|
|
||||||
|
function createWorkspace(name: string): string {
|
||||||
|
const baseDir = path.join(process.cwd(), '.tmp', 'build-changelog-test');
|
||||||
|
fs.mkdirSync(baseDir, { recursive: true });
|
||||||
|
return fs.mkdtempSync(path.join(baseDir, `${name}-`));
|
||||||
|
}
|
||||||
|
|
||||||
|
test('resolveChangelogOutputPaths stays repo-local and never writes docs paths', async () => {
|
||||||
|
const { resolveChangelogOutputPaths } = await loadModule();
|
||||||
|
const workspace = createWorkspace('with-docs-repo');
|
||||||
|
const projectRoot = path.join(workspace, 'SubMiner');
|
||||||
|
|
||||||
|
fs.mkdirSync(projectRoot, { recursive: true });
|
||||||
|
|
||||||
|
try {
|
||||||
|
const outputPaths = resolveChangelogOutputPaths({ cwd: projectRoot });
|
||||||
|
|
||||||
|
assert.deepEqual(outputPaths, [path.join(projectRoot, 'CHANGELOG.md')]);
|
||||||
|
assert.equal(outputPaths.includes(path.join(projectRoot, 'docs', 'changelog.md')), false);
|
||||||
|
} finally {
|
||||||
|
fs.rmSync(workspace, { recursive: true, force: true });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
test('writeChangelogArtifacts ignores README, groups fragments by type, writes release notes, and deletes only fragment files', async () => {
|
||||||
|
const { writeChangelogArtifacts } = await loadModule();
|
||||||
|
const workspace = createWorkspace('write-artifacts');
|
||||||
|
const projectRoot = path.join(workspace, 'SubMiner');
|
||||||
|
const existingChangelog = [
|
||||||
|
'# Changelog',
|
||||||
|
'',
|
||||||
|
'## v0.4.0 (2026-03-01)',
|
||||||
|
'- Existing fix',
|
||||||
|
'',
|
||||||
|
].join('\n');
|
||||||
|
|
||||||
|
fs.mkdirSync(projectRoot, { recursive: true });
|
||||||
|
fs.mkdirSync(path.join(projectRoot, 'changes'), { recursive: true });
|
||||||
|
fs.writeFileSync(path.join(projectRoot, 'CHANGELOG.md'), existingChangelog, 'utf8');
|
||||||
|
fs.writeFileSync(
|
||||||
|
path.join(projectRoot, 'changes', 'README.md'),
|
||||||
|
'# Changelog Fragments\n\nIgnored helper text.\n',
|
||||||
|
'utf8',
|
||||||
|
);
|
||||||
|
fs.writeFileSync(
|
||||||
|
path.join(projectRoot, 'changes', '001.md'),
|
||||||
|
['type: added', 'area: overlay', '', '- Added release fragments.'].join('\n'),
|
||||||
|
'utf8',
|
||||||
|
);
|
||||||
|
fs.writeFileSync(
|
||||||
|
path.join(projectRoot, 'changes', '002.md'),
|
||||||
|
['type: fixed', 'area: release', '', 'Fixed release notes generation.'].join('\n'),
|
||||||
|
'utf8',
|
||||||
|
);
|
||||||
|
|
||||||
|
try {
|
||||||
|
const result = writeChangelogArtifacts({
|
||||||
|
cwd: projectRoot,
|
||||||
|
version: '0.4.1',
|
||||||
|
date: '2026-03-07',
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.deepEqual(result.outputPaths, [path.join(projectRoot, 'CHANGELOG.md')]);
|
||||||
|
assert.deepEqual(result.deletedFragmentPaths, [
|
||||||
|
path.join(projectRoot, 'changes', '001.md'),
|
||||||
|
path.join(projectRoot, 'changes', '002.md'),
|
||||||
|
]);
|
||||||
|
assert.equal(fs.existsSync(path.join(projectRoot, 'changes', '001.md')), false);
|
||||||
|
assert.equal(fs.existsSync(path.join(projectRoot, 'changes', '002.md')), false);
|
||||||
|
assert.equal(fs.existsSync(path.join(projectRoot, 'changes', 'README.md')), true);
|
||||||
|
|
||||||
|
const changelog = fs.readFileSync(path.join(projectRoot, 'CHANGELOG.md'), 'utf8');
|
||||||
|
assert.match(
|
||||||
|
changelog,
|
||||||
|
/^# Changelog\n\n## v0\.4\.1 \(2026-03-07\)\n\n### Added\n- Overlay: Added release fragments\.\n\n### Fixed\n- Release: Fixed release notes generation\.\n\n## v0\.4\.0 \(2026-03-01\)\n- Existing fix\n$/m,
|
||||||
|
);
|
||||||
|
|
||||||
|
const releaseNotes = fs.readFileSync(
|
||||||
|
path.join(projectRoot, 'release', 'release-notes.md'),
|
||||||
|
'utf8',
|
||||||
|
);
|
||||||
|
assert.match(releaseNotes, /## Highlights\n### Added\n- Overlay: Added release fragments\./);
|
||||||
|
assert.match(releaseNotes, /### Fixed\n- Release: Fixed release notes generation\./);
|
||||||
|
assert.match(releaseNotes, /## Installation\n\nSee the README and docs\/installation guide/);
|
||||||
|
} finally {
|
||||||
|
fs.rmSync(workspace, { recursive: true, force: true });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
test('verifyChangelogReadyForRelease ignores README but rejects pending fragments and missing version sections', async () => {
|
||||||
|
const { verifyChangelogReadyForRelease } = await loadModule();
|
||||||
|
const workspace = createWorkspace('verify-release');
|
||||||
|
const projectRoot = path.join(workspace, 'SubMiner');
|
||||||
|
|
||||||
|
fs.mkdirSync(path.join(projectRoot, 'changes'), { recursive: true });
|
||||||
|
fs.writeFileSync(path.join(projectRoot, 'CHANGELOG.md'), '# Changelog\n', 'utf8');
|
||||||
|
fs.writeFileSync(
|
||||||
|
path.join(projectRoot, 'changes', 'README.md'),
|
||||||
|
'# Changelog Fragments\n',
|
||||||
|
'utf8',
|
||||||
|
);
|
||||||
|
fs.writeFileSync(path.join(projectRoot, 'changes', '001.md'), '- Pending fragment.\n', 'utf8');
|
||||||
|
|
||||||
|
try {
|
||||||
|
assert.throws(
|
||||||
|
() => verifyChangelogReadyForRelease({ cwd: projectRoot, version: '0.4.1' }),
|
||||||
|
/Pending changelog fragments/,
|
||||||
|
);
|
||||||
|
|
||||||
|
fs.rmSync(path.join(projectRoot, 'changes', '001.md'));
|
||||||
|
|
||||||
|
assert.throws(
|
||||||
|
() => verifyChangelogReadyForRelease({ cwd: projectRoot, version: '0.4.1' }),
|
||||||
|
/Missing CHANGELOG section for v0\.4\.1/,
|
||||||
|
);
|
||||||
|
} finally {
|
||||||
|
fs.rmSync(workspace, { recursive: true, force: true });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
test('verifyChangelogReadyForRelease rejects explicit release versions that do not match package.json', async () => {
|
||||||
|
const { verifyChangelogReadyForRelease } = await loadModule();
|
||||||
|
const workspace = createWorkspace('verify-release-version-match');
|
||||||
|
const projectRoot = path.join(workspace, 'SubMiner');
|
||||||
|
|
||||||
|
fs.mkdirSync(path.join(projectRoot, 'changes'), { recursive: true });
|
||||||
|
fs.writeFileSync(
|
||||||
|
path.join(projectRoot, 'package.json'),
|
||||||
|
JSON.stringify({ name: 'subminer', version: '0.4.0' }, null, 2),
|
||||||
|
'utf8',
|
||||||
|
);
|
||||||
|
fs.writeFileSync(
|
||||||
|
path.join(projectRoot, 'CHANGELOG.md'),
|
||||||
|
'# Changelog\n\n## v0.4.1 (2026-03-09)\n- Ready.\n',
|
||||||
|
'utf8',
|
||||||
|
);
|
||||||
|
|
||||||
|
try {
|
||||||
|
assert.throws(
|
||||||
|
() => verifyChangelogReadyForRelease({ cwd: projectRoot, version: '0.4.1' }),
|
||||||
|
/package\.json version \(0\.4\.0\) does not match requested release version \(0\.4\.1\)/,
|
||||||
|
);
|
||||||
|
} finally {
|
||||||
|
fs.rmSync(workspace, { recursive: true, force: true });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
test('verifyChangelogFragments rejects invalid metadata', async () => {
|
||||||
|
const { verifyChangelogFragments } = await loadModule();
|
||||||
|
const workspace = createWorkspace('lint-invalid');
|
||||||
|
const projectRoot = path.join(workspace, 'SubMiner');
|
||||||
|
|
||||||
|
fs.mkdirSync(path.join(projectRoot, 'changes'), { recursive: true });
|
||||||
|
fs.writeFileSync(
|
||||||
|
path.join(projectRoot, 'changes', '001.md'),
|
||||||
|
['type: nope', 'area: overlay', '', '- Invalid type.'].join('\n'),
|
||||||
|
'utf8',
|
||||||
|
);
|
||||||
|
|
||||||
|
try {
|
||||||
|
assert.throws(
|
||||||
|
() => verifyChangelogFragments({ cwd: projectRoot }),
|
||||||
|
/must declare type as one of/,
|
||||||
|
);
|
||||||
|
} finally {
|
||||||
|
fs.rmSync(workspace, { recursive: true, force: true });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
test('verifyPullRequestChangelog requires fragments for user-facing changes and skips docs-only changes', async () => {
|
||||||
|
const { verifyPullRequestChangelog } = await loadModule();
|
||||||
|
|
||||||
|
assert.throws(
|
||||||
|
() =>
|
||||||
|
verifyPullRequestChangelog({
|
||||||
|
changedEntries: [{ path: 'src/main-entry.ts', status: 'M' }],
|
||||||
|
changedLabels: [],
|
||||||
|
}),
|
||||||
|
/requires a changelog fragment/,
|
||||||
|
);
|
||||||
|
|
||||||
|
assert.doesNotThrow(() =>
|
||||||
|
verifyPullRequestChangelog({
|
||||||
|
changedEntries: [{ path: 'docs/RELEASING.md', status: 'M' }],
|
||||||
|
changedLabels: [],
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
assert.doesNotThrow(() =>
|
||||||
|
verifyPullRequestChangelog({
|
||||||
|
changedEntries: [{ path: 'src/main-entry.ts', status: 'M' }],
|
||||||
|
changedLabels: ['skip-changelog'],
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
assert.throws(
|
||||||
|
() =>
|
||||||
|
verifyPullRequestChangelog({
|
||||||
|
changedEntries: [
|
||||||
|
{ path: 'src/main-entry.ts', status: 'M' },
|
||||||
|
{ path: 'changes/001.md', status: 'D' },
|
||||||
|
],
|
||||||
|
changedLabels: [],
|
||||||
|
}),
|
||||||
|
/requires a changelog fragment/,
|
||||||
|
);
|
||||||
|
|
||||||
|
assert.doesNotThrow(() =>
|
||||||
|
verifyPullRequestChangelog({
|
||||||
|
changedEntries: [
|
||||||
|
{ path: 'src/main-entry.ts', status: 'M' },
|
||||||
|
{ path: 'changes/001.md', status: 'A' },
|
||||||
|
],
|
||||||
|
changedLabels: [],
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
});
|
||||||
589
scripts/build-changelog.ts
Normal file
589
scripts/build-changelog.ts
Normal file
@@ -0,0 +1,589 @@
|
|||||||
|
import * as fs from 'node:fs';
|
||||||
|
import * as path from 'node:path';
|
||||||
|
import { execFileSync } from 'node:child_process';
|
||||||
|
|
||||||
|
type ChangelogFsDeps = {
|
||||||
|
existsSync?: (candidate: string) => boolean;
|
||||||
|
mkdirSync?: (candidate: string, options: { recursive: true }) => void;
|
||||||
|
readFileSync?: (candidate: string, encoding: BufferEncoding) => string;
|
||||||
|
readdirSync?: (candidate: string, options: { withFileTypes: true }) => fs.Dirent[];
|
||||||
|
rmSync?: (candidate: string) => void;
|
||||||
|
writeFileSync?: (candidate: string, content: string, encoding: BufferEncoding) => void;
|
||||||
|
log?: (message: string) => void;
|
||||||
|
};
|
||||||
|
|
||||||
|
type ChangelogOptions = {
|
||||||
|
cwd?: string;
|
||||||
|
date?: string;
|
||||||
|
version?: string;
|
||||||
|
deps?: ChangelogFsDeps;
|
||||||
|
};
|
||||||
|
|
||||||
|
type FragmentType = 'added' | 'changed' | 'fixed' | 'docs' | 'internal';
|
||||||
|
|
||||||
|
type ChangeFragment = {
|
||||||
|
area: string;
|
||||||
|
bullets: string[];
|
||||||
|
path: string;
|
||||||
|
type: FragmentType;
|
||||||
|
};
|
||||||
|
|
||||||
|
type PullRequestChangelogOptions = {
|
||||||
|
changedEntries: Array<{
|
||||||
|
path: string;
|
||||||
|
status: string;
|
||||||
|
}>;
|
||||||
|
changedLabels?: string[];
|
||||||
|
};
|
||||||
|
|
||||||
|
const RELEASE_NOTES_PATH = path.join('release', 'release-notes.md');
|
||||||
|
const CHANGELOG_HEADER = '# Changelog';
|
||||||
|
const CHANGE_TYPES: FragmentType[] = ['added', 'changed', 'fixed', 'docs', 'internal'];
|
||||||
|
const CHANGE_TYPE_HEADINGS: Record<FragmentType, string> = {
|
||||||
|
added: 'Added',
|
||||||
|
changed: 'Changed',
|
||||||
|
fixed: 'Fixed',
|
||||||
|
docs: 'Docs',
|
||||||
|
internal: 'Internal',
|
||||||
|
};
|
||||||
|
const SKIP_CHANGELOG_LABEL = 'skip-changelog';
|
||||||
|
|
||||||
|
function normalizeVersion(version: string): string {
|
||||||
|
return version.replace(/^v/, '');
|
||||||
|
}
|
||||||
|
|
||||||
|
function resolveDate(date?: string): string {
|
||||||
|
return date ?? new Date().toISOString().slice(0, 10);
|
||||||
|
}
|
||||||
|
|
||||||
|
function resolvePackageVersion(
|
||||||
|
cwd: string,
|
||||||
|
readFileSync: (candidate: string, encoding: BufferEncoding) => string,
|
||||||
|
): string {
|
||||||
|
const packageJsonPath = path.join(cwd, 'package.json');
|
||||||
|
const packageJson = JSON.parse(readFileSync(packageJsonPath, 'utf8')) as { version?: string };
|
||||||
|
if (!packageJson.version) {
|
||||||
|
throw new Error(`Missing package.json version at ${packageJsonPath}`);
|
||||||
|
}
|
||||||
|
return normalizeVersion(packageJson.version);
|
||||||
|
}
|
||||||
|
|
||||||
|
function resolveVersion(options: Pick<ChangelogOptions, 'cwd' | 'version' | 'deps'>): string {
|
||||||
|
const cwd = options.cwd ?? process.cwd();
|
||||||
|
const readFileSync = options.deps?.readFileSync ?? fs.readFileSync;
|
||||||
|
return normalizeVersion(options.version ?? resolvePackageVersion(cwd, readFileSync));
|
||||||
|
}
|
||||||
|
|
||||||
|
function verifyRequestedVersionMatchesPackageVersion(
|
||||||
|
options: Pick<ChangelogOptions, 'cwd' | 'version' | 'deps'>,
|
||||||
|
): void {
|
||||||
|
if (!options.version) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const cwd = options.cwd ?? process.cwd();
|
||||||
|
const existsSync = options.deps?.existsSync ?? fs.existsSync;
|
||||||
|
const readFileSync = options.deps?.readFileSync ?? fs.readFileSync;
|
||||||
|
const packageJsonPath = path.join(cwd, 'package.json');
|
||||||
|
if (!existsSync(packageJsonPath)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const packageVersion = resolvePackageVersion(cwd, readFileSync);
|
||||||
|
const requestedVersion = normalizeVersion(options.version);
|
||||||
|
|
||||||
|
if (packageVersion !== requestedVersion) {
|
||||||
|
throw new Error(
|
||||||
|
`package.json version (${packageVersion}) does not match requested release version (${requestedVersion}).`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function resolveChangesDir(cwd: string): string {
|
||||||
|
return path.join(cwd, 'changes');
|
||||||
|
}
|
||||||
|
|
||||||
|
function resolveFragmentPaths(cwd: string, deps?: ChangelogFsDeps): string[] {
|
||||||
|
const changesDir = resolveChangesDir(cwd);
|
||||||
|
const existsSync = deps?.existsSync ?? fs.existsSync;
|
||||||
|
const readdirSync = deps?.readdirSync ?? fs.readdirSync;
|
||||||
|
|
||||||
|
if (!existsSync(changesDir)) {
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
|
||||||
|
return readdirSync(changesDir, { withFileTypes: true })
|
||||||
|
.filter(
|
||||||
|
(entry) =>
|
||||||
|
entry.isFile() && entry.name.endsWith('.md') && entry.name.toLowerCase() !== 'readme.md',
|
||||||
|
)
|
||||||
|
.map((entry) => path.join(changesDir, entry.name))
|
||||||
|
.sort();
|
||||||
|
}
|
||||||
|
|
||||||
|
function normalizeFragmentBullets(content: string): string[] {
|
||||||
|
const lines = content
|
||||||
|
.split(/\r?\n/)
|
||||||
|
.map((line) => line.trim())
|
||||||
|
.filter(Boolean)
|
||||||
|
.map((line) => {
|
||||||
|
const match = /^[-*]\s+(.*)$/.exec(line);
|
||||||
|
return `- ${(match?.[1] ?? line).trim()}`;
|
||||||
|
});
|
||||||
|
|
||||||
|
if (lines.length === 0) {
|
||||||
|
throw new Error('Changelog fragment cannot be empty.');
|
||||||
|
}
|
||||||
|
|
||||||
|
return lines;
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseFragmentMetadata(
|
||||||
|
content: string,
|
||||||
|
fragmentPath: string,
|
||||||
|
): {
|
||||||
|
area: string;
|
||||||
|
body: string;
|
||||||
|
type: FragmentType;
|
||||||
|
} {
|
||||||
|
const lines = content.split(/\r?\n/);
|
||||||
|
let index = 0;
|
||||||
|
|
||||||
|
while (index < lines.length && !(lines[index] ?? '').trim()) {
|
||||||
|
index += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
const metadata = new Map<string, string>();
|
||||||
|
while (index < lines.length) {
|
||||||
|
const trimmed = (lines[index] ?? '').trim();
|
||||||
|
if (!trimmed) {
|
||||||
|
index += 1;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
const match = /^([a-z]+):\s*(.+)$/.exec(trimmed);
|
||||||
|
if (!match) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
const [, rawKey = '', rawValue = ''] = match;
|
||||||
|
metadata.set(rawKey, rawValue.trim());
|
||||||
|
index += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
const type = metadata.get('type');
|
||||||
|
if (!type || !CHANGE_TYPES.includes(type as FragmentType)) {
|
||||||
|
throw new Error(`${fragmentPath} must declare type as one of: ${CHANGE_TYPES.join(', ')}.`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const area = metadata.get('area');
|
||||||
|
if (!area) {
|
||||||
|
throw new Error(`${fragmentPath} must declare area.`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const body = lines.slice(index).join('\n').trim();
|
||||||
|
if (!body) {
|
||||||
|
throw new Error(`${fragmentPath} must include at least one changelog bullet.`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
area,
|
||||||
|
body,
|
||||||
|
type: type as FragmentType,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function readChangeFragments(cwd: string, deps?: ChangelogFsDeps): ChangeFragment[] {
|
||||||
|
const readFileSync = deps?.readFileSync ?? fs.readFileSync;
|
||||||
|
return resolveFragmentPaths(cwd, deps).map((fragmentPath) => {
|
||||||
|
const parsed = parseFragmentMetadata(readFileSync(fragmentPath, 'utf8'), fragmentPath);
|
||||||
|
return {
|
||||||
|
area: parsed.area,
|
||||||
|
bullets: normalizeFragmentBullets(parsed.body),
|
||||||
|
path: fragmentPath,
|
||||||
|
type: parsed.type,
|
||||||
|
};
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function formatAreaLabel(area: string): string {
|
||||||
|
return area
|
||||||
|
.split(/[-_\s]+/)
|
||||||
|
.filter(Boolean)
|
||||||
|
.map((segment) => segment.charAt(0).toUpperCase() + segment.slice(1))
|
||||||
|
.join(' ');
|
||||||
|
}
|
||||||
|
|
||||||
|
function renderFragmentBullet(fragment: ChangeFragment, bullet: string): string {
|
||||||
|
return `- ${formatAreaLabel(fragment.area)}: ${bullet.replace(/^- /, '')}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
function renderGroupedChanges(fragments: ChangeFragment[]): string {
|
||||||
|
const sections = CHANGE_TYPES.flatMap((type) => {
|
||||||
|
const typeFragments = fragments.filter((fragment) => fragment.type === type);
|
||||||
|
if (typeFragments.length === 0) {
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
|
||||||
|
const bullets = typeFragments
|
||||||
|
.flatMap((fragment) =>
|
||||||
|
fragment.bullets.map((bullet) => renderFragmentBullet(fragment, bullet)),
|
||||||
|
)
|
||||||
|
.join('\n');
|
||||||
|
return [`### ${CHANGE_TYPE_HEADINGS[type]}\n${bullets}`];
|
||||||
|
});
|
||||||
|
|
||||||
|
return sections.join('\n\n');
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildReleaseSection(version: string, date: string, fragments: ChangeFragment[]): string {
|
||||||
|
if (fragments.length === 0) {
|
||||||
|
throw new Error('No changelog fragments found in changes/.');
|
||||||
|
}
|
||||||
|
|
||||||
|
return [`## v${version} (${date})`, '', renderGroupedChanges(fragments), ''].join('\n');
|
||||||
|
}
|
||||||
|
|
||||||
|
function ensureChangelogHeader(existingChangelog: string): string {
|
||||||
|
const trimmed = existingChangelog.trim();
|
||||||
|
if (!trimmed) {
|
||||||
|
return `${CHANGELOG_HEADER}\n`;
|
||||||
|
}
|
||||||
|
if (trimmed.startsWith(CHANGELOG_HEADER)) {
|
||||||
|
return `${trimmed}\n`;
|
||||||
|
}
|
||||||
|
return `${CHANGELOG_HEADER}\n\n${trimmed}\n`;
|
||||||
|
}
|
||||||
|
|
||||||
|
function prependReleaseSection(
|
||||||
|
existingChangelog: string,
|
||||||
|
releaseSection: string,
|
||||||
|
version: string,
|
||||||
|
): string {
|
||||||
|
const normalizedExisting = ensureChangelogHeader(existingChangelog);
|
||||||
|
if (extractReleaseSectionBody(normalizedExisting, version) !== null) {
|
||||||
|
throw new Error(`CHANGELOG already contains a section for v${version}.`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const withoutHeader = normalizedExisting.replace(/^# Changelog\s*/, '').trimStart();
|
||||||
|
const body = [releaseSection.trimEnd(), withoutHeader.trimEnd()].filter(Boolean).join('\n\n');
|
||||||
|
return `${CHANGELOG_HEADER}\n\n${body}\n`;
|
||||||
|
}
|
||||||
|
|
||||||
|
function escapeRegExp(value: string): string {
|
||||||
|
return value.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
||||||
|
}
|
||||||
|
|
||||||
|
function extractReleaseSectionBody(changelog: string, version: string): string | null {
|
||||||
|
const headingPattern = new RegExp(
|
||||||
|
`^## v${escapeRegExp(normalizeVersion(version))} \\([^\\n]+\\)$`,
|
||||||
|
'm',
|
||||||
|
);
|
||||||
|
const headingMatch = headingPattern.exec(changelog);
|
||||||
|
if (!headingMatch) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const bodyStart = headingMatch.index + headingMatch[0].length + 1;
|
||||||
|
const remaining = changelog.slice(bodyStart);
|
||||||
|
const nextHeadingMatch = /^## /m.exec(remaining);
|
||||||
|
const body = nextHeadingMatch ? remaining.slice(0, nextHeadingMatch.index) : remaining;
|
||||||
|
return body.trim();
|
||||||
|
}
|
||||||
|
|
||||||
|
export function resolveChangelogOutputPaths(options?: { cwd?: string }): string[] {
|
||||||
|
const cwd = options?.cwd ?? process.cwd();
|
||||||
|
return [path.join(cwd, 'CHANGELOG.md')];
|
||||||
|
}
|
||||||
|
|
||||||
|
function renderReleaseNotes(changes: string): string {
|
||||||
|
return [
|
||||||
|
'## Highlights',
|
||||||
|
changes,
|
||||||
|
'',
|
||||||
|
'## Installation',
|
||||||
|
'',
|
||||||
|
'See the README and docs/installation guide for full setup steps.',
|
||||||
|
'',
|
||||||
|
'## Assets',
|
||||||
|
'',
|
||||||
|
'- Linux: `SubMiner.AppImage`',
|
||||||
|
'- macOS: `SubMiner-*.dmg` and `SubMiner-*.zip`',
|
||||||
|
'- Optional extras: `subminer-assets.tar.gz` and the `subminer` launcher',
|
||||||
|
'',
|
||||||
|
'Note: the `subminer` wrapper script uses Bun (`#!/usr/bin/env bun`), so `bun` must be installed and on `PATH`.',
|
||||||
|
'',
|
||||||
|
].join('\n');
|
||||||
|
}
|
||||||
|
|
||||||
|
function writeReleaseNotesFile(cwd: string, changes: string, deps?: ChangelogFsDeps): string {
|
||||||
|
const mkdirSync = deps?.mkdirSync ?? fs.mkdirSync;
|
||||||
|
const writeFileSync = deps?.writeFileSync ?? fs.writeFileSync;
|
||||||
|
const releaseNotesPath = path.join(cwd, RELEASE_NOTES_PATH);
|
||||||
|
|
||||||
|
mkdirSync(path.dirname(releaseNotesPath), { recursive: true });
|
||||||
|
writeFileSync(releaseNotesPath, renderReleaseNotes(changes), 'utf8');
|
||||||
|
return releaseNotesPath;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function writeChangelogArtifacts(options?: ChangelogOptions): {
|
||||||
|
deletedFragmentPaths: string[];
|
||||||
|
outputPaths: string[];
|
||||||
|
releaseNotesPath: string;
|
||||||
|
} {
|
||||||
|
const cwd = options?.cwd ?? process.cwd();
|
||||||
|
const existsSync = options?.deps?.existsSync ?? fs.existsSync;
|
||||||
|
const mkdirSync = options?.deps?.mkdirSync ?? fs.mkdirSync;
|
||||||
|
const readFileSync = options?.deps?.readFileSync ?? fs.readFileSync;
|
||||||
|
const rmSync = options?.deps?.rmSync ?? fs.rmSync;
|
||||||
|
const writeFileSync = options?.deps?.writeFileSync ?? fs.writeFileSync;
|
||||||
|
const log = options?.deps?.log ?? console.log;
|
||||||
|
const version = resolveVersion(options ?? {});
|
||||||
|
const date = resolveDate(options?.date);
|
||||||
|
const fragments = readChangeFragments(cwd, options?.deps);
|
||||||
|
const releaseSection = buildReleaseSection(version, date, fragments);
|
||||||
|
const existingChangelogPath = path.join(cwd, 'CHANGELOG.md');
|
||||||
|
const existingChangelog = existsSync(existingChangelogPath)
|
||||||
|
? readFileSync(existingChangelogPath, 'utf8')
|
||||||
|
: '';
|
||||||
|
const outputPaths = resolveChangelogOutputPaths({ cwd });
|
||||||
|
const nextChangelog = prependReleaseSection(existingChangelog, releaseSection, version);
|
||||||
|
|
||||||
|
for (const outputPath of outputPaths) {
|
||||||
|
mkdirSync(path.dirname(outputPath), { recursive: true });
|
||||||
|
writeFileSync(outputPath, nextChangelog, 'utf8');
|
||||||
|
log(`Updated ${outputPath}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const releaseNotesPath = writeReleaseNotesFile(
|
||||||
|
cwd,
|
||||||
|
extractReleaseSectionBody(nextChangelog, version) ?? releaseSection,
|
||||||
|
options?.deps,
|
||||||
|
);
|
||||||
|
log(`Generated ${releaseNotesPath}`);
|
||||||
|
|
||||||
|
for (const fragment of fragments) {
|
||||||
|
rmSync(fragment.path);
|
||||||
|
log(`Removed ${fragment.path}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
deletedFragmentPaths: fragments.map((fragment) => fragment.path),
|
||||||
|
outputPaths,
|
||||||
|
releaseNotesPath,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export function verifyChangelogFragments(options?: ChangelogOptions): void {
|
||||||
|
readChangeFragments(options?.cwd ?? process.cwd(), options?.deps);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function verifyChangelogReadyForRelease(options?: ChangelogOptions): void {
|
||||||
|
const cwd = options?.cwd ?? process.cwd();
|
||||||
|
const readFileSync = options?.deps?.readFileSync ?? fs.readFileSync;
|
||||||
|
verifyRequestedVersionMatchesPackageVersion(options ?? {});
|
||||||
|
const version = resolveVersion(options ?? {});
|
||||||
|
const pendingFragments = resolveFragmentPaths(cwd, options?.deps);
|
||||||
|
if (pendingFragments.length > 0) {
|
||||||
|
throw new Error(
|
||||||
|
`Pending changelog fragments must be released first: ${pendingFragments.join(', ')}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const changelogPath = path.join(cwd, 'CHANGELOG.md');
|
||||||
|
if (!(options?.deps?.existsSync ?? fs.existsSync)(changelogPath)) {
|
||||||
|
throw new Error(`Missing ${changelogPath}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const changelog = readFileSync(changelogPath, 'utf8');
|
||||||
|
if (extractReleaseSectionBody(changelog, version) === null) {
|
||||||
|
throw new Error(`Missing CHANGELOG section for v${version}.`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function isFragmentPath(candidate: string): boolean {
|
||||||
|
return /^changes\/.+\.md$/u.test(candidate) && !/\/?README\.md$/iu.test(candidate);
|
||||||
|
}
|
||||||
|
|
||||||
|
function isIgnoredPullRequestPath(candidate: string): boolean {
|
||||||
|
return (
|
||||||
|
candidate === 'CHANGELOG.md' ||
|
||||||
|
candidate === 'release/release-notes.md' ||
|
||||||
|
candidate === 'AGENTS.md' ||
|
||||||
|
candidate === 'README.md' ||
|
||||||
|
candidate.startsWith('changes/') ||
|
||||||
|
candidate.startsWith('docs/') ||
|
||||||
|
candidate.startsWith('.github/') ||
|
||||||
|
candidate.startsWith('backlog/')
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function verifyPullRequestChangelog(options: PullRequestChangelogOptions): void {
|
||||||
|
const labels = (options.changedLabels ?? []).map((label) => label.trim()).filter(Boolean);
|
||||||
|
if (labels.includes(SKIP_CHANGELOG_LABEL)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const normalizedEntries = options.changedEntries
|
||||||
|
.map((entry) => ({
|
||||||
|
path: entry.path.trim(),
|
||||||
|
status: entry.status.trim().toUpperCase(),
|
||||||
|
}))
|
||||||
|
.filter((entry) => entry.path);
|
||||||
|
if (normalizedEntries.length === 0) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const hasFragment = normalizedEntries.some(
|
||||||
|
(entry) => entry.status !== 'D' && isFragmentPath(entry.path),
|
||||||
|
);
|
||||||
|
const requiresFragment = normalizedEntries.some((entry) => !isIgnoredPullRequestPath(entry.path));
|
||||||
|
|
||||||
|
if (requiresFragment && !hasFragment) {
|
||||||
|
throw new Error(
|
||||||
|
`This pull request changes release-relevant files and requires a changelog fragment under changes/ or the ${SKIP_CHANGELOG_LABEL} label.`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function resolveChangedPathsFromGit(
|
||||||
|
cwd: string,
|
||||||
|
baseRef: string,
|
||||||
|
headRef: string,
|
||||||
|
): Array<{ path: string; status: string }> {
|
||||||
|
const output = execFileSync('git', ['diff', '--name-status', `${baseRef}...${headRef}`], {
|
||||||
|
cwd,
|
||||||
|
encoding: 'utf8',
|
||||||
|
});
|
||||||
|
return output
|
||||||
|
.split(/\r?\n/)
|
||||||
|
.map((line) => line.trim())
|
||||||
|
.filter(Boolean)
|
||||||
|
.map((line) => {
|
||||||
|
const [status = '', ...paths] = line.split(/\s+/);
|
||||||
|
return {
|
||||||
|
path: paths[paths.length - 1] ?? '',
|
||||||
|
status,
|
||||||
|
};
|
||||||
|
})
|
||||||
|
.filter((entry) => entry.path);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function writeReleaseNotesForVersion(options?: ChangelogOptions): string {
|
||||||
|
const cwd = options?.cwd ?? process.cwd();
|
||||||
|
const readFileSync = options?.deps?.readFileSync ?? fs.readFileSync;
|
||||||
|
const version = resolveVersion(options ?? {});
|
||||||
|
const changelogPath = path.join(cwd, 'CHANGELOG.md');
|
||||||
|
const changelog = readFileSync(changelogPath, 'utf8');
|
||||||
|
const changes = extractReleaseSectionBody(changelog, version);
|
||||||
|
|
||||||
|
if (changes === null) {
|
||||||
|
throw new Error(`Missing CHANGELOG section for v${version}.`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return writeReleaseNotesFile(cwd, changes, options?.deps);
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseCliArgs(argv: string[]): {
|
||||||
|
baseRef?: string;
|
||||||
|
cwd?: string;
|
||||||
|
date?: string;
|
||||||
|
headRef?: string;
|
||||||
|
labels?: string;
|
||||||
|
version?: string;
|
||||||
|
} {
|
||||||
|
const parsed: {
|
||||||
|
baseRef?: string;
|
||||||
|
cwd?: string;
|
||||||
|
date?: string;
|
||||||
|
headRef?: string;
|
||||||
|
labels?: string;
|
||||||
|
version?: string;
|
||||||
|
} = {};
|
||||||
|
|
||||||
|
for (let index = 0; index < argv.length; index += 1) {
|
||||||
|
const current = argv[index];
|
||||||
|
const next = argv[index + 1];
|
||||||
|
|
||||||
|
if (current === '--cwd' && next) {
|
||||||
|
parsed.cwd = next;
|
||||||
|
index += 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (current === '--date' && next) {
|
||||||
|
parsed.date = next;
|
||||||
|
index += 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (current === '--version' && next) {
|
||||||
|
parsed.version = next;
|
||||||
|
index += 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (current === '--base-ref' && next) {
|
||||||
|
parsed.baseRef = next;
|
||||||
|
index += 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (current === '--head-ref' && next) {
|
||||||
|
parsed.headRef = next;
|
||||||
|
index += 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (current === '--labels' && next) {
|
||||||
|
parsed.labels = next;
|
||||||
|
index += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return parsed;
|
||||||
|
}
|
||||||
|
|
||||||
|
function main(): void {
|
||||||
|
const [command = 'build', ...argv] = process.argv.slice(2);
|
||||||
|
const options = parseCliArgs(argv);
|
||||||
|
|
||||||
|
if (command === 'build') {
|
||||||
|
writeChangelogArtifacts(options);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (command === 'check') {
|
||||||
|
verifyChangelogReadyForRelease(options);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (command === 'lint') {
|
||||||
|
verifyChangelogFragments(options);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (command === 'pr-check') {
|
||||||
|
verifyChangelogFragments(options);
|
||||||
|
verifyPullRequestChangelog({
|
||||||
|
changedLabels: options.labels?.split(',') ?? [],
|
||||||
|
changedEntries: resolveChangedPathsFromGit(
|
||||||
|
options.cwd ?? process.cwd(),
|
||||||
|
options.baseRef ?? 'origin/main',
|
||||||
|
options.headRef ?? 'HEAD',
|
||||||
|
),
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (command === 'release-notes') {
|
||||||
|
writeReleaseNotesForVersion(options);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new Error(`Unknown changelog command: ${command}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (require.main === module) {
|
||||||
|
main();
|
||||||
|
}
|
||||||
30
scripts/build-win-unsigned.mjs
Normal file
30
scripts/build-win-unsigned.mjs
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
import { spawnSync } from 'node:child_process';
|
||||||
|
import { fileURLToPath } from 'node:url';
|
||||||
|
|
||||||
|
const env = { ...process.env };
|
||||||
|
|
||||||
|
for (const name of [
|
||||||
|
'CSC_LINK',
|
||||||
|
'CSC_KEY_PASSWORD',
|
||||||
|
'WIN_CSC_LINK',
|
||||||
|
'WIN_CSC_KEY_PASSWORD',
|
||||||
|
'CSC_NAME',
|
||||||
|
'WIN_CSC_NAME',
|
||||||
|
]) {
|
||||||
|
delete env[name];
|
||||||
|
}
|
||||||
|
|
||||||
|
env.CSC_IDENTITY_AUTO_DISCOVERY = 'false';
|
||||||
|
|
||||||
|
const electronBuilderCli = fileURLToPath(new URL('../node_modules/electron-builder/out/cli/cli.js', import.meta.url));
|
||||||
|
|
||||||
|
const result = spawnSync(process.execPath, [electronBuilderCli, '--win', 'nsis', 'zip', '--publish', 'never'], {
|
||||||
|
stdio: 'inherit',
|
||||||
|
env,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (result.error) {
|
||||||
|
throw result.error;
|
||||||
|
}
|
||||||
|
|
||||||
|
process.exit(result.status ?? 1);
|
||||||
@@ -13,13 +13,17 @@ const submodulePackageLockPath = path.join(submoduleDir, 'package-lock.json');
|
|||||||
const buildOutputDir = path.join(repoRoot, 'build', 'yomitan');
|
const buildOutputDir = path.join(repoRoot, 'build', 'yomitan');
|
||||||
const stampPath = path.join(buildOutputDir, '.subminer-build.json');
|
const stampPath = path.join(buildOutputDir, '.subminer-build.json');
|
||||||
const zipPath = path.join(submoduleDir, 'builds', 'yomitan-chrome.zip');
|
const zipPath = path.join(submoduleDir, 'builds', 'yomitan-chrome.zip');
|
||||||
const npmCommand = process.platform === 'win32' ? 'npm.cmd' : 'npm';
|
const bunCommand = process.versions.bun ? process.execPath : 'bun';
|
||||||
const dependencyStampPath = path.join(submoduleDir, 'node_modules', '.subminer-package-lock-hash');
|
const dependencyStampPath = path.join(submoduleDir, 'node_modules', '.subminer-package-lock-hash');
|
||||||
|
|
||||||
function run(command, args, cwd) {
|
function run(command, args, cwd) {
|
||||||
execFileSync(command, args, { cwd, stdio: 'inherit' });
|
execFileSync(command, args, { cwd, stdio: 'inherit' });
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function escapePowerShellString(value) {
|
||||||
|
return value.replaceAll("'", "''");
|
||||||
|
}
|
||||||
|
|
||||||
function readCommand(command, args, cwd) {
|
function readCommand(command, args, cwd) {
|
||||||
return execFileSync(command, args, { cwd, encoding: 'utf8' }).trim();
|
return execFileSync(command, args, { cwd, encoding: 'utf8' }).trim();
|
||||||
}
|
}
|
||||||
@@ -78,7 +82,7 @@ function ensureDependenciesInstalled() {
|
|||||||
} catch {}
|
} catch {}
|
||||||
|
|
||||||
if (!fs.existsSync(nodeModulesDir) || installedLockHash !== currentLockHash) {
|
if (!fs.existsSync(nodeModulesDir) || installedLockHash !== currentLockHash) {
|
||||||
run(npmCommand, ['ci'], submoduleDir);
|
run(bunCommand, ['install', '--no-save'], submoduleDir);
|
||||||
fs.mkdirSync(nodeModulesDir, { recursive: true });
|
fs.mkdirSync(nodeModulesDir, { recursive: true });
|
||||||
fs.writeFileSync(dependencyStampPath, `${currentLockHash}\n`, 'utf8');
|
fs.writeFileSync(dependencyStampPath, `${currentLockHash}\n`, 'utf8');
|
||||||
}
|
}
|
||||||
@@ -86,7 +90,7 @@ function ensureDependenciesInstalled() {
|
|||||||
|
|
||||||
function installAndBuild() {
|
function installAndBuild() {
|
||||||
ensureDependenciesInstalled();
|
ensureDependenciesInstalled();
|
||||||
run(npmCommand, ['run', 'build', '--', '--target', 'chrome'], submoduleDir);
|
run(bunCommand, ['./dev/bin/build.js', '--target', 'chrome'], submoduleDir);
|
||||||
}
|
}
|
||||||
|
|
||||||
function extractBuild() {
|
function extractBuild() {
|
||||||
@@ -96,7 +100,22 @@ function extractBuild() {
|
|||||||
|
|
||||||
const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-yomitan-'));
|
const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-yomitan-'));
|
||||||
try {
|
try {
|
||||||
|
if (process.platform === 'win32') {
|
||||||
|
run(
|
||||||
|
'powershell.exe',
|
||||||
|
[
|
||||||
|
'-NoProfile',
|
||||||
|
'-NonInteractive',
|
||||||
|
'-ExecutionPolicy',
|
||||||
|
'Bypass',
|
||||||
|
'-Command',
|
||||||
|
`Expand-Archive -LiteralPath '${escapePowerShellString(zipPath)}' -DestinationPath '${escapePowerShellString(tempDir)}' -Force`,
|
||||||
|
],
|
||||||
|
repoRoot,
|
||||||
|
);
|
||||||
|
} else {
|
||||||
run('unzip', ['-qo', zipPath, '-d', tempDir], repoRoot);
|
run('unzip', ['-qo', zipPath, '-d', tempDir], repoRoot);
|
||||||
|
}
|
||||||
fs.rmSync(buildOutputDir, { recursive: true, force: true });
|
fs.rmSync(buildOutputDir, { recursive: true, force: true });
|
||||||
fs.mkdirSync(path.dirname(buildOutputDir), { recursive: true });
|
fs.mkdirSync(path.dirname(buildOutputDir), { recursive: true });
|
||||||
fs.cpSync(tempDir, buildOutputDir, { recursive: true });
|
fs.cpSync(tempDir, buildOutputDir, { recursive: true });
|
||||||
|
|||||||
101
scripts/configure-plugin-binary-path.mjs
Normal file
101
scripts/configure-plugin-binary-path.mjs
Normal file
@@ -0,0 +1,101 @@
|
|||||||
|
import fs from 'node:fs';
|
||||||
|
import path from 'node:path';
|
||||||
|
|
||||||
|
function normalizeCandidate(candidate) {
|
||||||
|
if (typeof candidate !== 'string') return '';
|
||||||
|
const trimmed = candidate.trim();
|
||||||
|
return trimmed.length > 0 ? trimmed : '';
|
||||||
|
}
|
||||||
|
|
||||||
|
function fileExists(candidate) {
|
||||||
|
try {
|
||||||
|
return fs.statSync(candidate).isFile();
|
||||||
|
} catch {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function unique(values) {
|
||||||
|
return Array.from(new Set(values.filter((value) => value.length > 0)));
|
||||||
|
}
|
||||||
|
|
||||||
|
function findWindowsBinary(repoRoot) {
|
||||||
|
const homeDir = process.env.HOME?.trim() || process.env.USERPROFILE?.trim() || '';
|
||||||
|
const appDataDir = process.env.APPDATA?.trim() || '';
|
||||||
|
const derivedLocalAppData =
|
||||||
|
appDataDir && /[\\/]Roaming$/i.test(appDataDir)
|
||||||
|
? appDataDir.replace(/[\\/]Roaming$/i, `${path.sep}Local`)
|
||||||
|
: '';
|
||||||
|
const localAppData =
|
||||||
|
process.env.LOCALAPPDATA?.trim() ||
|
||||||
|
derivedLocalAppData ||
|
||||||
|
(homeDir ? path.join(homeDir, 'AppData', 'Local') : '');
|
||||||
|
const programFiles = process.env.ProgramFiles?.trim() || 'C:\\Program Files';
|
||||||
|
const programFilesX86 = process.env['ProgramFiles(x86)']?.trim() || 'C:\\Program Files (x86)';
|
||||||
|
|
||||||
|
const candidates = unique([
|
||||||
|
normalizeCandidate(process.env.SUBMINER_BINARY_PATH),
|
||||||
|
normalizeCandidate(process.env.SUBMINER_APPIMAGE_PATH),
|
||||||
|
localAppData ? path.join(localAppData, 'Programs', 'SubMiner', 'SubMiner.exe') : '',
|
||||||
|
path.join(programFiles, 'SubMiner', 'SubMiner.exe'),
|
||||||
|
path.join(programFilesX86, 'SubMiner', 'SubMiner.exe'),
|
||||||
|
'C:\\SubMiner\\SubMiner.exe',
|
||||||
|
path.join(repoRoot, 'release', 'win-unpacked', 'SubMiner.exe'),
|
||||||
|
path.join(repoRoot, 'release', 'SubMiner', 'SubMiner.exe'),
|
||||||
|
path.join(repoRoot, 'release', 'SubMiner.exe'),
|
||||||
|
]);
|
||||||
|
|
||||||
|
return candidates.find((candidate) => fileExists(candidate)) || '';
|
||||||
|
}
|
||||||
|
|
||||||
|
function rewriteBinaryPath(configPath, binaryPath) {
|
||||||
|
const content = fs.readFileSync(configPath, 'utf8');
|
||||||
|
const normalizedPath = binaryPath.replace(/\r?\n/g, ' ').trim();
|
||||||
|
const updated = content.replace(/^binary_path=.*$/m, `binary_path=${normalizedPath}`);
|
||||||
|
if (updated !== content) {
|
||||||
|
fs.writeFileSync(configPath, updated, 'utf8');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function rewriteSocketPath(configPath, socketPath) {
|
||||||
|
const content = fs.readFileSync(configPath, 'utf8');
|
||||||
|
const normalizedPath = socketPath.replace(/\r?\n/g, ' ').trim();
|
||||||
|
const updated = content.replace(/^socket_path=.*$/m, `socket_path=${normalizedPath}`);
|
||||||
|
if (updated !== content) {
|
||||||
|
fs.writeFileSync(configPath, updated, 'utf8');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const [, , configPathArg, repoRootArg, platformArg] = process.argv;
|
||||||
|
const configPath = normalizeCandidate(configPathArg);
|
||||||
|
const repoRoot = normalizeCandidate(repoRootArg) || process.cwd();
|
||||||
|
const platform = normalizeCandidate(platformArg) || process.platform;
|
||||||
|
|
||||||
|
if (!configPath) {
|
||||||
|
console.error('[ERROR] Missing plugin config path');
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!fileExists(configPath)) {
|
||||||
|
console.error(`[ERROR] Plugin config not found: ${configPath}`);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (platform !== 'win32') {
|
||||||
|
console.log('[INFO] Skipping binary_path rewrite for non-Windows platform');
|
||||||
|
process.exit(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
const windowsSocketPath = '\\\\.\\pipe\\subminer-socket';
|
||||||
|
rewriteSocketPath(configPath, windowsSocketPath);
|
||||||
|
|
||||||
|
const binaryPath = findWindowsBinary(repoRoot);
|
||||||
|
if (!binaryPath) {
|
||||||
|
console.warn(
|
||||||
|
`[WARN] Configured plugin socket_path=${windowsSocketPath} but could not detect SubMiner.exe; set binary_path manually or provide SUBMINER_BINARY_PATH`,
|
||||||
|
);
|
||||||
|
process.exit(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
rewriteBinaryPath(configPath, binaryPath);
|
||||||
|
console.log(`[INFO] Configured plugin socket_path=${windowsSocketPath} binary_path=${binaryPath}`);
|
||||||
@@ -20,6 +20,11 @@ private struct WindowGeometry {
|
|||||||
let height: Int
|
let height: Int
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private struct WindowState {
|
||||||
|
let geometry: WindowGeometry
|
||||||
|
let focused: Bool
|
||||||
|
}
|
||||||
|
|
||||||
private let targetMpvSocketPath: String? = {
|
private let targetMpvSocketPath: String? = {
|
||||||
guard CommandLine.arguments.count > 1 else {
|
guard CommandLine.arguments.count > 1 else {
|
||||||
return nil
|
return nil
|
||||||
@@ -136,7 +141,11 @@ private func geometryFromAXWindow(_ axWindow: AXUIElement) -> WindowGeometry? {
|
|||||||
return geometry
|
return geometry
|
||||||
}
|
}
|
||||||
|
|
||||||
private func geometryFromAccessibilityAPI() -> WindowGeometry? {
|
private func frontmostApplicationPid() -> pid_t? {
|
||||||
|
NSWorkspace.shared.frontmostApplication?.processIdentifier
|
||||||
|
}
|
||||||
|
|
||||||
|
private func windowStateFromAccessibilityAPI() -> WindowState? {
|
||||||
let runningApps = NSWorkspace.shared.runningApplications.filter { app in
|
let runningApps = NSWorkspace.shared.runningApplications.filter { app in
|
||||||
guard let name = app.localizedName else {
|
guard let name = app.localizedName else {
|
||||||
return false
|
return false
|
||||||
@@ -144,6 +153,8 @@ private func geometryFromAccessibilityAPI() -> WindowGeometry? {
|
|||||||
return normalizedMpvName(name)
|
return normalizedMpvName(name)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let frontmostPid = frontmostApplicationPid()
|
||||||
|
|
||||||
for app in runningApps {
|
for app in runningApps {
|
||||||
let appElement = AXUIElementCreateApplication(app.processIdentifier)
|
let appElement = AXUIElementCreateApplication(app.processIdentifier)
|
||||||
if !windowHasTargetSocket(app.processIdentifier) {
|
if !windowHasTargetSocket(app.processIdentifier) {
|
||||||
@@ -173,7 +184,10 @@ private func geometryFromAccessibilityAPI() -> WindowGeometry? {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if let geometry = geometryFromAXWindow(window) {
|
if let geometry = geometryFromAXWindow(window) {
|
||||||
return geometry
|
return WindowState(
|
||||||
|
geometry: geometry,
|
||||||
|
focused: frontmostPid == windowPid
|
||||||
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -181,11 +195,12 @@ private func geometryFromAccessibilityAPI() -> WindowGeometry? {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
private func geometryFromCoreGraphics() -> WindowGeometry? {
|
private func windowStateFromCoreGraphics() -> WindowState? {
|
||||||
// Keep the CG fallback for environments without Accessibility permissions.
|
// Keep the CG fallback for environments without Accessibility permissions.
|
||||||
// Use on-screen layer-0 windows to avoid off-screen helpers/shadows.
|
// Use on-screen layer-0 windows to avoid off-screen helpers/shadows.
|
||||||
let options: CGWindowListOption = [.optionOnScreenOnly, .excludeDesktopElements]
|
let options: CGWindowListOption = [.optionOnScreenOnly, .excludeDesktopElements]
|
||||||
let windowList = CGWindowListCopyWindowInfo(options, kCGNullWindowID) as? [[String: Any]] ?? []
|
let windowList = CGWindowListCopyWindowInfo(options, kCGNullWindowID) as? [[String: Any]] ?? []
|
||||||
|
let frontmostPid = frontmostApplicationPid()
|
||||||
|
|
||||||
for window in windowList {
|
for window in windowList {
|
||||||
guard let ownerName = window[kCGWindowOwnerName as String] as? String,
|
guard let ownerName = window[kCGWindowOwnerName as String] as? String,
|
||||||
@@ -226,14 +241,19 @@ private func geometryFromCoreGraphics() -> WindowGeometry? {
|
|||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
return geometry
|
return WindowState(
|
||||||
|
geometry: geometry,
|
||||||
|
focused: frontmostPid == ownerPid
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
if let window = geometryFromAccessibilityAPI() ?? geometryFromCoreGraphics() {
|
if let window = windowStateFromAccessibilityAPI() ?? windowStateFromCoreGraphics() {
|
||||||
print("\(window.x),\(window.y),\(window.width),\(window.height)")
|
print(
|
||||||
|
"\(window.geometry.x),\(window.geometry.y),\(window.geometry.width),\(window.geometry.height),\(window.focused ? 1 : 0)"
|
||||||
|
)
|
||||||
} else {
|
} else {
|
||||||
print("not-found")
|
print("not-found")
|
||||||
}
|
}
|
||||||
|
|||||||
175
scripts/get-mpv-window-windows.ps1
Normal file
175
scripts/get-mpv-window-windows.ps1
Normal file
@@ -0,0 +1,175 @@
|
|||||||
|
param(
|
||||||
|
[ValidateSet('geometry')]
|
||||||
|
[string]$Mode = 'geometry',
|
||||||
|
[string]$SocketPath
|
||||||
|
)
|
||||||
|
|
||||||
|
$ErrorActionPreference = 'Stop'
|
||||||
|
|
||||||
|
try {
|
||||||
|
Add-Type -TypeDefinition @"
|
||||||
|
using System;
|
||||||
|
using System.Runtime.InteropServices;
|
||||||
|
|
||||||
|
public static class SubMinerWindowsHelper {
|
||||||
|
public delegate bool EnumWindowsProc(IntPtr hWnd, IntPtr lParam);
|
||||||
|
|
||||||
|
[StructLayout(LayoutKind.Sequential)]
|
||||||
|
public struct RECT {
|
||||||
|
public int Left;
|
||||||
|
public int Top;
|
||||||
|
public int Right;
|
||||||
|
public int Bottom;
|
||||||
|
}
|
||||||
|
|
||||||
|
[DllImport("user32.dll")]
|
||||||
|
public static extern bool EnumWindows(EnumWindowsProc lpEnumFunc, IntPtr lParam);
|
||||||
|
|
||||||
|
[DllImport("user32.dll")]
|
||||||
|
[return: MarshalAs(UnmanagedType.Bool)]
|
||||||
|
public static extern bool IsWindowVisible(IntPtr hWnd);
|
||||||
|
|
||||||
|
[DllImport("user32.dll")]
|
||||||
|
public static extern bool IsIconic(IntPtr hWnd);
|
||||||
|
|
||||||
|
[DllImport("user32.dll")]
|
||||||
|
public static extern IntPtr GetForegroundWindow();
|
||||||
|
|
||||||
|
[DllImport("user32.dll", SetLastError = true)]
|
||||||
|
public static extern uint GetWindowThreadProcessId(IntPtr hWnd, out uint processId);
|
||||||
|
|
||||||
|
[DllImport("user32.dll", SetLastError = true)]
|
||||||
|
[return: MarshalAs(UnmanagedType.Bool)]
|
||||||
|
public static extern bool GetWindowRect(IntPtr hWnd, out RECT rect);
|
||||||
|
|
||||||
|
[DllImport("dwmapi.dll")]
|
||||||
|
public static extern int DwmGetWindowAttribute(IntPtr hwnd, int dwAttribute, out RECT pvAttribute, int cbAttribute);
|
||||||
|
}
|
||||||
|
"@
|
||||||
|
|
||||||
|
$DWMWA_EXTENDED_FRAME_BOUNDS = 9
|
||||||
|
|
||||||
|
function Get-WindowBounds {
|
||||||
|
param([IntPtr]$hWnd)
|
||||||
|
|
||||||
|
$rect = New-Object SubMinerWindowsHelper+RECT
|
||||||
|
$size = [System.Runtime.InteropServices.Marshal]::SizeOf($rect)
|
||||||
|
$dwmResult = [SubMinerWindowsHelper]::DwmGetWindowAttribute(
|
||||||
|
$hWnd,
|
||||||
|
$DWMWA_EXTENDED_FRAME_BOUNDS,
|
||||||
|
[ref]$rect,
|
||||||
|
$size
|
||||||
|
)
|
||||||
|
|
||||||
|
if ($dwmResult -ne 0) {
|
||||||
|
if (-not [SubMinerWindowsHelper]::GetWindowRect($hWnd, [ref]$rect)) {
|
||||||
|
return $null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
$width = $rect.Right - $rect.Left
|
||||||
|
$height = $rect.Bottom - $rect.Top
|
||||||
|
if ($width -le 0 -or $height -le 0) {
|
||||||
|
return $null
|
||||||
|
}
|
||||||
|
|
||||||
|
return [PSCustomObject]@{
|
||||||
|
X = $rect.Left
|
||||||
|
Y = $rect.Top
|
||||||
|
Width = $width
|
||||||
|
Height = $height
|
||||||
|
Area = $width * $height
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
$commandLineByPid = @{}
|
||||||
|
if (-not [string]::IsNullOrWhiteSpace($SocketPath)) {
|
||||||
|
foreach ($process in Get-CimInstance Win32_Process) {
|
||||||
|
$commandLineByPid[[uint32]$process.ProcessId] = $process.CommandLine
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
$mpvMatches = New-Object System.Collections.Generic.List[object]
|
||||||
|
$foregroundWindow = [SubMinerWindowsHelper]::GetForegroundWindow()
|
||||||
|
$callback = [SubMinerWindowsHelper+EnumWindowsProc]{
|
||||||
|
param([IntPtr]$hWnd, [IntPtr]$lParam)
|
||||||
|
|
||||||
|
if (-not [SubMinerWindowsHelper]::IsWindowVisible($hWnd)) {
|
||||||
|
return $true
|
||||||
|
}
|
||||||
|
|
||||||
|
if ([SubMinerWindowsHelper]::IsIconic($hWnd)) {
|
||||||
|
return $true
|
||||||
|
}
|
||||||
|
|
||||||
|
[uint32]$windowProcessId = 0
|
||||||
|
[void][SubMinerWindowsHelper]::GetWindowThreadProcessId($hWnd, [ref]$windowProcessId)
|
||||||
|
if ($windowProcessId -eq 0) {
|
||||||
|
return $true
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
$process = Get-Process -Id $windowProcessId -ErrorAction Stop
|
||||||
|
} catch {
|
||||||
|
return $true
|
||||||
|
}
|
||||||
|
|
||||||
|
if ($process.ProcessName -ine 'mpv') {
|
||||||
|
return $true
|
||||||
|
}
|
||||||
|
|
||||||
|
if (-not [string]::IsNullOrWhiteSpace($SocketPath)) {
|
||||||
|
$commandLine = $commandLineByPid[[uint32]$windowProcessId]
|
||||||
|
if ([string]::IsNullOrWhiteSpace($commandLine)) {
|
||||||
|
return $true
|
||||||
|
}
|
||||||
|
if (
|
||||||
|
($commandLine -notlike "*--input-ipc-server=$SocketPath*") -and
|
||||||
|
($commandLine -notlike "*--input-ipc-server $SocketPath*")
|
||||||
|
) {
|
||||||
|
return $true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
$bounds = Get-WindowBounds -hWnd $hWnd
|
||||||
|
if ($null -eq $bounds) {
|
||||||
|
return $true
|
||||||
|
}
|
||||||
|
|
||||||
|
$mpvMatches.Add([PSCustomObject]@{
|
||||||
|
HWnd = $hWnd
|
||||||
|
X = $bounds.X
|
||||||
|
Y = $bounds.Y
|
||||||
|
Width = $bounds.Width
|
||||||
|
Height = $bounds.Height
|
||||||
|
Area = $bounds.Area
|
||||||
|
IsForeground = ($foregroundWindow -ne [IntPtr]::Zero -and $hWnd -eq $foregroundWindow)
|
||||||
|
})
|
||||||
|
|
||||||
|
return $true
|
||||||
|
}
|
||||||
|
|
||||||
|
[void][SubMinerWindowsHelper]::EnumWindows($callback, [IntPtr]::Zero)
|
||||||
|
|
||||||
|
$focusedMatch = $mpvMatches | Where-Object { $_.IsForeground } | Select-Object -First 1
|
||||||
|
if ($null -ne $focusedMatch) {
|
||||||
|
[Console]::Error.WriteLine('focus=focused')
|
||||||
|
} else {
|
||||||
|
[Console]::Error.WriteLine('focus=not-focused')
|
||||||
|
}
|
||||||
|
|
||||||
|
if ($mpvMatches.Count -eq 0) {
|
||||||
|
Write-Output 'not-found'
|
||||||
|
exit 0
|
||||||
|
}
|
||||||
|
|
||||||
|
$bestMatch = if ($null -ne $focusedMatch) {
|
||||||
|
$focusedMatch
|
||||||
|
} else {
|
||||||
|
$mpvMatches | Sort-Object -Property Area, Width, Height -Descending | Select-Object -First 1
|
||||||
|
}
|
||||||
|
Write-Output "$($bestMatch.X),$($bestMatch.Y),$($bestMatch.Width),$($bestMatch.Height)"
|
||||||
|
} catch {
|
||||||
|
[Console]::Error.WriteLine($_.Exception.Message)
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user