37 Commits

Author SHA1 Message Date
2f07c3407a chore: bump version to 0.3.0 2026-03-05 20:21:03 -08:00
a5554ec530 docs: capture anki proxy notes and codebase health backlog 2026-03-05 20:14:58 -08:00
f9f2fe6e87 docs: update keyboard-driven yomitan workflow 2026-03-05 20:13:38 -08:00
8ca05859a9 fix: support repeated popup scroll keys 2026-03-05 19:20:55 -08:00
0cac446725 fix: preserve keyboard subtitle navigation state 2026-03-05 18:39:40 -08:00
23623ad1e1 docs(backlog): add keyboard-driven yomitan task record 2026-03-05 01:29:13 -08:00
b623c5e160 fix: improve yomitan keyboard navigation and payload handling 2026-03-05 01:28:54 -08:00
5436e0cd49 chore(docs): remove Plausible tracker integration 2026-03-04 23:04:11 -08:00
beeeee5ebd fix(core): recopy Yomitan extension when patched scripts drift 2026-03-04 23:04:11 -08:00
fdbf769760 feat(renderer): add keyboard-driven yomitan navigation and popup controls 2026-03-04 23:04:11 -08:00
0a36d1aa99 fix(anki): force Yomitan proxy server sync for card auto-enhancement 2026-03-04 23:04:11 -08:00
69ab87c25f feat(renderer): add optional yomitan popup auto-pause 2026-03-04 23:04:11 -08:00
9a30419a23 fix(tokenizer): tighten frequency highlighting exclusions 2026-03-04 23:04:11 -08:00
092c56f98f feat(launcher): migrate aniskip resolution to launcher script opts 2026-03-03 00:38:22 -08:00
10ef535f9a feat(subsync): add replace option and deterministic retimed naming 2026-03-03 00:26:31 -08:00
6c80bd5843 fix(docs): point plausible tracker to /api/event 2026-03-03 00:26:09 -08:00
f0bd0ba355 fix(release): publish via gh cli with clobber upload 2026-03-02 03:00:06 -08:00
be4db24861 make pretty 2026-03-02 02:45:51 -08:00
83d21c4b6d fix: narrow fallback frequency filter type predicate 2026-03-02 02:44:07 -08:00
e744fab067 fix: unblock autoplay on tokenization-ready and defer annotation loading 2026-03-02 02:43:09 -08:00
5167e3a494 docs: add plausible tracker config for docs site 2026-03-02 02:33:45 -08:00
aff4e91bbb fix(startup): async dictionary loading and unblock first tokenization
- move JLPT/frequency dictionary init off sync fs APIs and add cooperative yielding during entry processing

- decouple first tokenization from full warmup by gating only on Yomitan readiness while MeCab/dictionary warmups continue in parallel

- update mpv pause-until-ready OSD copy to tokenization-focused wording and refresh gate regression assertions
2026-03-02 01:48:17 -08:00
737101fe9e fix(tokenizer): lazy yomitan term-only frequency fallback 2026-03-02 01:45:37 -08:00
629fe97ef7 chore(tokenizer): align enrichment regression notes and test typing 2026-03-02 01:45:23 -08:00
fa97472bce perf(tokenizer): optimize mecab POS enrichment lookups 2026-03-02 01:39:44 -08:00
83f13df627 perf(tokenizer): skip known-word lookup in MeCab POS enrichment 2026-03-02 01:38:37 -08:00
cde231b1ff fix(tokenizer): avoid repeated yomitan anki sync checks on no-change 2026-03-02 01:36:22 -08:00
7161fc3513 fix: make tokenization warmup one-shot 2026-03-02 01:33:09 -08:00
9a91951656 perf(tokenizer): cut annotation latency with persistent mecab 2026-03-02 01:15:21 -08:00
11e9c721c6 feat(subtitles): add no-jump subtitle-delay shift commands 2026-03-02 01:12:26 -08:00
3c66ea6b30 fix(jellyfin): preserve discover resume position on remote play 2026-03-01 23:28:03 -08:00
79f37f3986 fix(subtitle): prioritize known and n+1 colors over frequency 2026-03-01 23:23:53 -08:00
f1b85b0751 fix(plugin): keep loading OSD visible during startup gate 2026-03-01 23:23:45 -08:00
1ab5d00de0 bump version 2026-03-01 20:12:59 -08:00
17a417e639 fix(subtitle): improve frequency highlight reliability 2026-03-01 20:12:42 -08:00
68e5a7fef3 fix: sanitize jellyfin misc info formatting 2026-03-01 20:05:19 -08:00
7023a3263f Jellyfin and Subsync Fixes (#13) 2026-03-01 16:13:16 -08:00
138 changed files with 7640 additions and 710 deletions

View File

@@ -278,45 +278,70 @@ jobs:
echo "$CHANGES" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
- name: Create Release
uses: softprops/action-gh-release@v2
with:
name: ${{ steps.version.outputs.VERSION }}
body: |
## Changes
${{ steps.changelog.outputs.CHANGES }}
- name: Publish Release
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
set -euo pipefail
## Installation
cat > release-body.md <<'EOF'
## Changes
${{ steps.changelog.outputs.CHANGES }}
### AppImage (Recommended)
1. Download the AppImage below
2. Make it executable: `chmod +x SubMiner.AppImage`
3. Run: `./SubMiner.AppImage`
## Installation
### macOS
1. Download `subminer-*.dmg`
2. Open the DMG and drag `SubMiner.app` into `/Applications`
3. If needed, use the ZIP artifact as an alternative
### AppImage (Recommended)
1. Download the AppImage below
2. Make it executable: `chmod +x SubMiner.AppImage`
3. Run: `./SubMiner.AppImage`
### Manual Installation
See the [README](https://github.com/${{ github.repository }}#installation) for manual installation instructions.
### macOS
1. Download `subminer-*.dmg`
2. Open the DMG and drag `SubMiner.app` into `/Applications`
3. If needed, use the ZIP artifact as an alternative
### Optional Assets (config example + mpv plugin + rofi theme)
1. Download `subminer-assets.tar.gz`
2. Extract and copy `config.example.jsonc` to `~/.config/SubMiner/config.jsonc`
3. Copy `plugin/subminer/` directory contents to `~/.config/mpv/scripts/`
4. Copy `plugin/subminer.conf` to `~/.config/mpv/script-opts/`
5. Copy `assets/themes/subminer.rasi` to:
- Linux: `~/.local/share/SubMiner/themes/subminer.rasi`
- macOS: `~/Library/Application Support/SubMiner/themes/subminer.rasi`
### Manual Installation
See the [README](https://github.com/${{ github.repository }}#installation) for manual installation instructions.
Note: the `subminer` wrapper script uses Bun (`#!/usr/bin/env bun`), so `bun` must be installed and on `PATH`.
files: |
### Optional Assets (config example + mpv plugin + rofi theme)
1. Download `subminer-assets.tar.gz`
2. Extract and copy `config.example.jsonc` to `~/.config/SubMiner/config.jsonc`
3. Copy `plugin/subminer/` directory contents to `~/.config/mpv/scripts/`
4. Copy `plugin/subminer.conf` to `~/.config/mpv/script-opts/`
5. Copy `assets/themes/subminer.rasi` to:
- Linux: `~/.local/share/SubMiner/themes/subminer.rasi`
- macOS: `~/Library/Application Support/SubMiner/themes/subminer.rasi`
Note: the `subminer` wrapper script uses Bun (`#!/usr/bin/env bun`), so `bun` must be installed and on `PATH`.
EOF
if gh release view "${{ steps.version.outputs.VERSION }}" >/dev/null 2>&1; then
gh release edit "${{ steps.version.outputs.VERSION }}" \
--title "${{ steps.version.outputs.VERSION }}" \
--notes-file release-body.md \
--prerelease false
else
gh release create "${{ steps.version.outputs.VERSION }}" \
--title "${{ steps.version.outputs.VERSION }}" \
--notes-file release-body.md \
--prerelease false
fi
shopt -s nullglob
artifacts=(
release/*.AppImage
release/*.dmg
release/*.zip
release/*.tar.gz
release/SHA256SUMS.txt
dist/launcher/subminer
draft: false
prerelease: false
)
if [ "${#artifacts[@]}" -eq 0 ]; then
echo "No release artifacts found for upload."
exit 1
fi
for asset in "${artifacts[@]}"; do
gh release upload "${{ steps.version.outputs.VERSION }}" "$asset" --clobber
done

View File

@@ -25,6 +25,7 @@
SubMiner is an Electron overlay that sits on top of mpv. It turns your video player into a full sentence-mining workstation:
- **Hover to look up** — Yomitan dictionary popups directly on subtitles
- **Keyboard-driven lookup mode** — Navigate token-by-token, keep lookup open across tokens, and control popup scrolling/audio/mining without leaving the overlay
- **One-key mining** — Creates Anki cards with sentence, audio, screenshot, and translation
- **Instant auto-enrichment** — Optional local AnkiConnect proxy enriches new Yomitan cards immediately
- **Reading annotations** — Combines N+1 targeting, frequency-dictionary highlighting, and JLPT underlining while you read

View File

@@ -0,0 +1,8 @@
---
id: m-0
title: 'Codebase Health Remediation'
---
## Description
Follow-up work from the March 6, 2026 codebase review: strengthen the runnable test gate, remove confirmed dead architecture, and continue decomposition of oversized runtime entrypoints.

View File

@@ -6,7 +6,7 @@ title: >-
status: Done
assignee: []
created_date: '2026-02-28 02:38'
updated_date: '2026-02-28 22:36'
updated_date: '2026-03-04 13:55'
labels: []
dependencies: []
references:
@@ -49,4 +49,10 @@ Risk/impact context:
Completed implementation in branch working tree; ready to merge once local changes are committed and test gate passes.
Follow-up fix (2026-03-04):
- Updated bundled Yomitan server-sync behavior to target `profileCurrent` instead of hardcoded `profiles[0]`.
- Added proxy-mode force override so bundled Yomitan always points at SubMiner proxy URL when `ankiConnect.proxy.enabled=true`; this ensures mined cards pass through proxy and trigger auto-enrichment.
- Added regression tests for blocked existing-server case and force-override injection path.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -4,7 +4,7 @@ title: 'Subtitle hover: auto-pause playback with config toggle'
status: Done
assignee: []
created_date: '2026-02-28 22:43'
updated_date: '2026-02-28 22:43'
updated_date: '2026-03-04 12:07'
labels: []
dependencies: []
priority: medium
@@ -18,6 +18,7 @@ ordinal: 8000
Add a user-facing subtitle config option to pause mpv playback when the cursor hovers subtitle text and resume playback when the cursor leaves.
Scope:
- New config key: `subtitleStyle.autoPauseVideoOnHover`.
- Default should be enabled.
- Hover pause/resume must not unpause if playback was already paused before hover.
@@ -42,4 +43,11 @@ Scope:
Implemented `subtitleStyle.autoPauseVideoOnHover` with default `true`, wired through config defaults/resolution/types, renderer state/style, and mouse hover handlers. Added playback pause-state IPC (`getPlaybackPaused`) to avoid false resume when media was already paused. Added renderer hover behavior tests (including race/cancel case) and config/resolve tests. Updated config examples and docs (`README`, usage, shortcuts, mining workflow, configuration) to document default hover pause/resume behavior and disable path.
Follow-up adjustments (2026-03-04):
- Hover pause now resumes immediately when leaving subtitle text (no Yomitan-popup hover retention).
- Added `subtitleStyle.autoPauseVideoOnYomitanPopup` (default `false`) to optionally keep playback paused while Yomitan popup is open, with auto-resume on close only when SubMiner initiated the popup pause.
- Yomitan popup control keybinds added while popup is open: `J/K` scroll, `M` mine, `P` audio play, `[` previous audio variant, `]` next audio variant (within selected source).
- Extension copy drift detection widened so popup runtime changes are reliably re-copied on launch (`popup.js`, `popup-main.js`, `display.js`, `display-audio.js`).
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -18,6 +18,7 @@ ordinal: 9000
Add startup gating behavior for wrapper + mpv plugin flow so playback starts paused when visible overlay auto-start is enabled, then auto-resumes only after subtitle tokenization is ready.
Scope:
- Plugin option `auto_start_pause_until_ready` (default `yes`).
- Launcher reads plugin runtime config and starts mpv paused when `auto_start=yes`, `auto_start_visible_overlay=yes`, and `auto_start_pause_until_ready=yes`.
- Main process signals readiness via mpv script message after tokenized subtitle delivery.
@@ -43,6 +44,7 @@ Scope:
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Implemented startup pause gate across launcher/plugin/main runtime:
- Added plugin runtime config parsing in launcher (`auto_start`, `auto_start_visible_overlay`, `auto_start_pause_until_ready`) and mpv start-paused behavior for eligible runs.
- Added plugin auto-play gate state, timeout fallback, and readiness release via `subminer-autoplay-ready` script message.
- Added main-process readiness signaling after tokenization delivery, including unpause fallback command path.

View File

@@ -18,10 +18,12 @@ ordinal: 10000
Fix Jimaku modal UX so selecting a subtitle file closes the modal automatically once subtitle download+load succeeds.
Current behavior:
- Subtitle file downloads and loads into mpv.
- Jimaku modal remains open until manual close.
Expected behavior:
- On successful `jimakuDownloadFile` result, close modal immediately.
- Keep error behavior unchanged (stay open + show error).

View File

@@ -18,11 +18,13 @@ ordinal: 11000
When the user selects a Jimaku subtitle, save the subtitle with a filename derived from the currently playing media filename instead of the Jimaku release filename.
Example:
- Current media: `anime.mkv`
- Downloaded subtitle extension: `.srt`
- Saved subtitle path: `anime.ja.srt`
Scope:
- Apply in Jimaku download IPC path before writing file.
- Preserve collision-avoidance behavior (suffix with jimaku entry id/counter when target exists).
- Keep mpv load flow unchanged except using renamed path.

View File

@@ -0,0 +1,58 @@
---
id: TASK-81
title: 'Tokenization performance: disable Yomitan MeCab parser, gate local MeCab init, and add persistent MeCab process'
status: Done
assignee: []
created_date: '2026-03-02 07:44'
updated_date: '2026-03-02 20:44'
labels: []
dependencies: []
priority: high
ordinal: 9001
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Reduce subtitle annotation latency by:
- disabling Yomitan-side MeCab parser requests (`useMecabParser=false`);
- initializing local MeCab only when POS-dependent annotations are enabled (N+1 / JLPT / frequency);
- replacing per-line local MeCab process spawning with a persistent parser process that auto-shuts down after idle time and restarts on demand.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Yomitan parse requests disable MeCab parser path.
- [x] #2 MeCab warmup/init is skipped when all POS-dependent annotation toggles are off.
- [x] #3 Local MeCab tokenizer uses persistent process across subtitle lines.
- [x] #4 Persistent MeCab process auto-shuts down after idle timeout and restarts on next tokenize activity.
- [x] #5 Tests cover parser flag, warmup gating, and persistent MeCab lifecycle behavior.
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Implemented tokenizer latency optimizations:
- switched Yomitan parse requests to `useMecabParser: false`;
- added annotation-aware MeCab initialization gating in runtime warmup flow;
- added persistent local MeCab process (default idle shutdown: 30s) with queued requests, retry-on-process-end, idle auto-shutdown, and automatic restart on new work;
- added regression tests for Yomitan parse flag, MeCab warmup gating, and persistent/idle lifecycle behavior;
- fixed tokenization warmup gate so first-use warmup completion is sticky (`tokenizationWarmupCompleted`) and sequential `tokenizeSubtitle` calls no longer re-run Yomitan/dictionary warmup path;
- added regression coverage in `src/main/runtime/composers/mpv-runtime-composer.test.ts` for sequential tokenize calls (`warmup` side effects run once);
- post-review critical fix: treat Yomitan default-profile Anki server sync `no-change` as successful check, so `lastSyncedYomitanAnkiServer` is cached and expensive sync checks do not repeat on every subtitle line;
- added regression assertion in `src/core/services/tokenizer/yomitan-parser-runtime.test.ts` for `updated: false` path returning sync success;
- post-review performance fix: refactored POS enrichment to pre-index MeCab tokens by surface plus character-position overlap index, replacing repeated active-candidate filtering/full-scan behavior with direct overlap candidate lookup per token;
- added regression tests in `src/core/services/tokenizer/parser-enrichment-stage.test.ts` for repeated distant-token scan access and repeated active-candidate filter scans; both fail on scan-based behavior and pass with indexed lookup;
- post-review startup fix: moved JLPT/frequency dictionary initialization from synchronous FS APIs to async `fs/promises` path inspection/read and cooperative chunked entry processing to reduce main-thread stall risk during cold start;
- post-review first-line latency fix: decoupled tokenization warmup gating so first `tokenizeSubtitle` only waits on Yomitan extension readiness, while MeCab check + dictionary prewarm continue in parallel background warmups;
- validated with targeted tests and `tsc --noEmit`.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,60 @@
---
id: TASK-82
title: 'Subtitle frequency highlighting: fix noisy Yomitan readings and restore known/N+1 color priority'
status: Done
assignee: []
created_date: '2026-03-02 20:10'
updated_date: '2026-03-02 01:44' # NOTE(review): predates created_date ('2026-03-02 20:10') — confirm correct timestamp
labels: []
dependencies: []
priority: high
ordinal: 9002
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Address frequency-highlighting regressions:
- tokens like `断じて` missed rank assignment when Yomitan merged-token reading was truncated/noisy;
- known/N+1 tokens were incorrectly colored by frequency color instead of known/N+1 color.
Expected behavior:
- known/N+1 color always wins;
- if token is frequent and within `topX`, frequency rank label can still appear on hover/metadata.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Frequency lookup succeeds for noisy/truncated merged-token readings via robust fallback behavior.
- [x] #2 Merged-token reading normalization restores missing kana suffixes where safe (`headword === surface` path).
- [x] #3 Known/N+1 tokens keep known/N+1 color classes; frequency color class does not override them.
- [x] #4 Frequency rank hover label remains available for in-range frequent tokens, including known/N+1.
- [x] #5 Regression tests added for tokenizer and renderer behavior.
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Implemented and validated:
- tokenizer now normalizes selected Yomitan merged-token readings by appending missing trailing kana suffixes when safe (`headword === surface`);
- frequency lookup now does lazy fallback: requests `{term, reading}` first, and only requests `{term, reading: null}` for misses;
- this removes eager `(term, null)` payload inflation on medium-frequency lines and reduces extension RPC payload/load;
- renderer restored known/N+1 color priority over frequency class coloring;
- frequency rank label display remains available for frequent known/N+1 tokens;
- added regression tests covering noisy-reading fallback, lazy fallback-query behavior, and renderer class/label precedence.
Related commits:
- `17a417e` (`fix(subtitle): improve frequency highlight reliability`)
- `79f37f3` (`fix(subtitle): prioritize known and n+1 colors over frequency`)
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,53 @@
---
id: TASK-83
title: 'Jellyfin subtitle delay: shift to adjacent cue without seek jumps'
status: Done
assignee: []
created_date: '2026-03-02 00:06'
updated_date: '2026-03-02 00:06'
labels: []
dependencies: []
priority: high
ordinal: 9003
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Add keybinding-friendly special commands that shift `sub-delay` to align current subtitle start with next/previous cue start, without `sub-seek` probing (avoid playback jump).
Scope:
- add special commands for next/previous line alignment;
- compute delta from active subtitle cue timeline (external subtitle file/URL, including Jellyfin-delivered URLs);
- apply `add sub-delay <delta>` and show OSD value;
- keep existing proxy OSD behavior for direct `sub-delay` keybinding commands.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 New special commands exist for subtitle-delay shift to next/previous cue boundary.
- [x] #2 Shift logic parses active external subtitle source timings (SRT/VTT/ASS) and computes delta from current `sub-start`.
- [x] #3 Runtime applies delay shift without `sub-seek` and shows OSD feedback.
- [x] #4 Direct `sub-delay` proxy commands also show OSD current value.
- [x] #5 Tests added for cue parsing/shift behavior and IPC dispatch wiring.
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Implemented no-jump subtitle-delay alignment commands:
- added `__sub-delay-next-line` and `__sub-delay-prev-line` special commands;
- added `createShiftSubtitleDelayToAdjacentCueHandler` to parse cue start times from active external subtitle source and apply `add sub-delay` delta from current `sub-start`;
- wired command handling through IPC runtime deps into main runtime;
- retained/extended OSD proxy feedback for `sub-delay` keybindings;
- updated configuration docs and added regression tests for subtitle-delay shift and IPC command routing.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,37 @@
---
id: TASK-84
title: 'Docs Plausible endpoint uses /api/event path'
status: Done
assignee: []
created_date: '2026-03-03 00:00'
updated_date: '2026-03-03 00:00'
labels: []
dependencies: []
priority: medium
ordinal: 12000
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Fix the VitePress docs Plausible tracker config to post to the hosted worker's API event endpoint instead of the worker root URL.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Docs theme Plausible `endpoint` points to `https://worker.subminer.moe/api/event`.
- [x] #2 Plausible docs test asserts `/api/event` endpoint path.
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Updated docs Plausible tracker endpoint to `https://worker.subminer.moe/api/event` and updated regression test expectation accordingly.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,70 @@
---
id: TASK-84
title: Migrate AniSkip metadata+lookup orchestration to launcher/Electron
status: Done
assignee:
- Codex
created_date: '2026-03-03 08:31'
updated_date: '2026-03-03 08:35'
labels:
- enhancement
- aniskip
- launcher
- mpv-plugin
dependencies: []
references:
- launcher/aniskip-metadata.ts
- launcher/mpv.ts
- plugin/subminer/aniskip.lua
- plugin/subminer/options.lua
- plugin/subminer/state.lua
- plugin/subminer/lifecycle.lua
- plugin/subminer/messages.lua
- plugin/subminer.conf
- launcher/aniskip-metadata.test.ts
documentation:
- docs/mpv-plugin.md
- launcher/aniskip-metadata.ts
- plugin/subminer/aniskip.lua
- docs/architecture.md
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Move AniSkip MAL/title-to-MAL lookup and intro payload resolution from mpv Lua to launcher Electron flow, while keeping mpv-side intro skip UX and chapter/chapter prompt behavior in plugin. Launcher should infer/analyze file metadata, fetch AniSkip payload when launching files, and pass resolved skip window via script options; plugin should trust launcher payload and fall back only when absent.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Launcher infers AniSkip metadata for file targets using existing guessit/fallback logic and performs AniSkip MAL + payload resolution during mpv startup.
- [x] #2 Launcher injects script options containing resolved MAL id and intro window fields (or explicit lookup-failure status) into mpv startup.
- [x] #3 Lua plugin consumes launcher-provided AniSkip intro data and skips all network lookups when payload is present.
- [x] #4 Standalone mpv/plugin usage without launcher payload continues to function using existing async in-plugin lookup path.
- [x] #5 Docs and defaults are updated to document new script-option contract.
- [x] #6 Launcher tests cover payload generation contract and fallback behavior where metadata is unavailable.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Add launcher-side AniSkip payload resolution helpers in launcher/aniskip-metadata.ts (MAL prefix lookup + AniSkip payload fetch + result normalization).
2. Wire launcher/mpv.ts + buildSubminerScriptOpts to pass resolved AniSkip fields/mode in --script-opts for file playback.
3. Update plugin/subminer/aniskip.lua plus options/state to consume injected payload: if intro_start/end present, apply immediately and skip network lookup; otherwise retain existing async behavior.
4. Ensure fallback for standalone mpv usage remains intact for no-launcher/manual refresh.
5. Add/update tests/docs/config references for new script-opt contract and edge cases.
<!-- SECTION:PLAN:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Executed end-to-end migration so launcher resolves AniSkip title/MAL/payload before mpv start and injects it via --script-opts. Plugin now parses and consumes launcher payload (JSON/url/base64), applies OP intro from payload, tracks payload metadata in state, and keeps legacy async lookup path for non-launcher/absent payload playback. Added launcher config key aniskip_payload and updated launcher/aniskip-metadata tests for resolve/payload behavior and contract validation.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,38 @@
---
id: TASK-85
title: 'Remove docs Plausible analytics integration'
status: Done
assignee: []
created_date: '2026-03-03 00:00'
updated_date: '2026-03-03 00:00'
labels: []
dependencies: []
priority: medium
ordinal: 12001
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Remove Plausible analytics integration from docs theme and dependency graph. Keep docs build/runtime analytics-free.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Docs theme no longer imports or initializes Plausible tracker.
- [x] #2 `@plausible-analytics/tracker` removed from dependencies and lockfile.
- [x] #3 Docs analytics test reflects absence of Plausible wiring.
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Deleted Plausible runtime wiring from VitePress theme, removed tracker package via `bun remove`, and updated docs test to assert no Plausible integration remains.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,68 @@
---
id: TASK-86
title: 'Renderer: keyboard-driven Yomitan lookup mode and popup key forwarding'
status: Done
assignee:
- Codex
created_date: '2026-03-04 13:40'
updated_date: '2026-03-05 11:30'
labels:
- enhancement
- renderer
- yomitan
dependencies:
- TASK-77
references:
- src/renderer/handlers/keyboard.ts
- src/renderer/handlers/mouse.ts
- src/renderer/renderer.ts
- src/renderer/state.ts
- src/renderer/yomitan-popup.ts
- src/core/services/overlay-window.ts
- src/preload.ts
- src/shared/ipc/contracts.ts
- src/types.ts
- vendor/yomitan/js/app/frontend.js
- vendor/yomitan/js/app/popup.js
- vendor/yomitan/js/display/display.js
- vendor/yomitan/js/display/popup-main.js
- vendor/yomitan/js/display/display-audio.js
documentation:
- README.md
- docs/usage.md
- docs/shortcuts.md
priority: medium
ordinal: 13000
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Add true keyboard-driven token lookup flow in overlay:
- Toggle keyboard token-selection mode and navigate tokens by keyboard (`Arrow` + `HJKL`).
- Toggle Yomitan lookup window for selected token via fixed accelerator (`Ctrl/Cmd+Y`) without requiring mouse click.
- Preserve keyboard-only workflow while popup is open by forwarding popup keys (`J/K`, `M`, `P`, `[`, `]`) and restoring overlay focus on popup close.
- Ensure selection styling and hover metadata tooltips (frequency/JLPT) work for keyboard-selected token.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Keyboard mode toggle exists and shows visual selection outline for active token.
- [x] #2 Navigation works via arrows and vim keys while keyboard mode is enabled.
- [x] #3 Lookup window toggles from selected token with `Ctrl/Cmd+Y`; close path restores overlay keyboard focus.
- [x] #4 Popup-local controls work via keyboard forwarding (`J/K`, `M`, `P`, `[`, `]`), including mine action.
- [x] #5 Frequency/JLPT hover tags render for keyboard-selected token.
- [x] #6 Renderer/runtime tests cover new visibility/selection behavior, and docs are updated.
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Implemented keyboard-driven Yomitan workflow end-to-end in renderer + bundled Yomitan runtime bridge. Added overlay-level keyboard mode state, token selection sync, lookup toggle routing, popup command forwarding, and focus recovery after popup close. Follow-up fixes kept lookup open while moving between tokens, made popup-local `J/K` and `ArrowUp/ArrowDown` scroll work from overlay-owned focus with key repeat, skipped keyboard/token annotation flow for parser groups that have no dictionary-backed headword, and preserved paused playback when token navigation jumps across subtitle lines. Updated user docs/README to document the final shortcut behavior.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,71 @@
---
id: TASK-87
title: >-
Codebase health: harden verification and retire dead architecture identified
in the March 2026 review
status: To Do
assignee: []
created_date: '2026-03-06 03:19'
updated_date: '2026-03-06 03:20'
labels:
- tech-debt
- tests
- maintainability
milestone: m-0
dependencies: []
references:
- package.json
- README.md
- src/main.ts
- src/anki-integration.ts
- src/core/services/immersion-tracker-service.test.ts
- src/translators/index.ts
- src/subsync/engines.ts
- src/subtitle/pipeline.ts
documentation:
- docs/reports/2026-02-22-task-100-dead-code-report.md
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Track the remediation work from the March 6, 2026 code review. The review found that the default test gate only exercises 53 of 241 test files, the dedicated subtitle test lane is a no-op, SQLite-backed immersion tracking tests are conditionally skipped in the standard Bun run, src/main.ts still contains a large dead-symbol backlog, several registry/pipeline modules appear unreferenced from live execution paths, and src/anki-integration.ts remains an oversized orchestration file. This parent task should coordinate a safe sequence: improve verification first, then remove dead code and continue decomposition with good test coverage in place.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Child tasks are created for each remediation workstream with explicit dependencies and enough context for an isolated agent to execute them.
- [ ] #2 The parent task records the recommended sequencing and parallelization strategy so replacement agents can resume without conversation history.
- [ ] #3 Completion of the parent task leaves the repository with a materially more trustworthy test gate, less dead architecture, and clearer ownership boundaries for the main runtime and Anki integration surfaces.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
Recommended sequencing:
1. Run TASK-87.1, TASK-87.2, TASK-87.3, and TASK-87.7 first. These are the safety-net and tooling tasks and can largely proceed in parallel.
2. Start TASK-87.4 once TASK-87.1 lands so src/main.ts cleanup happens under a more trustworthy test matrix.
3. Start TASK-87.5 after TASK-87.1 and TASK-87.2 so dead subsync/pipeline cleanup happens with stronger subtitle and runtime verification.
4. Start TASK-87.6 after TASK-87.1 so Anki refactors happen with broader default coverage in place.
5. Keep PRs focused: do not combine verification work with architectural cleanup unless a narrow dependency requires it.
Parallelization guidance:
- Wave 1 parallel: TASK-87.1, TASK-87.2, TASK-87.3, TASK-87.7
- Wave 2 parallel: TASK-87.4, TASK-87.5, TASK-87.6
Shared review context to restate in child tasks:
- Standard test scripts currently reference only 53 unique test files out of 241 discovered test and type-test files under src/ and launcher/.
- test:subtitle is currently a placeholder echo even though subtitle sync is a user-facing feature.
- SQLite-backed immersion tracker tests are conditionally skipped in the standard Bun run.
- src/main.ts trips many noUnusedLocals/noUnusedParameters diagnostics.
- src/translators/index.ts, src/subsync/engines.ts, src/subtitle/pipeline.ts, src/tokenizers/index.ts, and src/token-mergers/index.ts appeared unreferenced during review and must be re-verified before deletion.
<!-- SECTION:PLAN:END -->

View File

@@ -0,0 +1,53 @@
---
id: TASK-87.1
title: >-
Testing workflow: make standard test commands reflect the maintained test
surface
status: To Do
assignee: []
created_date: '2026-03-06 03:19'
updated_date: '2026-03-06 03:21'
labels:
- tests
- maintainability
milestone: m-0
dependencies: []
references:
- package.json
- src/main-entry-runtime.test.ts
- src/anki-integration/anki-connect-proxy.test.ts
- src/main/runtime/jellyfin-remote-playback.test.ts
- src/main/runtime/registry.test.ts
documentation:
- docs/reports/2026-02-22-task-100-dead-code-report.md
parent_task_id: TASK-87
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
The current package scripts hand-enumerate a small subset of test files, which leaves the standard green signal misleading. A local audit found 241 test/type-test files under src/ and launcher/, but only 53 unique files referenced by the standard package.json test scripts. This task should redesign the runnable test matrix so maintained tests are either executed by the standard commands or intentionally excluded through a documented rule, instead of silently drifting out of coverage.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 The repository has a documented and reproducible test matrix for standard development commands, including which suites belong in the default lane versus slower or environment-specific lanes.
- [ ] #2 The standard test entrypoints stop relying on a brittle hand-maintained allowlist for the currently covered unit and integration suites, or an explicit documented mechanism exists that prevents silent omission of new tests.
- [ ] #3 Representative tests that were previously outside the standard lane from src/main/runtime, src/anki-integration, and entry/runtime surfaces are executed by an automated command and included in the documented matrix.
- [ ] #4 Documentation for contributors explains which command to run for fast verification, full verification, and environment-specific verification.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Inventory the current test surface under src/ and launcher/ and compare it to package.json scripts to classify fast, full, slow, and environment-specific suites.
2. Replace or reduce the brittle hand-maintained allowlist so new maintained tests do not silently miss the standard matrix.
3. Update contributor docs with the intended fast/full/environment-specific commands.
4. Verify the new matrix by running the relevant commands and by demonstrating at least one previously omitted runtime/Anki/entry test now belongs to an automated lane.
<!-- SECTION:PLAN:END -->

View File

@@ -0,0 +1,53 @@
---
id: TASK-87.2
title: >-
Subtitle sync verification: replace the no-op subtitle lane with real
automated coverage
status: To Do
assignee: []
created_date: '2026-03-06 03:19'
updated_date: '2026-03-06 03:21'
labels:
- tests
- subsync
milestone: m-0
dependencies: []
references:
- package.json
- README.md
- src/core/services/subsync.ts
- src/core/services/subsync.test.ts
- src/subsync/utils.ts
documentation:
- docs/reports/2026-02-22-task-100-dead-code-report.md
parent_task_id: TASK-87
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
SubMiner advertises subtitle syncing with alass and ffsubsync, but the dedicated test:subtitle command currently does not run any tests. There is already lower-level coverage in src/core/services/subsync.test.ts, but the test matrix and contributor-facing commands do not reflect that reality. This task should replace the no-op lane with real verification, align scripts with the existing subsync test surface, and make the user-facing docs honest about how subtitle sync is verified.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 The test:subtitle entrypoint runs real automated verification instead of echoing a placeholder message.
- [ ] #2 The subtitle verification lane covers both alass and ffsubsync behavior, including at least one non-happy-path scenario relevant to current functionality.
- [ ] #3 Contributor-facing documentation points to the real subtitle verification command and no longer implies a dedicated test lane exists when it does not.
- [ ] #4 The resulting verification strategy integrates cleanly with the repository-wide test matrix without duplicating or hiding existing subsync coverage.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Audit the existing subtitle-sync test surface, especially src/core/services/subsync.test.ts, and decide whether test:subtitle should reuse or regroup that coverage.
2. Replace the placeholder script with a real automated command and keep the matrix legible alongside TASK-87.1 work.
3. Update README or related docs so the advertised subtitle verification path matches reality.
4. Verify both alass and ffsubsync behavior remain covered by the resulting lane.
<!-- SECTION:PLAN:END -->

View File

@@ -0,0 +1,52 @@
---
id: TASK-87.3
title: >-
Immersion tracking verification: make SQLite-backed persistence tests visible
and reproducible
status: To Do
assignee: []
created_date: '2026-03-06 03:19'
updated_date: '2026-03-06 03:21'
labels:
- tests
- immersion-tracking
milestone: m-0
dependencies: []
references:
- src/core/services/immersion-tracker-service.test.ts
- src/core/services/immersion-tracker/storage-session.test.ts
- src/core/services/immersion-tracker-service.ts
- package.json
documentation:
- docs/reports/2026-02-22-task-100-dead-code-report.md
parent_task_id: TASK-87
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
The immersion tracker is persistence-heavy, but its SQLite-backed tests are conditionally skipped in the standard Bun run when node:sqlite support is unavailable. That creates a blind spot around session finalization, telemetry persistence, and retention behavior. This task should establish a reliable automated verification path for the database-backed cases and make the prerequisite/runtime behavior explicit to contributors and CI.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Database-backed immersion tracking tests run in at least one documented automated command that is practical for contributors or CI to execute.
- [ ] #2 If the current runtime cannot execute the SQLite-backed tests, the repository exposes that limitation clearly instead of silently reporting a misleading green result.
- [ ] #3 Contributor-facing documentation explains how to run the immersion tracker verification lane and any environment prerequisites it depends on.
- [ ] #4 The resulting verification covers session persistence or finalization behavior that is not exercised by the pure seam tests alone.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Confirm which SQLite-backed immersion tests are currently skipped and why in the standard Bun environment.
2. Establish a reproducible command or lane for the DB-backed cases, or make the unsupported-runtime limitation explicit and actionable.
3. Document prerequisites and expected behavior for contributors and CI.
4. Verify at least one persistence/finalization path beyond the seam tests is exercised by the new lane.
<!-- SECTION:PLAN:END -->

View File

@@ -0,0 +1,53 @@
---
id: TASK-87.4
title: >-
Runtime composition root: remove dead symbols and tighten module boundaries in
src/main.ts
status: To Do
assignee: []
created_date: '2026-03-06 03:19'
updated_date: '2026-03-06 03:21'
labels:
- tech-debt
- runtime
- maintainability
milestone: m-0
dependencies:
- TASK-87.1
references:
- src/main.ts
- src/main/runtime
- package.json
documentation:
- docs/reports/2026-02-22-task-100-dead-code-report.md
parent_task_id: TASK-87
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
A noUnusedLocals/noUnusedParameters compile pass reports a large concentration of dead imports and dead locals in src/main.ts. The file is also far beyond the repo's preferred size guideline, which makes the runtime composition root difficult to review and easy to break. This task should remove confirmed dead symbols, continue extracting coherent slices where that improves readability, and leave the entrypoint materially easier to understand without changing behavior.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 src/main.ts no longer emits dead-symbol diagnostics under a noUnusedLocals/noUnusedParameters compile pass for the areas touched by this cleanup.
- [ ] #2 Unused imports, destructured values, and stale locals identified in the current composition root are removed or relocated without behavior changes.
- [ ] #3 The resulting composition root has clearer ownership boundaries for at least one runtime slice that is currently buried in the monolith.
- [ ] #4 Relevant runtime and startup verification commands pass after the cleanup, and any command changes are documented if needed.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Re-run the noUnusedLocals/noUnusedParameters compile pass and capture the src/main.ts diagnostics cluster before editing.
2. Remove dead imports, destructured values, and stale locals in small reviewable slices; extract a coherent helper/module only where that materially reduces coupling.
3. Keep changes behavior-preserving and avoid mixing unrelated cleanup outside src/main.ts unless required to compile.
4. Verify with the updated runtime/startup test commands from TASK-87.1 plus a noUnused compile pass.
<!-- SECTION:PLAN:END -->

View File

@@ -0,0 +1,57 @@
---
id: TASK-87.5
title: >-
Dead architecture cleanup: delete unused registry and pipeline modules that
are off the live path
status: To Do
assignee: []
created_date: '2026-03-06 03:20'
updated_date: '2026-03-06 03:21'
labels:
- tech-debt
- dead-code
milestone: m-0
dependencies:
- TASK-87.1
- TASK-87.2
references:
- src/translators/index.ts
- src/subsync/engines.ts
- src/subtitle/pipeline.ts
- src/tokenizers/index.ts
- src/token-mergers/index.ts
- src/core/services/subsync.ts
- src/core/services/tokenizer.ts
documentation:
- docs/reports/2026-02-22-task-100-dead-code-report.md
parent_task_id: TASK-87
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
The review found several modules that appear self-contained but unused from the application's live execution paths: src/translators/index.ts, src/subsync/engines.ts, src/subtitle/pipeline.ts, src/tokenizers/index.ts, and src/token-mergers/index.ts. At the same time, the real runtime behavior is implemented elsewhere. This task should verify those modules are truly unused, remove or consolidate them, and clean up any stale exports, docs, or tests so contributors are not misled by duplicate architecture.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Each candidate module identified in the review is either removed as dead code or justified and reconnected to a real supported execution path.
- [ ] #2 Any stale exports, imports, or tests associated with the removed or consolidated modules are cleaned up so the codebase has a single obvious path for the affected behavior.
- [ ] #3 The cleanup does not regress live tokenization or subtitle sync behavior and the relevant verification commands remain green.
- [ ] #4 Contributor-facing documentation or internal notes no longer imply that removed duplicate architecture is part of the current design.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Re-verify each candidate module is off the live path by tracing imports from current runtime entrypoints before deleting anything.
2. Remove or consolidate truly dead modules and clean associated exports/imports/tests so only the supported path remains obvious.
3. Pay special attention to subtitle sync and tokenization surfaces, since duplicate architecture exists near active code.
4. Verify the relevant tokenization and subsync commands/tests still pass and update any stale docs or notes.
<!-- SECTION:PLAN:END -->

View File

@@ -0,0 +1,57 @@
---
id: TASK-87.6
title: >-
Anki integration maintainability: continue decomposing the oversized
orchestration layer
status: To Do
assignee: []
created_date: '2026-03-06 03:20'
updated_date: '2026-03-06 03:21'
labels:
- tech-debt
- anki
- maintainability
milestone: m-0
dependencies:
- TASK-87.1
references:
- src/anki-integration.ts
- src/anki-integration/field-grouping-workflow.ts
- src/anki-integration/note-update-workflow.ts
- src/anki-integration/card-creation.ts
- src/anki-integration/anki-connect-proxy.ts
- src/anki-integration.test.ts
documentation:
- docs/reports/2026-02-22-task-100-dead-code-report.md
- docs/anki-integration.md
parent_task_id: TASK-87
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
src/anki-integration.ts remains an oversized orchestration file even after earlier extractions. It still mixes config normalization, polling setup, media generation, duplicate resolution, field grouping workflows, and user feedback coordination in one class. This task should continue the decomposition so the remaining orchestration surface is smaller and easier to reason about, while preserving existing Anki, proxy, field grouping, and note update behavior.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 The responsibilities currently concentrated in src/anki-integration.ts are split into clearer modules or services with narrow ownership boundaries.
- [ ] #2 The resulting orchestration surface is materially smaller and easier to review, with at least one mixed-responsibility cluster extracted behind a well-named interface.
- [ ] #3 Existing Anki integration behavior remains covered by automated verification, including note update, field grouping, and proxy-related flows that the refactor touches.
- [ ] #4 Any developer-facing docs or notes needed to understand the new structure are updated in the same task.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Map the remaining responsibility clusters inside src/anki-integration.ts and choose one or more extraction seams that reduce mixed concerns without changing behavior.
2. Move logic behind narrow interfaces/modules rather than creating another giant helper; keep orchestration readable.
3. Preserve coverage for field grouping, note update, proxy, and card creation flows touched by the refactor.
4. Update docs or internal notes if the new structure changes where contributors should look for a given behavior.
<!-- SECTION:PLAN:END -->

View File

@@ -0,0 +1,51 @@
---
id: TASK-87.7
title: >-
Developer workflow hygiene: make docs watch reproducible and remove stale
small-surface drift
status: To Do
assignee: []
created_date: '2026-03-06 03:20'
updated_date: '2026-03-06 03:21'
labels:
- tooling
- tech-debt
milestone: m-0
dependencies: []
references:
- package.json
- bun.lock
- src/anki-integration/field-grouping-workflow.ts
documentation:
- docs/reports/2026-02-22-task-100-dead-code-report.md
parent_task_id: TASK-87
priority: low
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
The review found a few low-risk but recurring hygiene issues: docs:watch depends on bunx concurrently even though concurrently is not declared in package metadata, and small stale API surface remains after recent refactors, such as unused parameters in field-grouping workflow code. This task should make the developer workflow reproducible and clean up low-risk stale symbols that do not warrant a dedicated architecture task.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 The docs:watch workflow runs through declared project tooling or is rewritten to avoid undeclared dependencies.
- [ ] #2 Small stale symbols or parameters identified during the review outside the main composition-root cleanup are removed without behavior changes.
- [ ] #3 Any contributor-facing command changes are reflected in repository documentation.
- [ ] #4 The cleanup remains scoped to low-risk workflow and hygiene fixes rather than expanding into large architectural refactors.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Fix the docs:watch workflow so it relies on declared project tooling or an equivalent checked-in command path.
2. Clean up low-risk stale symbols surfaced by the review outside the main.ts architecture task, such as unused parameters left behind by refactors.
3. Keep the task scoped: avoid pulling in main composition-root cleanup or larger Anki/runtime refactors.
4. Verify the affected developer commands still work and document any usage changes.
<!-- SECTION:PLAN:END -->

View File

@@ -88,6 +88,7 @@
"alass_path": "", // Alass path setting.
"ffsubsync_path": "", // Ffsubsync path setting.
"ffmpeg_path": "", // Ffmpeg path setting.
"replace": true, // Replace active subtitle file when synchronization succeeds.
}, // Subsync engine and executable paths.
// ==========================================

View File

@@ -188,7 +188,9 @@ export default {
});
};
onMounted(render);
onMounted(() => {
render();
});
watch(() => route.path, render);
},
};

View File

@@ -44,12 +44,15 @@ Polling mode uses the query `"deck:<your-deck>" added:1` to find recently added
Then point Yomitan/clients to `http://127.0.0.1:8766` instead of `8765`.
When SubMiner loads the bundled Yomitan extension, it also attempts to update the **default Yomitan profile** (`profiles[0].options.anki.server`) to the active SubMiner endpoint:
When SubMiner loads the bundled Yomitan extension, it also attempts to update the **active bundled Yomitan profile** (`profiles[profileCurrent].options.anki.server`) to the active SubMiner endpoint:
- proxy URL when `ankiConnect.proxy.enabled` is `true`
- direct `ankiConnect.url` when proxy mode is disabled
To avoid clobbering custom setups, this auto-update only changes the default profile when its current server is blank or the stock Yomitan default (`http://127.0.0.1:8765`).
Server update behavior differs by mode:
- Proxy mode (`ankiConnect.proxy.enabled: true`): SubMiner force-syncs the bundled active profile to the proxy URL so `addNote` traffic goes through the local proxy and auto-enrichment can trigger.
- Direct mode (`ankiConnect.proxy.enabled: false`): SubMiner only replaces blank/default server values (`http://127.0.0.1:8765`) to avoid overwriting custom direct-server setups.
For browser-based Yomitan or other external clients (for example Texthooker in a normal browser profile), set their Anki server to the same proxy URL separately: `http://127.0.0.1:8766` (or your configured `proxy.host` + `proxy.port`).
@@ -69,7 +72,7 @@ In Yomitan, go to Settings → Profile and:
3. Set server to `http://127.0.0.1:8766` (or your configured proxy URL).
4. Save and make that profile active when using SubMiner.
This is only for non-bundled, external/browser Yomitan or other clients. The bundled profile auto-update logic only targets `profiles[0]` when it is blank or still default.
This is only for non-bundled, external/browser Yomitan or other clients. Bundled Yomitan profile sync behavior is described above (force-sync in proxy mode, conservative sync in direct mode).
### Proxy Troubleshooting (quick checks)

View File

@@ -125,8 +125,8 @@ Control the minimum log level for runtime output:
}
```
| Option | Values | Description |
| ------- | ----------------------------------- | ------------------------------------------------ |
| Option | Values | Description |
| ------- | ---------------------------------------- | --------------------------------------------------------- |
| `level` | `"debug"`, `"info"`, `"warn"`, `"error"` | Minimum log level for runtime logging (default: `"info"`) |
### Auto-Start Overlay
@@ -258,7 +258,8 @@ See `config.example.jsonc` for detailed configuration options.
| `backgroundColor` | string | Any CSS color, including `"transparent"` (default: `"rgb(30, 32, 48, 0.88)"`) |
| `enableJlpt` | boolean | Enable JLPT level underline styling (`false` by default) |
| `preserveLineBreaks` | boolean | Preserve line breaks in visible overlay subtitle rendering (`false` by default). Enable to mirror mpv line layout. |
| `autoPauseVideoOnHover` | boolean | Pause playback while mouse hovers subtitle text, then resume on leave (`true` by default). |
| `autoPauseVideoOnHover` | boolean | Pause playback while mouse hovers subtitle text; resume after leaving subtitle area (`true` by default). |
| `autoPauseVideoOnYomitanPopup` | boolean | Pause playback while Yomitan popup is open; resume when popup closes (`false` by default). |
| `hoverTokenColor` | string | Hex color used for hovered subtitle token highlight in mpv (default: catppuccin mauve) |
| `hoverTokenBackgroundColor` | string | CSS color used for hovered subtitle token background highlight (default: semi-transparent dark) |
| `frequencyDictionary.enabled` | boolean | Enable frequency highlighting from dictionary lookups (`false` by default) |
@@ -322,6 +323,7 @@ Set the initial vertical subtitle position (measured from the bottom of the scre
| Option | Values | Description |
| ---------- | ---------------- | ---------------------------------------------------------------------- |
| `yPercent` | number (0 - 100) | Distance from the bottom as a percent of screen height (default: `10`) |
In the overlay, you can fine-tune subtitle position at runtime with `Right-click + drag` on subtitle text.
### Secondary Subtitles
@@ -364,21 +366,23 @@ See `config.example.jsonc` for detailed configuration options and more examples.
**Default keybindings:**
| Key | Command | Description |
| ----------------- | ---------------------------- | ------------------------------------- |
| `Space` | `["cycle", "pause"]` | Toggle pause |
| `KeyJ` | `["cycle", "sid"]` | Cycle primary subtitle track |
| `Shift+KeyJ` | `["cycle", "secondary-sid"]` | Cycle secondary subtitle track |
| `ArrowRight` | `["seek", 5]` | Seek forward 5 seconds |
| `ArrowLeft` | `["seek", -5]` | Seek backward 5 seconds |
| `ArrowUp` | `["seek", 60]` | Seek forward 60 seconds |
| `ArrowDown` | `["seek", -60]` | Seek backward 60 seconds |
| `Shift+KeyH` | `["sub-seek", -1]` | Jump to previous subtitle |
| `Shift+KeyL` | `["sub-seek", 1]` | Jump to next subtitle |
| `Ctrl+Shift+KeyH` | `["__replay-subtitle"]` | Replay current subtitle, pause at end |
| `Ctrl+Shift+KeyL` | `["__play-next-subtitle"]` | Play next subtitle, pause at end |
| `KeyQ` | `["quit"]` | Quit mpv |
| `Ctrl+KeyW` | `["quit"]` | Quit mpv |
| Key | Command | Description |
| -------------------- | ---------------------------- | ------------------------------------- |
| `Space` | `["cycle", "pause"]` | Toggle pause |
| `KeyJ` | `["cycle", "sid"]` | Cycle primary subtitle track |
| `Shift+KeyJ` | `["cycle", "secondary-sid"]` | Cycle secondary subtitle track |
| `ArrowRight` | `["seek", 5]` | Seek forward 5 seconds |
| `ArrowLeft` | `["seek", -5]` | Seek backward 5 seconds |
| `ArrowUp` | `["seek", 60]` | Seek forward 60 seconds |
| `ArrowDown` | `["seek", -60]` | Seek backward 60 seconds |
| `Shift+KeyH` | `["sub-seek", -1]` | Jump to previous subtitle |
| `Shift+KeyL` | `["sub-seek", 1]` | Jump to next subtitle |
| `Shift+BracketLeft` | `["__sub-delay-prev-line"]` | Shift subtitle delay to previous cue |
| `Shift+BracketRight` | `["__sub-delay-next-line"]` | Shift subtitle delay to next cue |
| `Ctrl+Shift+KeyH` | `["__replay-subtitle"]` | Replay current subtitle, pause at end |
| `Ctrl+Shift+KeyL` | `["__play-next-subtitle"]` | Play next subtitle, pause at end |
| `KeyQ` | `["quit"]` | Quit mpv |
| `Ctrl+KeyW` | `["quit"]` | Quit mpv |
**Custom keybindings example:**
@@ -402,11 +406,11 @@ See `config.example.jsonc` for detailed configuration options and more examples.
{ "key": "Space", "command": null }
```
**Special commands:** Commands prefixed with `__` are handled internally by the overlay rather than sent to mpv. `__replay-subtitle` replays the current subtitle and pauses at its end. `__play-next-subtitle` seeks to the next subtitle, plays it, and pauses at its end. `__runtime-options-open` opens the runtime options palette. `__runtime-option-cycle:<id>[:next|prev]` cycles a runtime option value.
**Special commands:** Commands prefixed with `__` are handled internally by the overlay rather than sent to mpv. `__replay-subtitle` replays the current subtitle and pauses at its end. `__play-next-subtitle` seeks to the next subtitle, plays it, and pauses at its end. `__sub-delay-next-line` shifts subtitle delay so the active line aligns to the next cue start in the active subtitle source. `__sub-delay-prev-line` shifts subtitle delay so the active line aligns to the previous cue start. `__runtime-options-open` opens the runtime options palette. `__runtime-option-cycle:<id>[:next|prev]` cycles a runtime option value.
**Supported commands:** Any valid mpv JSON IPC command array (`["cycle", "pause"]`, `["seek", 5]`, `["script-binding", "..."]`, etc.)
For subtitle-position and subtitle-track proxy commands (`sub-pos`, `sid`, `secondary-sid`), SubMiner also shows an mpv OSD notification after the command runs.
For subtitle-position and subtitle-track proxy commands (`sub-pos`, `sid`, `secondary-sid`) and subtitle delay commands (`sub-delay`), SubMiner also shows an mpv OSD notification after the command runs.
**See `config.example.jsonc`** for more keybinding examples and configuration options.
@@ -768,17 +772,19 @@ Sync the active subtitle track using `alass` (preferred) or `ffsubsync`:
"defaultMode": "auto",
"alass_path": "",
"ffsubsync_path": "",
"ffmpeg_path": ""
"ffmpeg_path": "",
"replace": true
}
}
```
| Option | Values | Description |
| ---------------- | -------------------- | ----------------------------------------------------------------------------------------------------------- |
| `defaultMode` | `"auto"`, `"manual"` | `auto`: try `alass` against secondary subtitle, then fallback to `ffsubsync`; `manual`: open overlay picker |
| `alass_path` | string path | Path to `alass` executable. Empty or `null` falls back to `/usr/bin/alass`. |
| `ffsubsync_path` | string path | Path to `ffsubsync` executable. Empty or `null` falls back to `/usr/bin/ffsubsync`. |
| `ffmpeg_path` | string path | Path to `ffmpeg` (used for internal subtitle extraction). Empty or `null` falls back to `/usr/bin/ffmpeg`. |
| Option | Values | Description |
| ---------------- | -------------------- | ------------------------------------------------------------------------------------------------------------------------- |
| `defaultMode` | `"auto"`, `"manual"` | `auto`: try `alass` against secondary subtitle, then fallback to `ffsubsync`; `manual`: open overlay picker |
| `alass_path` | string path | Path to `alass` executable. Empty or `null` falls back to `/usr/bin/alass`. |
| `ffsubsync_path` | string path | Path to `ffsubsync` executable. Empty or `null` falls back to `/usr/bin/ffsubsync`. |
| `ffmpeg_path` | string path | Path to `ffmpeg` (used for internal subtitle extraction). Empty or `null` falls back to `/usr/bin/ffmpeg`. |
| `replace` | `true`, `false` | When `true` (default), overwrite the active subtitle file on successful sync. When `false`, write `<name>_retimed.<ext>`. |
Default trigger is `Ctrl+Alt+S` via `shortcuts.triggerSubsync`.
Customize it there, or set it to `null` to disable.

View File

@@ -79,18 +79,18 @@ Use `subminer <subcommand> -h` for command-specific help.
## Options
| Flag | Description |
| ----------------------- | --------------------------------------------------- |
| `-d, --directory` | Video search directory (default: cwd) |
| `-r, --recursive` | Search directories recursively |
| `-R, --rofi` | Use rofi instead of fzf |
| `--start` | Explicitly start overlay after mpv launches |
| `-S, --start-overlay` | Explicitly start overlay after mpv launches |
| `-T, --no-texthooker` | Disable texthooker server |
| `-p, --profile` | mpv profile name (default: `subminer`) |
| `-b, --backend` | Force window backend (`hyprland`, `sway`, `x11`) |
| `--log-level` | Logger verbosity (`debug`, `info`, `warn`, `error`) |
| `--dev`, `--debug` | Enable app dev-mode (not tied to log level) |
| Flag | Description |
| --------------------- | --------------------------------------------------- |
| `-d, --directory` | Video search directory (default: cwd) |
| `-r, --recursive` | Search directories recursively |
| `-R, --rofi` | Use rofi instead of fzf |
| `--start` | Explicitly start overlay after mpv launches |
| `-S, --start-overlay` | Explicitly start overlay after mpv launches |
| `-T, --no-texthooker` | Disable texthooker server |
| `-p, --profile` | mpv profile name (default: `subminer`) |
| `-b, --backend` | Force window backend (`hyprland`, `sway`, `x11`) |
| `--log-level` | Logger verbosity (`debug`, `info`, `warn`, `error`) |
| `--dev`, `--debug` | Enable app dev-mode (not tied to log level) |
With default plugin settings (`auto_start=yes`, `auto_start_visible_overlay=yes`, `auto_start_pause_until_ready=yes`), explicit start flags are usually unnecessary.

View File

@@ -34,6 +34,7 @@ The visible overlay renders subtitles as tokenized, clickable word spans. Each w
- Word-level click targets for Yomitan lookup
- Auto pause/resume on subtitle hover (enabled by default via `subtitleStyle.autoPauseVideoOnHover`)
- Optional auto-pause while Yomitan popup is open (`subtitleStyle.autoPauseVideoOnYomitanPopup`)
- Right-click to pause/resume
- Right-click + drag to reposition subtitles
- Modal dialogs for Jimaku search, field grouping, subsync, and runtime options

View File

@@ -120,27 +120,28 @@ aniskip_button_duration=3
### Option Reference
| Option | Default | Values | Description |
| ---------------------------- | ----------------------------- | ------------------------------------------ | ---------------------------------------------------------------------- |
| `binary_path` | `""` (auto-detect) | file path | Path to SubMiner binary |
| `socket_path` | `/tmp/subminer-socket` | file path | MPV IPC socket path |
| `texthooker_enabled` | `yes` | `yes` / `no` | Enable texthooker server |
| `texthooker_port`            | `5174`                        | 1–65535                                    | Texthooker server port                                                 |
| `backend` | `auto` | `auto`, `hyprland`, `sway`, `x11`, `macos` | Window manager backend |
| `auto_start` | `yes` | `yes` / `no` | Auto-start overlay on file load when mpv socket matches `socket_path` |
| `auto_start_visible_overlay` | `yes` | `yes` / `no` | Show visible layer on auto-start when mpv socket matches `socket_path` |
| `auto_start_pause_until_ready` | `yes` | `yes` / `no` | Pause mpv on visible auto-start; resume when SubMiner signals tokenization-ready |
| `osd_messages` | `yes` | `yes` / `no` | Show OSD status messages |
| `log_level` | `info` | `debug`, `info`, `warn`, `error` | Log verbosity |
| `aniskip_enabled` | `yes` | `yes` / `no` | Enable AniSkip intro detection |
| `aniskip_title` | `""` | string | Override title used for lookup |
| `aniskip_season` | `""` | numeric season | Optional season hint |
| `aniskip_mal_id` | `""` | numeric MAL id | Skip title lookup; use fixed id |
| `aniskip_episode` | `""` | numeric episode | Skip episode parsing; use fixed |
| `aniskip_show_button` | `yes` | `yes` / `no` | Show in-range intro skip prompt |
| `aniskip_button_text` | `You can skip by pressing %s` | string | OSD prompt format (`%s`=key) |
| `aniskip_button_key` | `y-k` | mpv key chord | Primary key for intro skip action (`y-k` always works as fallback) |
| `aniskip_button_duration` | `3` | float seconds | OSD hint duration |
| Option | Default | Values | Description |
| ------------------------------ | ----------------------------- | ------------------------------------------ | ------------------------------------------------------------------------------------------ |
| `binary_path` | `""` (auto-detect) | file path | Path to SubMiner binary |
| `socket_path` | `/tmp/subminer-socket` | file path | MPV IPC socket path |
| `texthooker_enabled` | `yes` | `yes` / `no` | Enable texthooker server |
| `texthooker_port`              | `5174`                        | 1–65535                                    | Texthooker server port                                                                     |
| `backend` | `auto` | `auto`, `hyprland`, `sway`, `x11`, `macos` | Window manager backend |
| `auto_start` | `yes` | `yes` / `no` | Auto-start overlay on file load when mpv socket matches `socket_path` |
| `auto_start_visible_overlay` | `yes` | `yes` / `no` | Show visible layer on auto-start when mpv socket matches `socket_path` |
| `auto_start_pause_until_ready` | `yes` | `yes` / `no` | Pause mpv on visible auto-start; resume when SubMiner signals tokenization-ready |
| `osd_messages` | `yes` | `yes` / `no` | Show OSD status messages |
| `log_level` | `info` | `debug`, `info`, `warn`, `error` | Log verbosity |
| `aniskip_enabled` | `yes` | `yes` / `no` | Enable AniSkip intro detection |
| `aniskip_title` | `""` | string | Override title used for lookup |
| `aniskip_season` | `""` | numeric season | Optional season hint |
| `aniskip_mal_id` | `""` | numeric MAL id | Skip title lookup; use fixed id |
| `aniskip_episode` | `""` | numeric episode | Skip episode parsing; use fixed |
| `aniskip_payload` | `""` | JSON / base64-encoded JSON | Optional pre-fetched AniSkip payload for this media. When set, plugin skips network lookup |
| `aniskip_show_button` | `yes` | `yes` / `no` | Show in-range intro skip prompt |
| `aniskip_button_text` | `You can skip by pressing %s` | string | OSD prompt format (`%s`=key) |
| `aniskip_button_key` | `y-k` | mpv key chord | Primary key for intro skip action (`y-k` always works as fallback) |
| `aniskip_button_duration` | `3` | float seconds | OSD hint duration |
## Binary Auto-Detection
@@ -208,7 +209,8 @@ script-message subminer-start backend=hyprland socket=/custom/path texthooker=no
- You explicitly call `script-message subminer-aniskip-refresh`.
- Lookups are asynchronous (no blocking `ps`/`curl` on `file-loaded`).
- MAL/title resolution is cached for the current mpv session.
- When launched via `subminer`, launcher runs `guessit` first (file targets) and passes title/season/episode to the plugin; fallback is filename-derived title.
- When launched via `subminer`, launcher can pass `aniskip_payload` (pre-fetched AniSkip `skip-times` payload) and the plugin applies it directly without making API calls.
- If the payload is absent or invalid, lookup falls back to title/MAL-based async fetch.
- Install `guessit` for best detection quality (`python3 -m pip install --user guessit`).
- If OP interval exists, plugin adds `AniSkip Intro Start` and `AniSkip Intro End` chapters.
- At intro start, plugin shows an OSD hint for the first 3 seconds (`You can skip by pressing y-k` by default).

15
docs/plausible.test.ts Normal file
View File

@@ -0,0 +1,15 @@
import { expect, test } from 'bun:test';
import { readFileSync } from 'node:fs';
const docsThemePath = new URL('./.vitepress/theme/index.ts', import.meta.url);
const docsThemeContents = readFileSync(docsThemePath, 'utf8');
test('docs theme has no plausible analytics wiring', () => {
  // Every fragment below is a remnant of the removed Plausible integration;
  // none may reappear in the docs theme entry point.
  const forbiddenFragments = [
    '@plausible-analytics/tracker',
    'initPlausibleTracker',
    'worker.subminer.moe',
    'domain:',
    'outboundLinks: true',
    'fileDownloads: true',
    'formSubmissions: true',
  ];
  for (const fragment of forbiddenFragments) {
    expect(docsThemeContents).not.toContain(fragment);
  }
});

View File

@@ -88,6 +88,7 @@
"alass_path": "", // Alass path setting.
"ffsubsync_path": "", // Ffsubsync path setting.
"ffmpeg_path": "", // Ffmpeg path setting.
"replace": true, // Replace active subtitle file when synchronization succeeds.
}, // Subsync engine and executable paths.
// ==========================================
@@ -106,7 +107,8 @@
"subtitleStyle": {
"enableJlpt": false, // Enable JLPT vocabulary level underlines. When disabled, JLPT tagging lookup and underlines are skipped. Values: true | false
"preserveLineBreaks": false, // Preserve line breaks in visible overlay subtitle rendering. When false, line breaks are flattened to spaces for a single-line flow. Values: true | false
"autoPauseVideoOnHover": true, // Automatically pause mpv playback while hovering subtitle text, then resume on leave. Values: true | false
"autoPauseVideoOnHover": true, // Automatically pause mpv playback while hovering subtitle text; resume after leaving subtitle area. Values: true | false
"autoPauseVideoOnYomitanPopup": false, // Automatically pause mpv playback while Yomitan popup is open; resume when popup closes. Values: true | false
"hoverTokenColor": "#f4dbd6", // Hex color used for hovered subtitle token highlight in mpv.
"hoverTokenBackgroundColor": "rgba(54, 58, 79, 0.84)", // CSS color used for hovered subtitle token background highlight in mpv.
"fontFamily": "M PLUS 1 Medium, Source Han Sans JP, Noto Sans CJK JP", // Font family setting.

View File

@@ -46,6 +46,8 @@ These control playback and subtitle display. They require overlay window focus.
| `ArrowDown` | Seek backward 60 seconds |
| `Shift+H` | Jump to previous subtitle |
| `Shift+L` | Jump to next subtitle |
| `Shift+[` | Shift subtitle delay to previous subtitle cue |
| `Shift+]` | Shift subtitle delay to next subtitle cue |
| `Ctrl+Shift+H` | Replay current subtitle (play to end, then pause) |
| `Ctrl+Shift+L` | Play next subtitle (jump, play to end, then pause) |
| `Q` | Quit mpv |
@@ -56,7 +58,32 @@ These control playback and subtitle display. They require overlay window focus.
These keybindings can be overridden or disabled via the `keybindings` config array.
Mouse-hover playback behavior is configured separately from shortcuts: `subtitleStyle.autoPauseVideoOnHover` defaults to `true` (pause on subtitle hover, resume on leave).
Mouse-hover playback behavior is configured separately from shortcuts: `subtitleStyle.autoPauseVideoOnHover` defaults to `true` (pause on subtitle hover; resume after leaving subtitle area). Optional popup behavior: set `subtitleStyle.autoPauseVideoOnYomitanPopup` to `true` to keep playback paused while Yomitan popup is open.
When a Yomitan popup is open, SubMiner also provides popup control shortcuts:
| Shortcut | Action |
| ----------- | ----------------------------------------------- |
| `J` | Scroll definitions down |
| `K` | Scroll definitions up |
| `ArrowDown` | Scroll definitions down |
| `ArrowUp` | Scroll definitions up |
| `M` | Mine/add selected term |
| `P` | Play selected term audio |
| `[` | Play previous available audio (selected source) |
| `]` | Play next available audio (selected source) |
## Keyboard-Driven Lookup Mode
These shortcuts are fixed (not configurable) and require overlay focus.
| Shortcut | Action |
| ------------------------------ | -------------------------------------------------------------------------------------------- |
| `Ctrl/Cmd+Shift+Y` | Toggle keyboard-driven token selection mode on/off |
| `Ctrl/Cmd+Y` | Toggle lookup popup for selected token (open when closed, close when open) |
| `ArrowLeft/Right`, `H`, or `L` | Move selected token (previous/next); if lookup is open, refresh definition for the new token |
Keyboard-driven mode draws a selection outline around the active token. Use `Ctrl/Cmd+Y` to open or close lookup for that token. While the popup is open, popup-local controls still work from the overlay (`J/K`, `ArrowUp/ArrowDown`, `M`, `P`, `[`, `]`) and focus is forced back to the overlay so token navigation can continue without clicking subtitle text again. Moving left/right past the start or end of the line jumps to the previous or next subtitle line and keeps playback paused if it was already paused.
## Subtitle & Feature Shortcuts

View File

@@ -209,7 +209,13 @@ Notes:
These keybindings only work when the overlay window has focus. See [Configuration](/configuration) for customization.
By default, hovering over subtitle text pauses mpv playback and leaving the subtitle area resumes playback. Set `subtitleStyle.autoPauseVideoOnHover` to `false` to disable this behavior.
By default, hovering over subtitle text pauses mpv playback. Playback resumes as soon as the cursor leaves subtitle text. Set `subtitleStyle.autoPauseVideoOnHover` to `false` to disable this behavior.
If you want playback to stay paused while a Yomitan popup is open, set `subtitleStyle.autoPauseVideoOnYomitanPopup` to `true`. When enabled, SubMiner auto-resumes on popup close only if SubMiner paused playback for that popup.
Keyboard-driven lookup mode is available with fixed shortcuts: `Ctrl/Cmd+Shift+Y` toggles token-selection mode, `ArrowLeft/Right` (or `H/L`) moves the selected token, and `Ctrl/Cmd+Y` opens or closes lookup for that token.
If the Yomitan popup is open, you can control it directly from the overlay without moving focus into the popup: `J/K` or `ArrowUp/ArrowDown` scroll definitions, `M` mines/adds the selected term, `P` plays term audio, `[` plays the previous available audio, and `]` plays the next available audio in the selected source. While lookup stays open, `ArrowLeft/Right` (or `H/L`) moves to the previous or next token and refreshes the definition for the new token. If you move past the start or end of the current subtitle line, SubMiner jumps to the previous or next subtitle line, moves the selector to the edge token on that line, and keeps playback paused if it was already paused.
### Drag-and-drop Queueing

View File

@@ -4,8 +4,38 @@ import {
inferAniSkipMetadataForFile,
buildSubminerScriptOpts,
parseAniSkipGuessitJson,
resolveAniSkipMetadataForFile,
} from './aniskip-metadata';
/**
 * Builds a minimal stand-in for a successful `fetch` Response.
 * Only the members the code under test reads (`ok`, `status`, `json`) are
 * provided; the cast papers over the rest of the Response interface.
 */
function makeMockResponse(payload: unknown): Response {
  const stub = {
    ok: true,
    status: 200,
    json: () => Promise.resolve(payload),
  };
  return stub as Response;
}
/** Converts any `fetch` input form (string, URL, or Request) to its URL string. */
function normalizeFetchInput(input: string | URL | Request): string {
  if (input instanceof URL) return input.toString();
  if (typeof input === 'string') return input;
  return input.url;
}
/**
 * Temporarily replaces `globalThis.fetch` with `handler` while `fn` runs,
 * restoring the real implementation afterwards — even when `fn` throws.
 */
async function withMockFetch(
  handler: (input: string | URL | Request) => Promise<Response>,
  fn: () => Promise<void>,
): Promise<void> {
  const realFetch = globalThis.fetch;
  const fakeFetch = (async (input: string | URL | Request) => handler(input)) as typeof fetch;
  (globalThis as { fetch: typeof fetch }).fetch = fakeFetch;
  try {
    await fn();
  } finally {
    (globalThis as { fetch: typeof fetch }).fetch = realFetch;
  }
}
test('parseAniSkipGuessitJson extracts title season and episode', () => {
const parsed = parseAniSkipGuessitJson(
JSON.stringify({ title: 'My Show', season: 2, episode: 7 }),
@@ -16,6 +46,10 @@ test('parseAniSkipGuessitJson extracts title season and episode', () => {
season: 2,
episode: 7,
source: 'guessit',
malId: null,
introStart: null,
introEnd: null,
lookupStatus: 'lookup_failed',
});
});
@@ -34,6 +68,10 @@ test('parseAniSkipGuessitJson prefers series over episode title', () => {
season: 1,
episode: 10,
source: 'guessit',
malId: null,
introStart: null,
introEnd: null,
lookupStatus: 'lookup_failed',
});
});
@@ -60,16 +98,80 @@ test('inferAniSkipMetadataForFile falls back to anime directory title when filen
assert.equal(parsed.source, 'fallback');
});
test('buildSubminerScriptOpts includes aniskip metadata fields', () => {
// Happy path: MAL prefix search yields candidates and the AniSkip API returns
// an opening interval, so resolution ends with lookupStatus 'ready'.
test('resolveAniSkipMetadataForFile resolves MAL id and intro payload', async () => {
  await withMockFetch(
    async (input) => {
      const url = normalizeFetchInput(input);
      if (url.includes('myanimelist.net/search/prefix.json')) {
        // The second item is the better title match and must win candidate
        // scoring even though it is not listed first.
        return makeMockResponse({
          categories: [
            {
              items: [
                { id: '9876', name: 'Wrong Match' },
                { id: '1234', name: 'My Show' },
              ],
            },
          ],
        });
      }
      // Skip-times lookup must use the resolved MAL id (1234) and the episode
      // parsed from the filename (7).
      if (url.includes('api.aniskip.com/v1/skip-times/1234/7')) {
        return makeMockResponse({
          found: true,
          results: [{ skip_type: 'op', interval: { start_time: 12.5, end_time: 54.2 } }],
        });
      }
      // Any other request means the resolver hit an unexpected endpoint.
      throw new Error(`unexpected url: ${url}`);
    },
    async () => {
      const resolved = await resolveAniSkipMetadataForFile('/media/Anime.My.Show.S01E07.mkv');
      assert.equal(resolved.malId, 1234);
      assert.equal(resolved.introStart, 12.5);
      assert.equal(resolved.introEnd, 54.2);
      assert.equal(resolved.lookupStatus, 'ready');
      // Title is derived from the filename with separators normalized to spaces.
      assert.equal(resolved.title, 'Anime My Show');
    },
  );
});
// When the MAL prefix search returns no categories, resolution stops early
// with a null MAL id and the 'missing_mal_id' status.
test('resolveAniSkipMetadataForFile emits missing_mal_id when MAL search misses', async () => {
  const emptySearch = async () => makeMockResponse({ categories: [] });
  await withMockFetch(emptySearch, async () => {
    const resolved = await resolveAniSkipMetadataForFile('/media/NopeShow.S01E03.mkv');
    assert.equal(resolved.malId, null);
    assert.equal(resolved.lookupStatus, 'missing_mal_id');
  });
});
test('buildSubminerScriptOpts includes aniskip payload fields', () => {
  // Fully-resolved metadata: MAL id, intro interval, and 'ready' status.
  const opts = buildSubminerScriptOpts('/tmp/SubMiner.AppImage', '/tmp/subminer.sock', {
    title: "Frieren: Beyond Journey's End",
    season: 1,
    episode: 5,
    source: 'guessit',
    malId: 1234,
    introStart: 30.5,
    introEnd: 62,
    lookupStatus: 'ready',
  });
  // Capture the encoded payload (script-opts entries are comma-separated).
  const payloadMatch = opts.match(/subminer-aniskip_payload=([^,]+)/);
  assert.match(opts, /subminer-binary_path=\/tmp\/SubMiner\.AppImage/);
  assert.match(opts, /subminer-socket_path=\/tmp\/subminer\.sock/);
  assert.match(opts, /subminer-aniskip_title=Frieren: Beyond Journey's End/);
  assert.match(opts, /subminer-aniskip_season=1/);
  assert.match(opts, /subminer-aniskip_episode=5/);
  assert.match(opts, /subminer-aniskip_mal_id=1234/);
  assert.match(opts, /subminer-aniskip_intro_start=30.5/);
  assert.match(opts, /subminer-aniskip_intro_end=62/);
  assert.match(opts, /subminer-aniskip_lookup_status=ready/);
  assert.ok(payloadMatch !== null);
  // `%` is an mpv script-opts escape prefix, so the encoding must never emit it.
  assert.equal(payloadMatch[1].includes('%'), false);
  // Round-trip: base64url-decode and verify the embedded skip-times payload.
  const payloadJson = Buffer.from(payloadMatch[1], 'base64url').toString('utf-8');
  const payload = JSON.parse(payloadJson);
  assert.equal(payload.found, true);
  const first = payload.results?.[0];
  assert.equal(first.skip_type, 'op');
  assert.equal(first.interval.start_time, 30.5);
  assert.equal(first.interval.end_time, 62);
});

View File

@@ -2,11 +2,22 @@ import path from 'node:path';
import { spawnSync } from 'node:child_process';
import { commandExists } from './util.js';
export type AniSkipLookupStatus =
| 'ready'
| 'missing_mal_id'
| 'missing_episode'
| 'missing_payload'
| 'lookup_failed';
export interface AniSkipMetadata {
title: string;
season: number | null;
episode: number | null;
source: 'guessit' | 'fallback';
malId: number | null;
introStart: number | null;
introEnd: number | null;
lookupStatus?: AniSkipLookupStatus;
}
interface InferAniSkipDeps {
@@ -14,6 +25,50 @@ interface InferAniSkipDeps {
runGuessit: (mediaPath: string) => string | null;
}
interface MalSearchResult {
id?: unknown;
name?: unknown;
}
interface MalSearchCategory {
items?: unknown;
}
interface MalSearchResponse {
categories?: unknown;
}
interface AniSkipIntervalPayload {
start_time?: unknown;
end_time?: unknown;
}
interface AniSkipSkipItemPayload {
skip_type?: unknown;
interval?: unknown;
}
interface AniSkipPayloadResponse {
found?: unknown;
results?: unknown;
}
const MAL_PREFIX_API = 'https://myanimelist.net/search/prefix.json?type=anime&keyword=';
const ANISKIP_PAYLOAD_API = 'https://api.aniskip.com/v1/skip-times/';
const MAL_USER_AGENT = 'SubMiner-launcher/ani-skip';
const MAL_MATCH_STOPWORDS = new Set([
'the',
'this',
'that',
'world',
'animated',
'series',
'season',
'no',
'on',
'and',
]);
function toPositiveInt(value: unknown): number | null {
if (typeof value === 'number' && Number.isFinite(value) && value > 0) {
return Math.floor(value);
@@ -27,6 +82,227 @@ function toPositiveInt(value: unknown): number | null {
return null;
}
/**
 * Coerces a value to a strictly-positive finite number.
 * Numbers pass through; strings go through `parseFloat` (so a leading-number
 * string like "12abc" yields 12, matching the original behavior). Everything
 * else — including 0, negatives, NaN, and non-numeric strings — yields null.
 */
function toPositiveNumber(value: unknown): number | null {
  switch (typeof value) {
    case 'number':
      return Number.isFinite(value) && value > 0 ? value : null;
    case 'string': {
      const numeric = Number.parseFloat(value);
      return Number.isFinite(numeric) && numeric > 0 ? numeric : null;
    }
    default:
      return null;
  }
}
/**
 * Canonicalizes a title for fuzzy matching: lowercase, every run of non-word
 * characters collapsed to a single space, and no leading/trailing whitespace.
 * (`\w` keeps underscores, same as the original regex.)
 */
function normalizeForMatch(value: string): string {
  const collapsed = value.toLowerCase().replace(/[^\w]+/g, ' ');
  return collapsed.split(/\s+/).filter(Boolean).join(' ');
}
/**
 * Splits a title into significant match tokens: normalized words at least
 * three characters long that are not generic stopwords (MAL_MATCH_STOPWORDS).
 */
function tokenizeMatchWords(value: string): string[] {
  const tokens: string[] = [];
  for (const word of normalizeForMatch(value).split(' ')) {
    if (word.length >= 3 && !MAL_MATCH_STOPWORDS.has(word)) {
      tokens.push(word);
    }
  }
  return tokens;
}
/**
 * Heuristic similarity score between the expected show title and a MAL
 * candidate title. Exact substring containment wins outright (120); otherwise
 * each shared significant token scores +30 and each miss -20 (with an extra
 * -80 when nothing matched at all), plus a coverage bonus/penalty: for
 * multi-token titles, >=80% coverage gains +30, >=60% gains +10, anything
 * lower loses 50; a single-token title fully covered gains +10.
 */
function titleOverlapScore(expectedTitle: string, candidateTitle: string): number {
  const normalizedExpected = normalizeForMatch(expectedTitle);
  const normalizedCandidate = normalizeForMatch(candidateTitle);
  if (!normalizedExpected || !normalizedCandidate) return 0;
  if (normalizedCandidate.includes(normalizedExpected)) return 120;
  const wanted = tokenizeMatchWords(expectedTitle);
  if (wanted.length === 0) return 0;
  const available = new Set(tokenizeMatchWords(candidateTitle));
  let hits = 0;
  let total = 0;
  for (const token of wanted) {
    if (available.has(token)) {
      total += 30;
      hits += 1;
    } else {
      total -= 20;
    }
  }
  if (hits === 0) {
    total -= 80;
  }
  const coverage = hits / wanted.length;
  if (wanted.length >= 2) {
    if (coverage >= 0.8) total += 30;
    else if (coverage >= 0.6) total += 10;
    else total -= 50;
  } else if (coverage >= 1) {
    total += 10;
  }
  return total;
}
/**
 * Detects wording in a candidate title that marks it as a sequel season:
 * "Season 2".."Season 4", ordinal/word forms, or roman numerals II-IV.
 * The normalized title is padded with spaces so numeral markers match only
 * as whole words.
 */
function hasAnySequelMarker(candidateTitle: string): boolean {
  const padded = ` ${normalizeForMatch(candidateTitle)} `;
  if (!padded.trim()) return false;
  const sequelMarkers = [
    'season 2',
    'season 3',
    'season 4',
    '2nd season',
    '3rd season',
    '4th season',
    'second season',
    'third season',
    'fourth season',
    ' ii ',
    ' iii ',
    ' iv ',
  ];
  for (const marker of sequelMarkers) {
    if (padded.includes(marker)) return true;
  }
  return false;
}
/**
 * Scores how strongly a MAL candidate title signals the requested season.
 * Returns 0 when no valid season is requested or the title normalizes to
 * nothing. Season 1 gains +20 when the title has no sequel wording and -60
 * when it does. For later seasons, an explicit marker for that season
 * ("Season N", "Nth season", roman numeral/word aliases for 2-5) scores +40;
 * otherwise mismatched sequel wording costs -20 and an unmarked title gets a
 * neutral +5.
 */
function seasonSignalScore(requestedSeason: number | null, candidateTitle: string): number {
  const season = toPositiveInt(requestedSeason);
  if (!season || season < 1) return 0;
  const normalized = ` ${normalizeForMatch(candidateTitle)} `;
  if (!normalized.trim()) return 0;
  if (season === 1) {
    return hasAnySequelMarker(candidateTitle) ? -60 : 20;
  }
  const numericMarker = ` season ${season} `;
  // Use the grammatically correct ordinal ("2nd", "3rd", "21st", ...) instead
  // of a hard-coded "th": the old " 2th season " marker could never match a
  // real title (seasons 2-4 were only rescued by the alias table below), and
  // seasons like 21/22/23 were missed entirely.
  const ordinalMarker = ` ${season}${ordinalSuffix(season)} season `;
  if (normalized.includes(numericMarker) || normalized.includes(ordinalMarker)) {
    return 40;
  }
  // Partial<Record<...>> permits indexing with a plain number; the previous
  // `as const` object raised TS7053 under strict settings for that access.
  const romanAliases: Partial<Record<number, readonly string[]>> = {
    2: [' ii ', ' second season ', ' 2nd season '],
    3: [' iii ', ' third season ', ' 3rd season '],
    4: [' iv ', ' fourth season ', ' 4th season '],
    5: [' v ', ' fifth season ', ' 5th season '],
  };
  const aliases = romanAliases[season] ?? [];
  return aliases.some((alias) => normalized.includes(alias))
    ? 40
    : hasAnySequelMarker(candidateTitle)
      ? -20
      : 5;
}

/** Returns the English ordinal suffix ("st"/"nd"/"rd"/"th") for a positive integer. */
function ordinalSuffix(n: number): string {
  const lastTwo = n % 100;
  if (lastTwo >= 11 && lastTwo <= 13) return 'th';
  switch (n % 10) {
    case 1:
      return 'st';
    case 2:
      return 'nd';
    case 3:
      return 'rd';
    default:
      return 'th';
  }
}
/**
 * Flattens the MAL prefix-search response into a list of {id, name} records,
 * tolerating a missing/invalid `categories` array and malformed items.
 */
function toMalSearchItems(payload: unknown): MalSearchResult[] {
  const response = payload as MalSearchResponse;
  if (!Array.isArray(response?.categories)) return [];
  const collected: MalSearchResult[] = [];
  for (const rawCategory of response.categories) {
    const category = rawCategory as MalSearchCategory;
    const entries = Array.isArray(category?.items) ? category.items : [];
    for (const entry of entries) {
      const record = entry as Record<string, unknown>;
      collected.push({ id: record?.id, name: record?.name });
    }
  }
  return collected;
}
/** Thin alias over toPositiveNumber for reading AniSkip interval timestamps. */
function normalizeEpisodePayload(value: unknown): number | null {
  const normalized = toPositiveNumber(value);
  return normalized;
}
/**
 * Extracts the first valid opening-skip interval from an AniSkip payload.
 * Only entries with skip_type "op", a non-null object interval, and a
 * positive start/end pair where end > start qualify; returns null otherwise.
 */
function parseAniSkipPayload(payload: unknown): { start: number; end: number } | null {
  const response = payload as AniSkipPayloadResponse;
  if (!Array.isArray(response?.results)) return null;
  for (const candidate of response.results) {
    const item = candidate as AniSkipSkipItemPayload;
    const isOpInterval =
      item.skip_type === 'op' && typeof item.interval === 'object' && item.interval !== null;
    if (!isOpInterval) continue;
    const interval = item.interval as AniSkipIntervalPayload;
    const start = normalizeEpisodePayload(interval?.start_time);
    const end = normalizeEpisodePayload(interval?.end_time);
    if (start === null || end === null || end <= start) continue;
    return { start, end };
  }
  return null;
}
/**
 * GETs `url` with the launcher user agent and parses the body as JSON.
 * Returns null on a non-2xx status or an unparseable body; network-level
 * errors (fetch rejection) still propagate to the caller.
 */
async function fetchJson<T>(url: string): Promise<T | null> {
  const response = await fetch(url, { headers: { 'User-Agent': MAL_USER_AGENT } });
  if (!response.ok) return null;
  try {
    const parsed = (await response.json()) as T;
    return parsed;
  } catch {
    return null;
  }
}
/**
 * Resolves a MAL anime id for `title` via the prefix-search API. For seasons
 * past the first, the query is augmented with "Season N". Each candidate is
 * scored with title-overlap plus season-signal heuristics; the best-scoring
 * id wins. Returns null when no usable candidate comes back.
 */
async function resolveMalIdFromTitle(title: string, season: number | null): Promise<number | null> {
  const query = season && season > 1 ? `${title} Season ${season}` : title;
  const searchPayload = await fetchJson<unknown>(`${MAL_PREFIX_API}${encodeURIComponent(query)}`);
  const candidates = toMalSearchItems(searchPayload);
  if (candidates.length === 0) return null;
  let winner: number | null = null;
  let winningScore = Number.NEGATIVE_INFINITY;
  for (const candidate of candidates) {
    const malId = toPositiveInt(candidate.id);
    if (!malId) continue;
    if (typeof candidate.name !== 'string' || candidate.name === '') continue;
    const candidateScore =
      titleOverlapScore(title, candidate.name) + seasonSignalScore(season, candidate.name);
    if (candidateScore > winningScore) {
      winningScore = candidateScore;
      winner = malId;
    }
  }
  return winner;
}
/**
 * Fetches AniSkip skip-times for a MAL id/episode pair and returns the
 * opening interval, or null when the API has no result for that episode.
 */
async function fetchAniSkipPayload(
  malId: number,
  episode: number,
): Promise<{ start: number; end: number } | null> {
  const url = `${ANISKIP_PAYLOAD_API}${malId}/${episode}?types=op&types=ed`;
  const payload = (await fetchJson<unknown>(url)) as AniSkipPayloadResponse | null;
  if (payload?.found !== true) return null;
  return parseAniSkipPayload(payload);
}
function detectEpisodeFromName(baseName: string): number | null {
const patterns = [
/[Ss]\d+[Ee](\d{1,3})/,
@@ -133,6 +409,10 @@ export function parseAniSkipGuessitJson(stdout: string, mediaPath: string): AniS
season,
episode: episodeFromDirect ?? episodeFromList,
source: 'guessit',
malId: null,
introStart: null,
introEnd: null,
lookupStatus: 'lookup_failed',
};
} catch {
return null;
@@ -171,9 +451,70 @@ export function inferAniSkipMetadataForFile(
season: detectSeasonFromNameOrDir(mediaPath),
episode: detectEpisodeFromName(baseName),
source: 'fallback',
malId: null,
introStart: null,
introEnd: null,
lookupStatus: 'lookup_failed',
};
}
/**
 * Infers title/season/episode for `mediaPath` offline, then enriches the
 * metadata online: resolves a MAL id, fetches the AniSkip opening interval,
 * and tags the result with a lookup status describing how far resolution got
 * ('ready', 'missing_mal_id', 'missing_episode', 'missing_payload', or
 * 'lookup_failed'). Network failures never throw; they degrade to
 * 'lookup_failed' while keeping whatever the offline inference produced.
 */
export async function resolveAniSkipMetadataForFile(mediaPath: string): Promise<AniSkipMetadata> {
  const inferred = inferAniSkipMetadataForFile(mediaPath);
  if (!inferred.title) {
    return { ...inferred, lookupStatus: 'lookup_failed' };
  }
  try {
    const malId = await resolveMalIdFromTitle(inferred.title, inferred.season);
    if (!malId) {
      return {
        ...inferred,
        malId: null,
        introStart: null,
        introEnd: null,
        lookupStatus: 'missing_mal_id',
      };
    }
    if (!inferred.episode) {
      return {
        ...inferred,
        malId,
        introStart: null,
        introEnd: null,
        lookupStatus: 'missing_episode',
      };
    }
    const interval = await fetchAniSkipPayload(malId, inferred.episode);
    if (!interval) {
      return {
        ...inferred,
        malId,
        introStart: null,
        introEnd: null,
        lookupStatus: 'missing_payload',
      };
    }
    return {
      ...inferred,
      malId,
      introStart: interval.start,
      introEnd: interval.end,
      lookupStatus: 'ready',
    };
  } catch {
    // Offline inference always yields null malId/intro fields, so spreading
    // `inferred` unchanged matches the original's field-by-field copy.
    return { ...inferred, lookupStatus: 'lookup_failed' };
  }
}
function sanitizeScriptOptValue(value: string): string {
return value
.replace(/,/g, ' ')
@@ -182,6 +523,30 @@ function sanitizeScriptOptValue(value: string): string {
.trim();
}
/**
 * Encodes a pre-fetched AniSkip "skip-times" payload for transport through
 * mpv --script-opts. Requires a MAL id and a well-ordered positive intro
 * interval; returns null otherwise.
 */
function buildLauncherAniSkipPayload(aniSkipMetadata: AniSkipMetadata): string | null {
  const { malId, introStart, introEnd } = aniSkipMetadata;
  if (!malId || !introStart || !introEnd || introEnd <= introStart) {
    return null;
  }
  const payload = {
    found: true,
    results: [
      {
        skip_type: 'op',
        interval: {
          start_time: introStart,
          end_time: introEnd,
        },
      },
    ],
  };
  // mpv --script-opts treats `%` as an escape prefix, so URL-encoding can break parsing.
  // Base64url stays script-opts-safe and is decoded by the plugin launcher payload parser.
  const json = JSON.stringify(payload);
  return Buffer.from(json, 'utf-8').toString('base64url');
}
export function buildSubminerScriptOpts(
appPath: string,
socketPath: string,
@@ -200,5 +565,23 @@ export function buildSubminerScriptOpts(
if (aniSkipMetadata && aniSkipMetadata.episode && aniSkipMetadata.episode > 0) {
parts.push(`subminer-aniskip_episode=${aniSkipMetadata.episode}`);
}
if (aniSkipMetadata && aniSkipMetadata.malId && aniSkipMetadata.malId > 0) {
parts.push(`subminer-aniskip_mal_id=${aniSkipMetadata.malId}`);
}
if (aniSkipMetadata && aniSkipMetadata.introStart !== null && aniSkipMetadata.introStart > 0) {
parts.push(`subminer-aniskip_intro_start=${aniSkipMetadata.introStart}`);
}
if (aniSkipMetadata && aniSkipMetadata.introEnd !== null && aniSkipMetadata.introEnd > 0) {
parts.push(`subminer-aniskip_intro_end=${aniSkipMetadata.introEnd}`);
}
if (aniSkipMetadata?.lookupStatus) {
parts.push(
`subminer-aniskip_lookup_status=${sanitizeScriptOptValue(aniSkipMetadata.lookupStatus)}`,
);
}
const aniskipPayload = aniSkipMetadata ? buildLauncherAniSkipPayload(aniSkipMetadata) : null;
if (aniskipPayload) {
parts.push(`subminer-aniskip_payload=${sanitizeScriptOptValue(aniskipPayload)}`);
}
return parts.join(',');
}

View File

@@ -143,14 +143,10 @@ export async function runPlaybackCommand(context: LauncherCommandContext): Promi
pluginRuntimeConfig.autoStartPauseUntilReady;
if (shouldPauseUntilOverlayReady) {
log(
'info',
args.logLevel,
'Configured to pause mpv until overlay and tokenization are ready',
);
log('info', args.logLevel, 'Configured to pause mpv until overlay and tokenization are ready');
}
startMpv(
await startMpv(
selectedTarget.target,
selectedTarget.kind,
args,
@@ -198,11 +194,7 @@ export async function runPlaybackCommand(context: LauncherCommandContext): Promi
if (ready) {
log('info', args.logLevel, 'MPV IPC socket ready, relying on mpv plugin auto-start');
} else {
log(
'info',
args.logLevel,
'MPV IPC socket not ready yet, relying on mpv plugin auto-start',
);
log('info', args.logLevel, 'MPV IPC socket not ready yet, relying on mpv plugin auto-start');
}
} else if (ready) {
log(

View File

@@ -52,7 +52,10 @@ export function parsePluginRuntimeConfigContent(
continue;
}
if (key === 'auto_start_visible_overlay') {
runtimeConfig.autoStartVisibleOverlay = parseBooleanValue('auto_start_visible_overlay', value);
runtimeConfig.autoStartVisibleOverlay = parseBooleanValue(
'auto_start_visible_overlay',
value,
);
continue;
}
if (key === 'auto_start_pause_until_ready') {

View File

@@ -239,8 +239,7 @@ export function parseJellyfinPreviewAuthResponse(raw: string): JellyfinPreviewAu
const serverUrl = sanitizeServerUrl(
typeof candidate.serverUrl === 'string' ? candidate.serverUrl : '',
);
const accessToken =
typeof candidate.accessToken === 'string' ? candidate.accessToken.trim() : '';
const accessToken = typeof candidate.accessToken === 'string' ? candidate.accessToken.trim() : '';
const userId = typeof candidate.userId === 'string' ? candidate.userId.trim() : '';
if (!serverUrl || !accessToken) return null;
@@ -271,9 +270,7 @@ export function readUtf8FileAppendedSince(logPath: string, offsetBytes: number):
const buffer = fs.readFileSync(logPath);
if (buffer.length === 0) return '';
const normalizedOffset =
Number.isFinite(offsetBytes) && offsetBytes >= 0
? Math.floor(offsetBytes)
: 0;
Number.isFinite(offsetBytes) && offsetBytes >= 0 ? Math.floor(offsetBytes) : 0;
const startOffset = normalizedOffset > buffer.length ? 0 : normalizedOffset;
return buffer.subarray(startOffset).toString('utf8');
} catch {
@@ -399,7 +396,9 @@ async function runAppJellyfinCommand(
const hasCommandSignal = (output: string): boolean => {
if (label === 'jellyfin-libraries') {
return output.includes('Jellyfin library:') || output.includes('No Jellyfin libraries found.');
return (
output.includes('Jellyfin library:') || output.includes('No Jellyfin libraries found.')
);
}
if (label === 'jellyfin-items') {
return (
@@ -550,7 +549,9 @@ async function resolveJellyfinSelectionViaApp(
}
const configuredDefaultLibraryId = session.defaultLibraryId;
const hasConfiguredDefault = libraries.some((library) => library.id === configuredDefaultLibraryId);
const hasConfiguredDefault = libraries.some(
(library) => library.id === configuredDefaultLibraryId,
);
let libraryId = hasConfiguredDefault ? configuredDefaultLibraryId : '';
if (!libraryId) {
libraryId = pickLibrary(

View File

@@ -333,7 +333,10 @@ test('parseJellyfinErrorFromAppOutput extracts main runtime error lines', () =>
[subminer] - 2026-03-01 13:10:34 - ERROR - [main] runJellyfinCommand failed: {"message":"Missing Jellyfin password."}
`);
assert.equal(parsed, '[main] runJellyfinCommand failed: {"message":"Missing Jellyfin password."}');
assert.equal(
parsed,
'[main] runJellyfinCommand failed: {"message":"Missing Jellyfin password."}',
);
});
test('parseJellyfinPreviewAuthResponse parses valid structured response payload', () => {
@@ -385,7 +388,9 @@ test('shouldRetryWithStartForNoRunningInstance matches expected app lifecycle er
true,
);
assert.equal(
shouldRetryWithStartForNoRunningInstance('Missing Jellyfin session. Run --jellyfin-login first.'),
shouldRetryWithStartForNoRunningInstance(
'Missing Jellyfin session. Run --jellyfin-login first.',
),
false,
);
});
@@ -407,10 +412,13 @@ test('readUtf8FileAppendedSince treats offset as bytes and survives multibyte lo
});
test('parseEpisodePathFromDisplay extracts series and season from episode display titles', () => {
assert.deepEqual(parseEpisodePathFromDisplay('KONOSUBA S01E03 A Panty Treasure in This Right Hand!'), {
seriesName: 'KONOSUBA',
seasonNumber: 1,
});
assert.deepEqual(
parseEpisodePathFromDisplay('KONOSUBA S01E03 A Panty Treasure in This Right Hand!'),
{
seriesName: 'KONOSUBA',
seasonNumber: 1,
},
);
assert.deepEqual(parseEpisodePathFromDisplay('Frieren S2E10 Something'), {
seriesName: 'Frieren',
seasonNumber: 2,

View File

@@ -6,7 +6,7 @@ import { spawn, spawnSync } from 'node:child_process';
import type { LogLevel, Backend, Args, MpvTrack } from './types.js';
import { DEFAULT_MPV_SUBMINER_ARGS, DEFAULT_YOUTUBE_YTDL_FORMAT } from './types.js';
import { log, fail, getMpvLogPath } from './log.js';
import { buildSubminerScriptOpts, inferAniSkipMetadataForFile } from './aniskip-metadata.js';
import { buildSubminerScriptOpts, resolveAniSkipMetadataForFile } from './aniskip-metadata.js';
import {
commandExists,
isExecutable,
@@ -419,7 +419,7 @@ export async function loadSubtitleIntoMpv(
}
}
export function startMpv(
export async function startMpv(
target: string,
targetKind: 'file' | 'url',
args: Args,
@@ -479,7 +479,8 @@ export function startMpv(
if (options?.startPaused) {
mpvArgs.push('--pause=yes');
}
const aniSkipMetadata = targetKind === 'file' ? inferAniSkipMetadataForFile(target) : null;
const aniSkipMetadata =
targetKind === 'file' ? await resolveAniSkipMetadataForFile(target) : null;
const scriptOpts = buildSubminerScriptOpts(appPath, socketPath, aniSkipMetadata);
if (aniSkipMetadata) {
log(

View File

@@ -1,6 +1,6 @@
{
"name": "subminer",
"version": "0.2.1",
"version": "0.3.0",
"description": "All-in-one sentence mining overlay with AnkiConnect and dictionary integration",
"packageManager": "bun@1.3.5",
"main": "dist/main-entry.js",
@@ -23,8 +23,8 @@
"test:plugin:src": "lua scripts/test-plugin-start-gate.lua",
"test:launcher:smoke:src": "bun test launcher/smoke.e2e.test.ts",
"test:launcher:src": "bun test launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/smoke.e2e.test.ts && bun run test:plugin:src",
"test:core:src": "bun test src/cli/args.test.ts src/cli/help.test.ts src/core/services/cli-command.test.ts src/core/services/field-grouping-overlay.test.ts src/core/services/numeric-shortcut-session.test.ts src/core/services/secondary-subtitle.test.ts src/core/services/mpv-render-metrics.test.ts src/core/services/overlay-content-measurement.test.ts src/core/services/mpv-control.test.ts src/core/services/mpv.test.ts src/core/services/runtime-options-ipc.test.ts src/core/services/runtime-config.test.ts src/core/services/config-hot-reload.test.ts src/core/services/discord-presence.test.ts src/core/services/tokenizer.test.ts src/core/services/tokenizer/annotation-stage.test.ts src/core/services/tokenizer/parser-selection-stage.test.ts src/core/services/tokenizer/parser-enrichment-stage.test.ts src/core/services/subsync.test.ts src/core/services/overlay-bridge.test.ts src/core/services/overlay-shortcut-handler.test.ts src/core/services/mining.test.ts src/core/services/anki-jimaku.test.ts src/core/services/jimaku-download-path.test.ts src/core/services/jellyfin.test.ts src/core/services/jellyfin-remote.test.ts src/core/services/immersion-tracker-service.test.ts src/core/services/overlay-runtime-init.test.ts src/core/services/app-ready.test.ts src/core/services/startup-bootstrap.test.ts src/core/services/subtitle-processing-controller.test.ts src/core/services/anilist/anilist-update-queue.test.ts src/core/utils/shortcut-config.test.ts src/renderer/error-recovery.test.ts src/renderer/subtitle-render.test.ts src/renderer/handlers/mouse.test.ts src/renderer/modals/jimaku.test.ts src/subsync/utils.test.ts src/main/anilist-url-guard.test.ts src/window-trackers/x11-tracker.test.ts launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts",
"test:core:dist": "bun test dist/cli/args.test.js dist/cli/help.test.js dist/core/services/cli-command.test.js dist/core/services/ipc.test.js dist/core/services/anki-jimaku-ipc.test.js dist/core/services/field-grouping-overlay.test.js dist/core/services/numeric-shortcut-session.test.js dist/core/services/secondary-subtitle.test.js dist/core/services/mpv-render-metrics.test.js dist/core/services/overlay-content-measurement.test.js dist/core/services/mpv-control.test.js dist/core/services/mpv.test.js dist/core/services/runtime-options-ipc.test.js dist/core/services/runtime-config.test.js dist/core/services/config-hot-reload.test.js dist/core/services/discord-presence.test.js dist/core/services/tokenizer.test.js dist/core/services/tokenizer/annotation-stage.test.js dist/core/services/tokenizer/parser-selection-stage.test.js dist/core/services/tokenizer/parser-enrichment-stage.test.js dist/core/services/subsync.test.js dist/core/services/overlay-bridge.test.js dist/core/services/overlay-manager.test.js dist/core/services/overlay-shortcut-handler.test.js dist/core/services/mining.test.js dist/core/services/anki-jimaku.test.js dist/core/services/jimaku-download-path.test.js dist/core/services/jellyfin.test.js dist/core/services/jellyfin-remote.test.js dist/core/services/immersion-tracker-service.test.js dist/core/services/overlay-runtime-init.test.js dist/core/services/app-ready.test.js dist/core/services/startup-bootstrap.test.js dist/core/services/subtitle-processing-controller.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/anilist/anilist-update-queue.test.js dist/renderer/error-recovery.test.js dist/renderer/subtitle-render.test.js dist/renderer/handlers/mouse.test.js dist/renderer/modals/jimaku.test.js dist/subsync/utils.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/x11-tracker.test.js",
"test:core:src": "bun test src/cli/args.test.ts src/cli/help.test.ts src/core/services/cli-command.test.ts src/core/services/field-grouping-overlay.test.ts src/core/services/numeric-shortcut-session.test.ts src/core/services/secondary-subtitle.test.ts src/core/services/mpv-render-metrics.test.ts src/core/services/overlay-content-measurement.test.ts src/core/services/mpv-control.test.ts src/core/services/mpv.test.ts src/core/services/runtime-options-ipc.test.ts src/core/services/runtime-config.test.ts src/core/services/config-hot-reload.test.ts src/core/services/discord-presence.test.ts src/core/services/tokenizer.test.ts src/core/services/tokenizer/annotation-stage.test.ts src/core/services/tokenizer/parser-selection-stage.test.ts src/core/services/tokenizer/parser-enrichment-stage.test.ts src/core/services/subsync.test.ts src/core/services/overlay-bridge.test.ts src/core/services/overlay-shortcut-handler.test.ts src/core/services/mining.test.ts src/core/services/anki-jimaku.test.ts src/core/services/jimaku-download-path.test.ts src/core/services/jellyfin.test.ts src/core/services/jellyfin-remote.test.ts src/core/services/immersion-tracker-service.test.ts src/core/services/overlay-runtime-init.test.ts src/core/services/app-ready.test.ts src/core/services/startup-bootstrap.test.ts src/core/services/subtitle-processing-controller.test.ts src/core/services/anilist/anilist-update-queue.test.ts src/core/utils/shortcut-config.test.ts src/renderer/error-recovery.test.ts src/renderer/subtitle-render.test.ts src/renderer/handlers/mouse.test.ts src/renderer/handlers/keyboard.test.ts src/renderer/modals/jimaku.test.ts src/subsync/utils.test.ts src/main/anilist-url-guard.test.ts src/window-trackers/x11-tracker.test.ts launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts",
"test:core:dist": "bun test dist/cli/args.test.js dist/cli/help.test.js dist/core/services/cli-command.test.js dist/core/services/ipc.test.js dist/core/services/anki-jimaku-ipc.test.js dist/core/services/field-grouping-overlay.test.js dist/core/services/numeric-shortcut-session.test.js dist/core/services/secondary-subtitle.test.js dist/core/services/mpv-render-metrics.test.js dist/core/services/overlay-content-measurement.test.js dist/core/services/mpv-control.test.js dist/core/services/mpv.test.js dist/core/services/runtime-options-ipc.test.js dist/core/services/runtime-config.test.js dist/core/services/config-hot-reload.test.js dist/core/services/discord-presence.test.js dist/core/services/tokenizer.test.js dist/core/services/tokenizer/annotation-stage.test.js dist/core/services/tokenizer/parser-selection-stage.test.js dist/core/services/tokenizer/parser-enrichment-stage.test.js dist/core/services/subsync.test.js dist/core/services/overlay-bridge.test.js dist/core/services/overlay-manager.test.js dist/core/services/overlay-shortcut-handler.test.js dist/core/services/mining.test.js dist/core/services/anki-jimaku.test.js dist/core/services/jimaku-download-path.test.js dist/core/services/jellyfin.test.js dist/core/services/jellyfin-remote.test.js dist/core/services/immersion-tracker-service.test.js dist/core/services/overlay-runtime-init.test.js dist/core/services/app-ready.test.js dist/core/services/startup-bootstrap.test.js dist/core/services/subtitle-processing-controller.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/anilist/anilist-update-queue.test.js dist/renderer/error-recovery.test.js dist/renderer/subtitle-render.test.js dist/renderer/handlers/mouse.test.js dist/renderer/handlers/keyboard.test.js dist/renderer/modals/jimaku.test.js dist/subsync/utils.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/x11-tracker.test.js",
"test:core:smoke:dist": "bun test dist/cli/help.test.js dist/core/services/runtime-config.test.js dist/core/services/ipc.test.js dist/core/services/overlay-manager.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/startup-bootstrap.test.js dist/renderer/error-recovery.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/x11-tracker.test.js",
"test:smoke:dist": "bun run test:config:smoke:dist && bun run test:core:smoke:dist",
"test:subtitle:dist": "echo \"Subtitle tests are currently not configured\"",

View File

@@ -53,6 +53,9 @@ aniskip_mal_id=
# Force episode number (optional). Leave blank for filename/title detection.
aniskip_episode=
# Optional pre-fetched AniSkip payload for this media (JSON or base64 JSON). When set, the plugin uses this directly and skips network lookup.
aniskip_payload=
# Show intro skip OSD button while inside OP range.
aniskip_show_button=yes

View File

@@ -13,6 +13,12 @@ function M.create(ctx)
local mal_lookup_cache = {}
local payload_cache = {}
local title_context_cache = {}
local base64_reverse = {}
local base64_chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
for i = 1, #base64_chars do
base64_reverse[base64_chars:sub(i, i)] = i - 1
end
local function url_encode(text)
if type(text) ~= "string" then
@@ -25,6 +31,109 @@ function M.create(ctx)
return encoded:gsub(" ", "%%20")
end
-- Parse `text` as JSON via mpv's utils.parse_json.
-- Returns the decoded table on success; otherwise nil plus the parser's
-- error message (a bare nil when `text` is not a string at all).
local function parse_json_payload(text)
if type(text) == "string" then
local decoded, decode_error = utils.parse_json(text)
if type(decoded) == "table" then
return decoded
end
return nil, decode_error
end
return nil
end
-- Decode a base64 (or base64url) string into raw bytes.
-- Tolerates embedded whitespace, the URL-safe '-'/'_' alphabet and missing
-- '=' padding; returns nil for anything that is not decodable base64.
-- Relies on the closure-level `base64_reverse` char -> 6-bit value table.
local function decode_base64(input)
if type(input) ~= "string" then
return nil
end
-- Normalize: drop whitespace, then map the URL-safe alphabet to standard.
local cleaned = input:gsub("%s", ""):gsub("-", "+"):gsub("_", "/")
cleaned = cleaned:match("^%s*(.-)%s*$") or ""
if cleaned == "" then
return nil
end
-- A valid base64 payload can never be 1 char long modulo 4.
if #cleaned % 4 == 1 then
return nil
end
-- Re-pad to a multiple of four so the 4-char groups below are uniform.
if #cleaned % 4 ~= 0 then
cleaned = cleaned .. string.rep("=", 4 - (#cleaned % 4))
end
if not cleaned:match("^[A-Za-z0-9+/%=]+$") then
return nil
end
local out = {}
local out_len = 0
-- Each 4-character group encodes up to 3 output bytes.
for index = 1, #cleaned, 4 do
local c1 = cleaned:sub(index, index)
local c2 = cleaned:sub(index + 1, index + 1)
local c3 = cleaned:sub(index + 2, index + 2)
local c4 = cleaned:sub(index + 3, index + 3)
local v1 = base64_reverse[c1]
local v2 = base64_reverse[c2]
-- The first two characters of a group are always mandatory.
if not v1 or not v2 then
return nil
end
-- '=' padding contributes zero bits; any other unknown char is an error.
-- (Note base64_reverse["A"] == 0 is truthy in Lua, so the checks below
-- only fail on genuinely unmapped characters.)
local v3 = c3 == "=" and 0 or base64_reverse[c3]
local v4 = c4 == "=" and 0 or base64_reverse[c4]
if (c3 ~= "=" and not v3) or (c4 ~= "=" and not v4) then
return nil
end
-- Pack four 6-bit values into a 24-bit number, then split into 3 bytes.
local n = (((v1 * 64 + v2) * 64 + v3) * 64 + v4)
local b1 = math.floor(n / 65536)
local remaining = n % 65536
local b2 = math.floor(remaining / 256)
local b3 = remaining % 256
out_len = out_len + 1
out[out_len] = string.char(b1)
-- Padding characters mark bytes that were never present in the input.
if c3 ~= "=" then
out_len = out_len + 1
out[out_len] = string.char(b2)
end
if c4 ~= "=" then
out_len = out_len + 1
out[out_len] = string.char(b3)
end
end
return table.concat(out)
end
-- Resolve the launcher-provided AniSkip payload from the aniskip_payload
-- script-opt. Tries, in order: raw JSON, percent-decoded JSON, and
-- base64-decoded JSON. Returns the decoded table, nil when the option is
-- blank, or nil with a warning log when the option is set but unparseable.
local function resolve_launcher_payload()
local raw_payload = type(opts.aniskip_payload) == "string" and opts.aniskip_payload or ""
local trimmed = raw_payload:match("^%s*(.-)%s*$") or ""
if trimmed == "" then
return nil
end
-- 1) The payload may already be plain JSON.
local parsed, parse_error = parse_json_payload(trimmed)
if type(parsed) == "table" then
return parsed
end
-- 2) The payload may be percent-encoded JSON; decode %XX escapes and retry.
local url_decoded = trimmed:gsub("%%(%x%x)", function(hex)
local value = tonumber(hex, 16)
if value then
return string.char(value)
end
-- Unreachable in practice (%x%x always parses), kept as a safe fallback.
return "%"
end)
if url_decoded ~= trimmed then
parsed, parse_error = parse_json_payload(url_decoded)
if type(parsed) == "table" then
return parsed
end
end
-- 3) The payload may be base64(-url)-encoded JSON.
local b64_decoded = decode_base64(trimmed)
if type(b64_decoded) == "string" and b64_decoded ~= "" then
parsed, parse_error = parse_json_payload(b64_decoded)
if type(parsed) == "table" then
return parsed
end
end
subminer_log("warn", "aniskip", "Invalid launcher AniSkip payload: " .. tostring(parse_error or "unparseable"))
return nil
end
local function run_json_curl_async(url, callback)
mp.command_native_async({
name = "subprocess",
@@ -296,6 +405,8 @@ function M.create(ctx)
state.aniskip.episode = nil
state.aniskip.intro_start = nil
state.aniskip.intro_end = nil
state.aniskip.payload = nil
state.aniskip.payload_source = nil
remove_aniskip_chapters()
end
@@ -366,7 +477,17 @@ function M.create(ctx)
state.aniskip.intro_end = intro_end
state.aniskip.prompt_shown = false
set_intro_chapters(intro_start, intro_end)
subminer_log("info", "aniskip", string.format("Intro window %.3f -> %.3f (MAL %d, ep %d)", intro_start, intro_end, mal_id, episode))
subminer_log(
"info",
"aniskip",
string.format(
"Intro window %.3f -> %.3f (MAL %s, ep %s)",
intro_start,
intro_end,
tostring(mal_id or "-"),
tostring(episode or "-")
)
)
return true
end
end
@@ -374,6 +495,10 @@ function M.create(ctx)
return false
end
-- True when the launcher supplied a non-blank aniskip_payload option.
local function has_launcher_payload()
local payload = opts.aniskip_payload
if type(payload) ~= "string" then
return false
end
return payload:match("%S") ~= nil
end
local function is_launcher_context()
local forced_title = type(opts.aniskip_title) == "string" and (opts.aniskip_title:match("^%s*(.-)%s*$") or "") or ""
if forced_title ~= "" then
@@ -391,6 +516,9 @@ function M.create(ctx)
if forced_season and forced_season > 0 then
return true
end
if has_launcher_payload() then
return true
end
return false
end
@@ -500,6 +628,18 @@ function M.create(ctx)
end)
end
-- Apply a launcher-supplied AniSkip payload directly, skipping any network
-- lookup. Records the payload and its provenance ("launcher") plus the
-- identifying metadata on state.aniskip before applying. Returns false when
-- `payload` is nil; otherwise returns whatever apply_aniskip_payload returns.
local function fetch_payload_from_launcher(payload, mal_id, title, episode)
if not payload then
return false
end
state.aniskip.payload = payload
state.aniskip.payload_source = "launcher"
state.aniskip.mal_id = mal_id
state.aniskip.title = title
state.aniskip.episode = episode
return apply_aniskip_payload(mal_id, title, episode, payload)
end
local function fetch_aniskip_for_current_media(trigger_source)
local trigger = type(trigger_source) == "string" and trigger_source or "manual"
if not opts.aniskip_enabled then
@@ -518,6 +658,28 @@ function M.create(ctx)
reset_aniskip_fields()
local title, episode, season = resolve_title_and_episode()
local lookup_titles = resolve_lookup_titles(title)
local launcher_payload = resolve_launcher_payload()
if launcher_payload then
local launcher_mal_id = tonumber(opts.aniskip_mal_id)
if not launcher_mal_id then
launcher_mal_id = nil
end
if fetch_payload_from_launcher(launcher_payload, launcher_mal_id, title, episode) then
subminer_log(
"info",
"aniskip",
string.format(
"Using launcher-provided AniSkip payload (title=%s, season=%s, episode=%s)",
tostring(title or ""),
tostring(season or "-"),
tostring(episode or "-")
)
)
return
end
subminer_log("info", "aniskip", "Launcher payload present but no OP interval was available")
return
end
subminer_log(
"info",
@@ -558,6 +720,8 @@ function M.create(ctx)
end
return
end
state.aniskip.payload = payload
state.aniskip.payload_source = "remote"
if not apply_aniskip_payload(mal_id, title, episode, payload) then
subminer_log("info", "aniskip", "AniSkip payload did not include OP interval")
end

View File

@@ -17,6 +17,7 @@ function M.load(options_lib, default_socket_path)
aniskip_season = "",
aniskip_mal_id = "",
aniskip_episode = "",
aniskip_payload = "",
aniskip_show_button = true,
aniskip_button_text = "You can skip by pressing %s",
aniskip_button_key = "y-k",

View File

@@ -3,6 +3,8 @@ local M = {}
local OVERLAY_START_RETRY_DELAY_SECONDS = 0.2
local OVERLAY_START_MAX_ATTEMPTS = 6
local AUTO_PLAY_READY_TIMEOUT_SECONDS = 15
local AUTO_PLAY_READY_LOADING_OSD = "Loading subtitle tokenization..."
local AUTO_PLAY_READY_READY_OSD = "Subtitle tokenization ready"
function M.create(ctx)
local mp = ctx.mp
@@ -14,6 +16,7 @@ function M.create(ctx)
local subminer_log = ctx.log.subminer_log
local show_osd = ctx.log.show_osd
local normalize_log_level = ctx.log.normalize_log_level
local run_control_command_async
local function resolve_visible_overlay_startup()
local raw_visible_overlay = opts.auto_start_visible_overlay
@@ -70,28 +73,50 @@ function M.create(ctx)
state.auto_play_ready_timeout = nil
end
local function disarm_auto_play_ready_gate()
local function clear_auto_play_ready_osd_timer()
local timer = state.auto_play_ready_osd_timer
if timer and timer.kill then
timer:kill()
end
state.auto_play_ready_osd_timer = nil
end
local function disarm_auto_play_ready_gate(options)
local should_resume = options == nil or options.resume_playback ~= false
local was_armed = state.auto_play_ready_gate_armed
clear_auto_play_ready_timeout()
clear_auto_play_ready_osd_timer()
state.auto_play_ready_gate_armed = false
if was_armed and should_resume then
mp.set_property_native("pause", false)
end
end
local function release_auto_play_ready_gate(reason)
if not state.auto_play_ready_gate_armed then
return
end
disarm_auto_play_ready_gate()
disarm_auto_play_ready_gate({ resume_playback = false })
mp.set_property_native("pause", false)
show_osd("Subtitle annotations loaded")
show_osd(AUTO_PLAY_READY_READY_OSD)
subminer_log("info", "process", "Resuming playback after startup gate: " .. tostring(reason or "ready"))
end
local function arm_auto_play_ready_gate()
if state.auto_play_ready_gate_armed then
clear_auto_play_ready_timeout()
clear_auto_play_ready_osd_timer()
end
state.auto_play_ready_gate_armed = true
mp.set_property_native("pause", true)
show_osd("Loading subtitle annotations...")
show_osd(AUTO_PLAY_READY_LOADING_OSD)
if type(mp.add_periodic_timer) == "function" then
state.auto_play_ready_osd_timer = mp.add_periodic_timer(2.5, function()
if state.auto_play_ready_gate_armed then
show_osd(AUTO_PLAY_READY_LOADING_OSD)
end
end)
end
subminer_log("info", "process", "Pausing playback until SubMiner overlay/tokenization readiness signal")
state.auto_play_ready_timeout = mp.add_timeout(AUTO_PLAY_READY_TIMEOUT_SECONDS, function()
if not state.auto_play_ready_gate_armed then
@@ -108,6 +133,11 @@ function M.create(ctx)
local function notify_auto_play_ready()
release_auto_play_ready_gate("tokenization-ready")
if state.overlay_running and resolve_visible_overlay_startup() then
run_control_command_async("show-visible-overlay", {
socket_path = opts.socket_path,
})
end
end
local function build_command_args(action, overrides)
@@ -132,22 +162,18 @@ function M.create(ctx)
table.insert(args, "--socket")
table.insert(args, socket_path)
-- Keep auto-start --start requests idempotent for second-instance handling.
-- Visibility is applied as a separate control command after startup.
if overrides.auto_start_trigger ~= true then
local should_show_visible = resolve_visible_overlay_startup()
if should_show_visible then
table.insert(args, "--show-visible-overlay")
else
table.insert(args, "--hide-visible-overlay")
end
local should_show_visible = resolve_visible_overlay_startup()
if should_show_visible then
table.insert(args, "--show-visible-overlay")
else
table.insert(args, "--hide-visible-overlay")
end
end
return args
end
local function run_control_command_async(action, overrides, callback)
run_control_command_async = function(action, overrides, callback)
local args = build_command_args(action, overrides)
subminer_log("debug", "process", "Control command: " .. table.concat(args, " "))
mp.command_native_async({
@@ -251,6 +277,24 @@ function M.create(ctx)
if state.overlay_running then
if overrides.auto_start_trigger == true then
subminer_log("debug", "process", "Auto-start ignored because overlay is already running")
local socket_path = overrides.socket_path or opts.socket_path
local should_pause_until_ready = (
resolve_visible_overlay_startup()
and resolve_pause_until_ready()
and has_matching_mpv_ipc_socket(socket_path)
)
if should_pause_until_ready then
arm_auto_play_ready_gate()
else
disarm_auto_play_ready_gate()
end
local visibility_action = resolve_visible_overlay_startup()
and "show-visible-overlay"
or "hide-visible-overlay"
run_control_command_async(visibility_action, {
socket_path = socket_path,
log_level = overrides.log_level,
})
return
end
subminer_log("info", "process", "Overlay already running")
@@ -287,7 +331,7 @@ function M.create(ctx)
)
end
if attempt == 1 then
if attempt == 1 and not state.auto_play_ready_gate_armed then
show_osd("Starting...")
end
state.overlay_running = true
@@ -319,9 +363,10 @@ function M.create(ctx)
local visibility_action = resolve_visible_overlay_startup()
and "show-visible-overlay"
or "hide-visible-overlay"
run_control_command_async(visibility_action, {
log_level = overrides.log_level,
})
run_control_command_async(visibility_action, {
socket_path = socket_path,
log_level = overrides.log_level,
})
end
end)

View File

@@ -24,11 +24,14 @@ function M.new()
episode = nil,
intro_start = nil,
intro_end = nil,
payload = nil,
payload_source = nil,
found = false,
prompt_shown = false,
},
auto_play_ready_gate_armed = false,
auto_play_ready_timeout = nil,
auto_play_ready_osd_timer = nil,
}
end

View File

@@ -9,6 +9,7 @@ local function run_plugin_scenario(config)
osd = {},
logs = {},
property_sets = {},
periodic_timers = {},
}
local function make_mp_stub()
@@ -90,10 +91,32 @@ local function run_plugin_scenario(config)
end
end
function mp.add_timeout(_seconds, callback)
if callback then
-- Test stub for mp.add_timeout: returns a killable handle and fires the
-- callback synchronously for short delays only. Long delays (>= 5s, such as
-- the 15s auto-play readiness timeout) are left pending so scenarios never
-- trip the timeout path during tests.
function mp.add_timeout(seconds, callback)
local timeout = {
killed = false,
}
function timeout:kill()
self.killed = true
end
local delay = tonumber(seconds) or 0
if callback and delay < 5 then
callback()
end
return timeout
end
-- Test stub for mp.add_periodic_timer: never fires the callback, but records
-- the timer in recorded.periodic_timers so assertions can check that it was
-- created and later killed.
function mp.add_periodic_timer(seconds, callback)
local timer = {
seconds = seconds,
killed = false,
callback = callback,
}
function timer:kill()
self.killed = true
end
recorded.periodic_timers[#recorded.periodic_timers + 1] = timer
return timer
end
function mp.register_script_message(name, fn)
@@ -281,6 +304,26 @@ local function find_control_call(async_calls, flag)
return nil
end
-- Count async control invocations whose args carry `flag` but that are not
-- overlay "--start" commands (start commands may carry visibility flags too).
local function count_control_calls(async_calls, flag)
local total = 0
for _, call in ipairs(async_calls) do
local saw_flag, saw_start = false, false
for _, arg in ipairs(call.args or {}) do
if arg == flag then
saw_flag = true
elseif arg == "--start" then
saw_start = true
end
end
if saw_flag and not saw_start then
total = total + 1
end
end
return total
end
local function call_has_arg(call, target)
local args = (call and call.args) or {}
for _, value in ipairs(args) do
@@ -352,6 +395,16 @@ local function count_osd_message(messages, target)
return count
end
-- Count recorded mpv property writes that match both `name` and `value`.
local function count_property_set(property_sets, name, value)
local matches = 0
for _, entry in ipairs(property_sets) do
if entry.name == name and entry.value == value then
matches = matches + 1
end
end
return matches
end
local function fire_event(recorded, name)
local listeners = recorded.events[name] or {}
for _, listener in ipairs(listeners) do
@@ -454,12 +507,12 @@ do
local start_call = find_start_call(recorded.async_calls)
assert_true(start_call ~= nil, "auto-start should issue --start command")
assert_true(
not call_has_arg(start_call, "--show-visible-overlay"),
"auto-start should keep --start command free of --show-visible-overlay"
call_has_arg(start_call, "--show-visible-overlay"),
"auto-start with visible overlay enabled should include --show-visible-overlay on --start"
)
assert_true(
not call_has_arg(start_call, "--hide-visible-overlay"),
"auto-start should keep --start command free of --hide-visible-overlay"
"auto-start with visible overlay enabled should not include --hide-visible-overlay on --start"
)
assert_true(
find_control_call(recorded.async_calls, "--show-visible-overlay") ~= nil,
@@ -493,12 +546,64 @@ do
count_start_calls(recorded.async_calls) == 1,
"duplicate file-loaded events should not issue duplicate --start commands while overlay is already running"
)
assert_true(
count_control_calls(recorded.async_calls, "--show-visible-overlay") == 2,
"duplicate auto-start should re-assert visible overlay state when overlay is already running"
)
assert_true(
count_osd_message(recorded.osd, "SubMiner: Already running") == 0,
"duplicate auto-start events should not show Already running OSD"
)
end
do
local recorded, err = run_plugin_scenario({
process_list = "",
option_overrides = {
binary_path = binary_path,
auto_start = "yes",
auto_start_visible_overlay = "yes",
auto_start_pause_until_ready = "yes",
socket_path = "/tmp/subminer-socket",
},
input_ipc_server = "/tmp/subminer-socket",
media_title = "Random Movie",
files = {
[binary_path] = true,
},
})
assert_true(recorded ~= nil, "plugin failed to load for duplicate auto-start pause-until-ready scenario: " .. tostring(err))
fire_event(recorded, "file-loaded")
assert_true(recorded.script_messages["subminer-autoplay-ready"] ~= nil, "subminer-autoplay-ready script message not registered")
recorded.script_messages["subminer-autoplay-ready"]()
fire_event(recorded, "file-loaded")
recorded.script_messages["subminer-autoplay-ready"]()
assert_true(
count_start_calls(recorded.async_calls) == 1,
"duplicate pause-until-ready auto-start should not issue duplicate --start commands while overlay is already running"
)
assert_true(
count_control_calls(recorded.async_calls, "--show-visible-overlay") == 4,
"duplicate pause-until-ready auto-start should re-assert visible overlay on both start and ready events"
)
assert_true(
count_osd_message(recorded.osd, "SubMiner: Loading subtitle tokenization...") == 2,
"duplicate pause-until-ready auto-start should arm tokenization loading gate for each file"
)
assert_true(
count_osd_message(recorded.osd, "SubMiner: Subtitle tokenization ready") == 2,
"duplicate pause-until-ready auto-start should release tokenization gate for each file"
)
assert_true(
count_property_set(recorded.property_sets, "pause", true) == 2,
"duplicate pause-until-ready auto-start should force pause for each file"
)
assert_true(
count_property_set(recorded.property_sets, "pause", false) == 2,
"duplicate pause-until-ready auto-start should resume playback for each file"
)
end
do
local recorded, err = run_plugin_scenario({
process_list = "",
@@ -528,13 +633,58 @@ do
"autoplay-ready script message should resume mpv playback"
)
assert_true(
has_osd_message(recorded.osd, "SubMiner: Loading subtitle annotations..."),
has_osd_message(recorded.osd, "SubMiner: Loading subtitle tokenization..."),
"pause-until-ready auto-start should show loading OSD message"
)
assert_true(
has_osd_message(recorded.osd, "SubMiner: Subtitle annotations loaded"),
not has_osd_message(recorded.osd, "SubMiner: Starting..."),
"pause-until-ready auto-start should avoid replacing loading OSD with generic starting OSD"
)
assert_true(
has_osd_message(recorded.osd, "SubMiner: Subtitle tokenization ready"),
"autoplay-ready should show loaded OSD message"
)
assert_true(
count_control_calls(recorded.async_calls, "--show-visible-overlay") == 2,
"autoplay-ready should re-assert visible overlay state"
)
assert_true(
#recorded.periodic_timers == 1,
"pause-until-ready auto-start should create periodic loading OSD refresher"
)
assert_true(
recorded.periodic_timers[1].killed == true,
"autoplay-ready should stop periodic loading OSD refresher"
)
end
do
local recorded, err = run_plugin_scenario({
process_list = "",
option_overrides = {
binary_path = binary_path,
auto_start = "yes",
auto_start_visible_overlay = "yes",
auto_start_pause_until_ready = "yes",
socket_path = "/tmp/subminer-socket",
},
input_ipc_server = "/tmp/subminer-socket",
media_title = "Random Movie",
files = {
[binary_path] = true,
},
})
assert_true(recorded ~= nil, "plugin failed to load for pause cleanup scenario: " .. tostring(err))
fire_event(recorded, "file-loaded")
fire_event(recorded, "end-file")
assert_true(
count_property_set(recorded.property_sets, "pause", true) == 1,
"pause cleanup scenario should force pause while waiting for tokenization"
)
assert_true(
count_property_set(recorded.property_sets, "pause", false) == 1,
"ending file while gate is armed should clear forced pause state"
)
end
do
@@ -557,12 +707,12 @@ do
local start_call = find_start_call(recorded.async_calls)
assert_true(start_call ~= nil, "auto-start should issue --start command")
assert_true(
not call_has_arg(start_call, "--hide-visible-overlay"),
"auto-start should keep --start command free of --hide-visible-overlay"
call_has_arg(start_call, "--hide-visible-overlay"),
"auto-start with visible overlay disabled should include --hide-visible-overlay on --start"
)
assert_true(
not call_has_arg(start_call, "--show-visible-overlay"),
"auto-start should keep --start command free of --show-visible-overlay"
"auto-start with visible overlay disabled should not include --show-visible-overlay on --start"
)
assert_true(
find_control_call(recorded.async_calls, "--hide-visible-overlay") ~= nil,

View File

@@ -316,3 +316,33 @@ test('FieldGroupingMergeCollaborator deduplicates identical sentence, audio, and
assert.equal(merged.Picture, '<img data-group-id="202" src="same.png">');
assert.equal(merged.ExpressionAudio, merged.SentenceAudio);
});
test('AnkiIntegration.formatMiscInfoPattern avoids leaking Jellyfin api_key query params', () => {
const integration = new AnkiIntegration(
{
metadata: {
pattern: '[SubMiner] %f (%t)',
},
} as never,
{} as never,
{
currentSubText: '',
currentVideoPath:
'stream?static=true&api_key=secret-token&MediaSourceId=a762ab23d26d4347e3cacdb83aaae405&AudioStreamIndex=3',
currentTimePos: 426,
currentSubStart: 426,
currentSubEnd: 428,
currentAudioStreamIndex: 3,
currentMediaTitle: '[Jellyfin/direct] Bocchi the Rock! - S01E02',
send: () => true,
} as unknown as never,
);
const privateApi = integration as unknown as {
formatMiscInfoPattern: (fallbackFilename: string, startTimeSeconds?: number) => string;
};
const result = privateApi.formatMiscInfoPattern('audio_123.mp3', 426);
assert.equal(result, '[SubMiner] [Jellyfin/direct] Bocchi the Rock! - S01E02 (00:07:06)');
assert.equal(result.includes('api_key='), false);
});

View File

@@ -58,6 +58,55 @@ interface NoteInfo {
type CardKind = 'sentence' | 'audio';
/** Trim `value` and return it only when it is a non-empty string; otherwise null. */
function trimToNonEmptyString(value: unknown): string | null {
  if (typeof value === 'string') {
    const normalized = value.trim();
    if (normalized.length > 0) {
      return normalized;
    }
  }
  return null;
}
/** decodeURIComponent that falls back to the raw input on malformed escapes. */
function decodeURIComponentSafe(value: string): string {
  let decoded = value;
  try {
    decoded = decodeURIComponent(value);
  } catch {
    // Malformed percent-encoding — keep the original text untouched.
  }
  return decoded;
}
/**
 * Extract the display filename from a local path or URL-ish media path.
 * URLs are parsed via the URL class so query strings and fragments (e.g.
 * Jellyfin api_key params) never leak into the filename; percent-escapes
 * are decoded best-effort, keeping the raw text when decoding fails.
 */
function extractFilenameFromMediaPath(rawPath: string): string {
  const safeDecode = (text: string): string => {
    try {
      return decodeURIComponent(text);
    } catch {
      return text;
    }
  };
  const candidate = rawPath.trim();
  if (!candidate) {
    return '';
  }
  // A scheme followed by "//" marks a URL (http://, https://, custom schemes).
  if (/^[a-zA-Z][a-zA-Z\d+\-.]*:\/\//.test(candidate)) {
    try {
      return safeDecode(path.basename(new URL(candidate).pathname));
    } catch {
      // Unparseable URL — fall back to manual query/fragment stripping below.
    }
  }
  const cutAt = candidate.search(/[?#]/);
  const bare = cutAt >= 0 ? candidate.slice(0, cutAt) : candidate;
  return safeDecode(path.basename(bare));
}
/**
 * Decide whether the mpv media title should replace the extracted filename
 * when formatting misc info. Streaming URLs (which may embed api_key secrets)
 * and generic manifest/endpoint names carry no useful filename.
 */
function shouldPreferMediaTitleForMiscInfo(rawPath: string, filename: string): boolean {
  const pathLower = rawPath.toLowerCase();
  if (
    pathLower.includes('api_key=') ||
    pathLower.startsWith('http://') ||
    pathLower.startsWith('https://')
  ) {
    return true;
  }
  // Generic basenames produced by streaming servers — never a real title.
  const genericNames = ['stream', 'master.m3u8', 'index.m3u8', 'playlist.m3u8'];
  return genericNames.includes(filename.toLowerCase());
}
export class AnkiIntegration {
private client: AnkiConnectClient;
private mediaGenerator: MediaGenerator;
@@ -729,8 +778,12 @@ export class AnkiIntegration {
}
const currentVideoPath = this.mpvClient.currentVideoPath || '';
const videoFilename = currentVideoPath ? path.basename(currentVideoPath) : '';
const filenameWithExt = videoFilename || fallbackFilename;
const videoFilename = extractFilenameFromMediaPath(currentVideoPath);
const mediaTitle = trimToNonEmptyString(this.mpvClient.currentMediaTitle);
const filenameWithExt =
(shouldPreferMediaTitleForMiscInfo(currentVideoPath, videoFilename)
? mediaTitle || videoFilename
: videoFilename || mediaTitle) || fallbackFilename;
const filenameWithoutExt = filenameWithExt.replace(/\.[^.]+$/, '');
const currentTimePos =

View File

@@ -1,6 +1,11 @@
import test from 'node:test';
import assert from 'node:assert/strict';
import { hasExplicitCommand, parseArgs, shouldRunSettingsOnlyStartup, shouldStartApp } from './args';
import {
hasExplicitCommand,
parseArgs,
shouldRunSettingsOnlyStartup,
shouldStartApp,
} from './args';
test('parseArgs parses booleans and value flags', () => {
const args = parseArgs([
@@ -148,10 +153,7 @@ test('hasExplicitCommand and shouldStartApp preserve command intent', () => {
'/tmp/subminer-jf-response.json',
]);
assert.equal(jellyfinPreviewAuth.jellyfinPreviewAuth, true);
assert.equal(
jellyfinPreviewAuth.jellyfinResponsePath,
'/tmp/subminer-jf-response.json',
);
assert.equal(jellyfinPreviewAuth.jellyfinResponsePath, '/tmp/subminer-jf-response.json');
assert.equal(hasExplicitCommand(jellyfinPreviewAuth), true);
assert.equal(shouldStartApp(jellyfinPreviewAuth), false);

View File

@@ -240,7 +240,9 @@ export function parseArgs(argv: string[]): CliArgs {
if (value === 'true' || value === '1' || value === 'yes') args.jellyfinRecursive = true;
if (value === 'false' || value === '0' || value === 'no') args.jellyfinRecursive = false;
} else if (arg === '--jellyfin-recursive') {
const value = readValue(argv[i + 1])?.trim().toLowerCase();
const value = readValue(argv[i + 1])
?.trim()
.toLowerCase();
if (value === 'false' || value === '0' || value === 'no') {
args.jellyfinRecursive = false;
} else if (value === 'true' || value === '1' || value === 'yes') {

View File

@@ -33,6 +33,7 @@ test('loads defaults when config is missing', () => {
assert.equal(config.subtitleStyle.backgroundColor, 'rgb(30, 32, 48, 0.88)');
assert.equal(config.subtitleStyle.preserveLineBreaks, false);
assert.equal(config.subtitleStyle.autoPauseVideoOnHover, true);
assert.equal(config.subtitleStyle.autoPauseVideoOnYomitanPopup, false);
assert.equal(config.subtitleStyle.hoverTokenColor, '#f4dbd6');
assert.equal(config.subtitleStyle.hoverTokenBackgroundColor, 'rgba(54, 58, 79, 0.84)');
assert.equal(
@@ -47,7 +48,10 @@ test('loads defaults when config is missing', () => {
assert.equal(config.subtitleStyle.textRendering, 'geometricPrecision');
assert.equal(config.subtitleStyle.textShadow, '0 3px 10px rgba(0,0,0,0.69)');
assert.equal(config.subtitleStyle.backdropFilter, 'blur(6px)');
assert.equal(config.subtitleStyle.secondary.fontFamily, 'Inter, Noto Sans, Helvetica Neue, sans-serif');
assert.equal(
config.subtitleStyle.secondary.fontFamily,
'Inter, Noto Sans, Helvetica Neue, sans-serif',
);
assert.equal(config.subtitleStyle.secondary.fontColor, '#cad3f5');
assert.equal(config.immersionTracking.enabled, true);
assert.equal(config.immersionTracking.dbPath, '');
@@ -157,6 +161,44 @@ test('parses subtitleStyle.autoPauseVideoOnHover and warns on invalid values', (
);
});
// Verifies that subtitleStyle.autoPauseVideoOnYomitanPopup is parsed as a
// strict boolean: a valid `true` is honored, while a non-boolean value
// ("yes") falls back to the default and records a path-specific warning.
test('parses subtitleStyle.autoPauseVideoOnYomitanPopup and warns on invalid values', () => {
  const validDir = makeTempDir();
  fs.writeFileSync(
    path.join(validDir, 'config.jsonc'),
    `{
"subtitleStyle": {
"autoPauseVideoOnYomitanPopup": true
}
}`,
    'utf-8',
  );
  const validService = new ConfigService(validDir);
  assert.equal(validService.getConfig().subtitleStyle.autoPauseVideoOnYomitanPopup, true);
  const invalidDir = makeTempDir();
  fs.writeFileSync(
    path.join(invalidDir, 'config.jsonc'),
    `{
"subtitleStyle": {
"autoPauseVideoOnYomitanPopup": "yes"
}
}`,
    'utf-8',
  );
  const invalidService = new ConfigService(invalidDir);
  // An invalid value resolves to the shipped default...
  assert.equal(
    invalidService.getConfig().subtitleStyle.autoPauseVideoOnYomitanPopup,
    DEFAULT_CONFIG.subtitleStyle.autoPauseVideoOnYomitanPopup,
  );
  // ...and a warning is emitted under the exact config path.
  assert.ok(
    invalidService
      .getWarnings()
      .some((warning) => warning.path === 'subtitleStyle.autoPauseVideoOnYomitanPopup'),
  );
});
test('parses subtitleStyle.hoverTokenColor and warns on invalid values', () => {
const validDir = makeTempDir();
fs.writeFileSync(

View File

@@ -50,6 +50,7 @@ export const CORE_DEFAULT_CONFIG: Pick<
alass_path: '',
ffsubsync_path: '',
ffmpeg_path: '',
replace: true,
},
startupWarmups: {
lowPowerMode: false,

View File

@@ -5,6 +5,7 @@ export const SUBTITLE_DEFAULT_CONFIG: Pick<ResolvedConfig, 'subtitleStyle'> = {
enableJlpt: false,
preserveLineBreaks: false,
autoPauseVideoOnHover: true,
autoPauseVideoOnYomitanPopup: false,
hoverTokenColor: '#f4dbd6',
hoverTokenBackgroundColor: 'rgba(54, 58, 79, 0.84)',
fontFamily: 'M PLUS 1 Medium, Source Han Sans JP, Noto Sans CJK JP',

View File

@@ -20,6 +20,7 @@ test('config option registry includes critical paths and has unique entries', ()
'logging.level',
'startupWarmups.lowPowerMode',
'subtitleStyle.enableJlpt',
'subtitleStyle.autoPauseVideoOnYomitanPopup',
'ankiConnect.enabled',
'immersionTracking.enabled',
]) {

View File

@@ -32,6 +32,12 @@ export function buildCoreConfigOptionRegistry(
defaultValue: defaultConfig.subsync.defaultMode,
description: 'Subsync default mode.',
},
{
path: 'subsync.replace',
kind: 'boolean',
defaultValue: defaultConfig.subsync.replace,
description: 'Replace the active subtitle file when sync completes.',
},
{
path: 'startupWarmups.lowPowerMode',
kind: 'boolean',

View File

@@ -28,6 +28,13 @@ export function buildSubtitleConfigOptionRegistry(
description:
'Automatically pause mpv playback while hovering subtitle text, then resume on leave.',
},
{
path: 'subtitleStyle.autoPauseVideoOnYomitanPopup',
kind: 'boolean',
defaultValue: defaultConfig.subtitleStyle.autoPauseVideoOnYomitanPopup,
description:
'Automatically pause mpv playback while Yomitan popup is open, then resume when popup closes.',
},
{
path: 'subtitleStyle.hoverTokenColor',
kind: 'string',

View File

@@ -44,6 +44,8 @@ export const SPECIAL_COMMANDS = {
RUNTIME_OPTION_CYCLE_PREFIX: '__runtime-option-cycle:',
REPLAY_SUBTITLE: '__replay-subtitle',
PLAY_NEXT_SUBTITLE: '__play-next-subtitle',
SHIFT_SUB_DELAY_TO_NEXT_SUBTITLE_START: '__sub-delay-next-line',
SHIFT_SUB_DELAY_TO_PREVIOUS_SUBTITLE_START: '__sub-delay-prev-line',
} as const;
export const DEFAULT_KEYBINDINGS: NonNullable<ResolvedConfig['keybindings']> = [
@@ -56,6 +58,11 @@ export const DEFAULT_KEYBINDINGS: NonNullable<ResolvedConfig['keybindings']> = [
{ key: 'ArrowDown', command: ['seek', -60] },
{ key: 'Shift+KeyH', command: ['sub-seek', -1] },
{ key: 'Shift+KeyL', command: ['sub-seek', 1] },
{ key: 'Shift+BracketRight', command: [SPECIAL_COMMANDS.SHIFT_SUB_DELAY_TO_NEXT_SUBTITLE_START] },
{
key: 'Shift+BracketLeft',
command: [SPECIAL_COMMANDS.SHIFT_SUB_DELAY_TO_PREVIOUS_SUBTITLE_START],
},
{ key: 'Ctrl+Shift+KeyH', command: [SPECIAL_COMMANDS.REPLAY_SUBTITLE] },
{ key: 'Ctrl+Shift+KeyL', command: [SPECIAL_COMMANDS.PLAY_NEXT_SUBTITLE] },
{ key: 'KeyQ', command: ['quit'] },

View File

@@ -173,6 +173,12 @@ export function applyCoreDomainConfig(context: ResolveContext): void {
if (ffsubsync !== undefined) resolved.subsync.ffsubsync_path = ffsubsync;
const ffmpeg = asString(src.subsync.ffmpeg_path);
if (ffmpeg !== undefined) resolved.subsync.ffmpeg_path = ffmpeg;
const replace = asBoolean(src.subsync.replace);
if (replace !== undefined) {
resolved.subsync.replace = replace;
} else if (src.subsync.replace !== undefined) {
warn('subsync.replace', src.subsync.replace, resolved.subsync.replace, 'Expected boolean.');
}
}
if (isObject(src.subtitlePosition)) {

View File

@@ -99,8 +99,9 @@ export function applySubtitleDomainConfig(context: ResolveContext): void {
if (isObject(src.subtitleStyle)) {
const fallbackSubtitleStyleEnableJlpt = resolved.subtitleStyle.enableJlpt;
const fallbackSubtitleStylePreserveLineBreaks = resolved.subtitleStyle.preserveLineBreaks;
const fallbackSubtitleStyleAutoPauseVideoOnHover =
resolved.subtitleStyle.autoPauseVideoOnHover;
const fallbackSubtitleStyleAutoPauseVideoOnHover = resolved.subtitleStyle.autoPauseVideoOnHover;
const fallbackSubtitleStyleAutoPauseVideoOnYomitanPopup =
resolved.subtitleStyle.autoPauseVideoOnYomitanPopup;
const fallbackSubtitleStyleHoverTokenColor = resolved.subtitleStyle.hoverTokenColor;
const fallbackSubtitleStyleHoverTokenBackgroundColor =
resolved.subtitleStyle.hoverTokenBackgroundColor;
@@ -161,8 +162,7 @@ export function applySubtitleDomainConfig(context: ResolveContext): void {
if (autoPauseVideoOnHover !== undefined) {
resolved.subtitleStyle.autoPauseVideoOnHover = autoPauseVideoOnHover;
} else if (
(src.subtitleStyle as { autoPauseVideoOnHover?: unknown }).autoPauseVideoOnHover !==
undefined
(src.subtitleStyle as { autoPauseVideoOnHover?: unknown }).autoPauseVideoOnHover !== undefined
) {
resolved.subtitleStyle.autoPauseVideoOnHover = fallbackSubtitleStyleAutoPauseVideoOnHover;
warn(
@@ -173,6 +173,27 @@ export function applySubtitleDomainConfig(context: ResolveContext): void {
);
}
const autoPauseVideoOnYomitanPopup = asBoolean(
(src.subtitleStyle as { autoPauseVideoOnYomitanPopup?: unknown })
.autoPauseVideoOnYomitanPopup,
);
if (autoPauseVideoOnYomitanPopup !== undefined) {
resolved.subtitleStyle.autoPauseVideoOnYomitanPopup = autoPauseVideoOnYomitanPopup;
} else if (
(src.subtitleStyle as { autoPauseVideoOnYomitanPopup?: unknown })
.autoPauseVideoOnYomitanPopup !== undefined
) {
resolved.subtitleStyle.autoPauseVideoOnYomitanPopup =
fallbackSubtitleStyleAutoPauseVideoOnYomitanPopup;
warn(
'subtitleStyle.autoPauseVideoOnYomitanPopup',
(src.subtitleStyle as { autoPauseVideoOnYomitanPopup?: unknown })
.autoPauseVideoOnYomitanPopup,
resolved.subtitleStyle.autoPauseVideoOnYomitanPopup,
'Expected boolean.',
);
}
const hoverTokenColor = asColor(
(src.subtitleStyle as { hoverTokenColor?: unknown }).hoverTokenColor,
);

View File

@@ -47,6 +47,25 @@ test('subtitleStyle autoPauseVideoOnHover falls back on invalid value', () => {
);
});
// Confirms the subtitle-domain resolver falls back to `false` and records
// an 'Expected boolean.' warning when autoPauseVideoOnYomitanPopup is not
// a boolean.
test('subtitleStyle autoPauseVideoOnYomitanPopup falls back on invalid value', () => {
  const { context, warnings } = createResolveContext({
    subtitleStyle: {
      // Deliberately wrong type to exercise the warning path.
      autoPauseVideoOnYomitanPopup: 'invalid' as unknown as boolean,
    },
  });
  applySubtitleDomainConfig(context);
  assert.equal(context.resolved.subtitleStyle.autoPauseVideoOnYomitanPopup, false);
  assert.ok(
    warnings.some(
      (warning) =>
        warning.path === 'subtitleStyle.autoPauseVideoOnYomitanPopup' &&
        warning.message === 'Expected boolean.',
    ),
  );
});
test('subtitleStyle frequencyDictionary.matchMode accepts valid values and warns on invalid', () => {
const valid = createResolveContext({
subtitleStyle: {

View File

@@ -129,3 +129,39 @@ test('createFrequencyDictionaryLookup parses composite displayValue by primary r
assert.equal(lookup('鍛える'), 3272);
assert.equal(lookup('高み'), 9933);
});
// Guards the async migration of the frequency-dictionary loader: stubs out
// the synchronous fs APIs so this test fails if the loader regresses to
// readFileSync/readdirSync/statSync/existsSync. The fixture is written
// with sync APIs *before* the stubs are installed.
test('createFrequencyDictionaryLookup does not require synchronous fs APIs', async () => {
  const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-frequency-dict-'));
  const bankPath = path.join(tempDir, 'term_meta_bank_1.json');
  fs.writeFileSync(bankPath, JSON.stringify([['猫', 1, { frequency: { displayValue: 42 } }]]));
  // Keep originals so they can be restored even if the lookup throws.
  const readFileSync = fs.readFileSync;
  const readdirSync = fs.readdirSync;
  const statSync = fs.statSync;
  const existsSync = fs.existsSync;
  (fs as unknown as Record<string, unknown>).readFileSync = () => {
    throw new Error('sync read disabled');
  };
  (fs as unknown as Record<string, unknown>).readdirSync = () => {
    throw new Error('sync readdir disabled');
  };
  (fs as unknown as Record<string, unknown>).statSync = () => {
    throw new Error('sync stat disabled');
  };
  (fs as unknown as Record<string, unknown>).existsSync = () => {
    throw new Error('sync exists disabled');
  };
  try {
    const lookup = await createFrequencyDictionaryLookup({
      searchPaths: [tempDir],
      log: () => undefined,
    });
    assert.equal(lookup('猫'), 42);
  } finally {
    // Restore the stubbed fs functions regardless of outcome.
    (fs as unknown as Record<string, unknown>).readFileSync = readFileSync;
    (fs as unknown as Record<string, unknown>).readdirSync = readdirSync;
    (fs as unknown as Record<string, unknown>).statSync = statSync;
    (fs as unknown as Record<string, unknown>).existsSync = existsSync;
  }
});

View File

@@ -1,4 +1,4 @@
import * as fs from 'node:fs';
import * as fs from 'node:fs/promises';
import * as path from 'node:path';
export interface FrequencyDictionaryLookupOptions {
@@ -13,6 +13,17 @@ interface FrequencyDictionaryEntry {
const FREQUENCY_BANK_FILE_GLOB = /^term_meta_bank_.*\.json$/;
const NOOP_LOOKUP = (): null => null;
const ENTRY_YIELD_INTERVAL = 5000;
/** Returns true when `error` is an object whose `code` property equals `code` (e.g. 'ENOENT'). */
function isErrorCode(error: unknown, code: string): boolean {
  if (error === null || typeof error !== 'object') {
    return false;
  }
  return (error as { code?: unknown }).code === code;
}
/** Defers continuation by one macrotask so large parses don't starve the event loop. */
async function yieldToEventLoop(): Promise<void> {
  // setImmediate runs the callback after pending I/O events are serviced.
  return new Promise<void>((resolve) => setImmediate(resolve));
}
function normalizeFrequencyTerm(value: string): string {
return value.trim().toLowerCase();
@@ -93,16 +104,22 @@ function asFrequencyDictionaryEntry(entry: unknown): FrequencyDictionaryEntry |
};
}
function addEntriesToMap(
async function addEntriesToMap(
rawEntries: unknown,
terms: Map<string, number>,
): { duplicateCount: number } {
): Promise<{ duplicateCount: number }> {
if (!Array.isArray(rawEntries)) {
return { duplicateCount: 0 };
}
let duplicateCount = 0;
let processedCount = 0;
for (const rawEntry of rawEntries) {
processedCount += 1;
if (processedCount % ENTRY_YIELD_INTERVAL === 0) {
await yieldToEventLoop();
}
const entry = asFrequencyDictionaryEntry(rawEntry);
if (!entry) {
continue;
@@ -119,15 +136,15 @@ function addEntriesToMap(
return { duplicateCount };
}
function collectDictionaryFromPath(
async function collectDictionaryFromPath(
dictionaryPath: string,
log: (message: string) => void,
): Map<string, number> {
): Promise<Map<string, number>> {
const terms = new Map<string, number>();
let fileNames: string[];
try {
fileNames = fs.readdirSync(dictionaryPath);
fileNames = await fs.readdir(dictionaryPath);
} catch (error) {
log(`Failed to read frequency dictionary directory ${dictionaryPath}: ${String(error)}`);
return terms;
@@ -143,7 +160,7 @@ function collectDictionaryFromPath(
const bankPath = path.join(dictionaryPath, bankFile);
let rawText: string;
try {
rawText = fs.readFileSync(bankPath, 'utf-8');
rawText = await fs.readFile(bankPath, 'utf-8');
} catch {
log(`Failed to read frequency dictionary file ${bankPath}`);
continue;
@@ -151,6 +168,7 @@ function collectDictionaryFromPath(
let rawEntries: unknown;
try {
await yieldToEventLoop();
rawEntries = JSON.parse(rawText) as unknown;
} catch {
log(`Failed to parse frequency dictionary file as JSON: ${bankPath}`);
@@ -158,7 +176,7 @@ function collectDictionaryFromPath(
}
const beforeSize = terms.size;
const { duplicateCount } = addEntriesToMap(rawEntries, terms);
const { duplicateCount } = await addEntriesToMap(rawEntries, terms);
if (duplicateCount > 0) {
log(
`Frequency dictionary ignored ${duplicateCount} duplicate term entr${
@@ -185,11 +203,11 @@ export async function createFrequencyDictionaryLookup(
let isDirectory = false;
try {
if (!fs.existsSync(dictionaryPath)) {
isDirectory = (await fs.stat(dictionaryPath)).isDirectory();
} catch (error) {
if (isErrorCode(error, 'ENOENT')) {
continue;
}
isDirectory = fs.statSync(dictionaryPath).isDirectory();
} catch (error) {
options.log(
`Failed to inspect frequency dictionary path ${dictionaryPath}: ${String(error)}`,
);
@@ -201,7 +219,7 @@ export async function createFrequencyDictionaryLookup(
}
foundDictionaryPathCount += 1;
const terms = collectDictionaryFromPath(dictionaryPath, options.log);
const terms = await collectDictionaryFromPath(dictionaryPath, options.log);
if (terms.size > 0) {
options.log(`Frequency dictionary loaded from ${dictionaryPath} (${terms.size} entries)`);
return (term: string): number | null => {

View File

@@ -46,23 +46,31 @@ export function pruneRetention(
const dayCutoff = nowMs - policy.dailyRollupRetentionMs;
const monthCutoff = nowMs - policy.monthlyRollupRetentionMs;
const deletedSessionEvents = (db
.prepare(`DELETE FROM imm_session_events WHERE ts_ms < ?`)
.run(eventCutoff) as { changes: number }).changes;
const deletedTelemetryRows = (db
.prepare(`DELETE FROM imm_session_telemetry WHERE sample_ms < ?`)
.run(telemetryCutoff) as { changes: number }).changes;
const deletedDailyRows = (db
.prepare(`DELETE FROM imm_daily_rollups WHERE rollup_day < ?`)
.run(Math.floor(dayCutoff / DAILY_MS)) as { changes: number }).changes;
const deletedMonthlyRows = (db
.prepare(`DELETE FROM imm_monthly_rollups WHERE rollup_month < ?`)
.run(toMonthKey(monthCutoff)) as { changes: number }).changes;
const deletedEndedSessions = (db
.prepare(
`DELETE FROM imm_sessions WHERE ended_at_ms IS NOT NULL AND ended_at_ms < ?`,
)
.run(telemetryCutoff) as { changes: number }).changes;
const deletedSessionEvents = (
db.prepare(`DELETE FROM imm_session_events WHERE ts_ms < ?`).run(eventCutoff) as {
changes: number;
}
).changes;
const deletedTelemetryRows = (
db.prepare(`DELETE FROM imm_session_telemetry WHERE sample_ms < ?`).run(telemetryCutoff) as {
changes: number;
}
).changes;
const deletedDailyRows = (
db
.prepare(`DELETE FROM imm_daily_rollups WHERE rollup_day < ?`)
.run(Math.floor(dayCutoff / DAILY_MS)) as { changes: number }
).changes;
const deletedMonthlyRows = (
db
.prepare(`DELETE FROM imm_monthly_rollups WHERE rollup_month < ?`)
.run(toMonthKey(monthCutoff)) as { changes: number }
).changes;
const deletedEndedSessions = (
db
.prepare(`DELETE FROM imm_sessions WHERE ended_at_ms IS NOT NULL AND ended_at_ms < ?`)
.run(telemetryCutoff) as { changes: number }
).changes;
return {
deletedSessionEvents,

View File

@@ -17,6 +17,9 @@ test('extractLineVocabulary returns words and unique kanji', () => {
new Set(result.words.map((entry) => `${entry.headword}/${entry.word}`)),
new Set(['hello/hello', '你好/你好', '猫/猫']),
);
assert.equal(result.words.every((entry) => entry.reading === ''), true);
assert.equal(
result.words.every((entry) => entry.reading === ''),
true,
);
assert.deepEqual(new Set(result.kanji), new Set(['你', '好', '猫']));
});

View File

@@ -97,7 +97,8 @@ export function extractLineVocabulary(value: string): ExtractedLineVocabulary {
if (!cleaned) return { words: [], kanji: [] };
const wordSet = new Set<string>();
const tokenPattern = /[A-Za-z0-9']+|[\u3040-\u30ff]+|[\u3400-\u4dbf\u4e00-\u9fff\u20000-\u2a6df]+/g;
const tokenPattern =
/[A-Za-z0-9']+|[\u3040-\u30ff]+|[\u3400-\u4dbf\u4e00-\u9fff\u20000-\u2a6df]+/g;
const rawWords = cleaned.match(tokenPattern) ?? [];
for (const rawWord of rawWords) {
const normalizedWord = normalizeText(rawWord.toLowerCase());

View File

@@ -19,15 +19,8 @@ export function startSessionRecord(
CREATED_DATE, LAST_UPDATE_DATE
) VALUES (?, ?, ?, ?, ?, ?)
`,
)
.run(
sessionUuid,
videoId,
startedAtMs,
SESSION_STATUS_ACTIVE,
startedAtMs,
nowMs,
);
)
.run(sessionUuid, videoId, startedAtMs, SESSION_STATUS_ACTIVE, startedAtMs, nowMs);
const sessionId = Number(result.lastInsertRowid);
return {
sessionId,

View File

@@ -59,9 +59,7 @@ testIfSqlite('ensureSchema creates immersion core tables', () => {
assert.ok(tableNames.has('imm_rollup_state'));
const rollupStateRow = db
.prepare(
'SELECT state_value FROM imm_rollup_state WHERE state_key = ?',
)
.prepare('SELECT state_value FROM imm_rollup_state WHERE state_key = ?')
.get('last_rollup_sample_ms') as {
state_value: number;
} | null;
@@ -188,7 +186,9 @@ testIfSqlite('executeQueuedWrite inserts and upserts word and kanji rows', () =>
stmts.kanjiUpsertStmt.run('日', 8.0, 11.0);
const wordRow = db
.prepare('SELECT headword, frequency, first_seen, last_seen FROM imm_words WHERE headword = ?')
.prepare(
'SELECT headword, frequency, first_seen, last_seen FROM imm_words WHERE headword = ?',
)
.get('猫') as {
headword: string;
frequency: number;

View File

@@ -426,11 +426,7 @@ export function getOrCreateVideoRecord(
LAST_UPDATE_DATE = ?
WHERE video_id = ?
`,
).run(
details.canonicalTitle || 'unknown',
Date.now(),
existing.video_id,
);
).run(details.canonicalTitle || 'unknown', Date.now(), existing.video_id);
return existing.video_id;
}

View File

@@ -129,7 +129,11 @@ interface QueuedKanjiWrite {
lastSeen: number;
}
export type QueuedWrite = QueuedTelemetryWrite | QueuedEventWrite | QueuedWordWrite | QueuedKanjiWrite;
export type QueuedWrite =
| QueuedTelemetryWrite
| QueuedEventWrite
| QueuedWordWrite
| QueuedKanjiWrite;
export interface VideoMetadata {
sourceType: number;

View File

@@ -10,6 +10,7 @@ export {
unregisterOverlayShortcutsRuntime,
} from './overlay-shortcut';
export { createOverlayShortcutRuntimeHandlers } from './overlay-shortcut-handler';
export { createShiftSubtitleDelayToAdjacentCueHandler } from './subtitle-delay-shift';
export { createCliCommandDepsRuntime, handleCliCommand } from './cli-command';
export {
copyCurrentSubtitle,

View File

@@ -13,6 +13,8 @@ function createOptions(overrides: Partial<Parameters<typeof handleMpvCommandFrom
RUNTIME_OPTION_CYCLE_PREFIX: '__runtime-option-cycle:',
REPLAY_SUBTITLE: '__replay-subtitle',
PLAY_NEXT_SUBTITLE: '__play-next-subtitle',
SHIFT_SUB_DELAY_TO_NEXT_SUBTITLE_START: '__sub-delay-next-line',
SHIFT_SUB_DELAY_TO_PREVIOUS_SUBTITLE_START: '__sub-delay-prev-line',
},
triggerSubsyncFromConfig: () => {
calls.push('subsync');
@@ -30,6 +32,9 @@ function createOptions(overrides: Partial<Parameters<typeof handleMpvCommandFrom
mpvPlayNextSubtitle: () => {
calls.push('next');
},
shiftSubDelayToAdjacentSubtitle: async (direction) => {
calls.push(`shift:${direction}`);
},
mpvSendCommand: (command) => {
sentCommands.push(command);
},
@@ -68,6 +73,21 @@ test('handleMpvCommandFromIpc emits osd for secondary subtitle track keybinding
assert.deepEqual(osd, ['Secondary subtitle track: ${secondary-sid}']);
});
// `add sub-delay` proxied through IPC must be forwarded to mpv verbatim
// and echo an OSD message templated on the resulting ${sub-delay} value.
test('handleMpvCommandFromIpc emits osd for subtitle delay keybinding proxies', () => {
  const { options, sentCommands, osd } = createOptions();
  handleMpvCommandFromIpc(['add', 'sub-delay', 0.1], options);
  assert.deepEqual(sentCommands, [['add', 'sub-delay', 0.1]]);
  assert.deepEqual(osd, ['Subtitle delay: ${sub-delay}']);
});
// The special '__sub-delay-next-line' command is dispatched to the shift
// handler with direction 'next' and is neither forwarded to mpv as a raw
// command nor accompanied by an immediate OSD message.
test('handleMpvCommandFromIpc dispatches special subtitle-delay shift command', () => {
  const { options, calls, sentCommands, osd } = createOptions();
  handleMpvCommandFromIpc(['__sub-delay-next-line'], options);
  assert.deepEqual(calls, ['shift:next']);
  assert.deepEqual(sentCommands, []);
  assert.deepEqual(osd, []);
});
test('handleMpvCommandFromIpc does not forward commands while disconnected', () => {
const { options, sentCommands, osd } = createOptions({
isMpvConnected: () => false,

View File

@@ -12,6 +12,8 @@ export interface HandleMpvCommandFromIpcOptions {
RUNTIME_OPTION_CYCLE_PREFIX: string;
REPLAY_SUBTITLE: string;
PLAY_NEXT_SUBTITLE: string;
SHIFT_SUB_DELAY_TO_NEXT_SUBTITLE_START: string;
SHIFT_SUB_DELAY_TO_PREVIOUS_SUBTITLE_START: string;
};
triggerSubsyncFromConfig: () => void;
openRuntimeOptionsPalette: () => void;
@@ -19,6 +21,7 @@ export interface HandleMpvCommandFromIpcOptions {
showMpvOsd: (text: string) => void;
mpvReplaySubtitle: () => void;
mpvPlayNextSubtitle: () => void;
shiftSubDelayToAdjacentSubtitle: (direction: 'next' | 'previous') => Promise<void>;
mpvSendCommand: (command: (string | number)[]) => void;
isMpvConnected: () => boolean;
hasRuntimeOptionsManager: () => boolean;
@@ -46,6 +49,9 @@ function resolveProxyCommandOsd(command: (string | number)[]): string | null {
if (property === 'secondary-sid') {
return 'Secondary subtitle track: ${secondary-sid}';
}
if (property === 'sub-delay') {
return 'Subtitle delay: ${sub-delay}';
}
return null;
}
@@ -64,6 +70,20 @@ export function handleMpvCommandFromIpc(
return;
}
if (
first === options.specialCommands.SHIFT_SUB_DELAY_TO_NEXT_SUBTITLE_START ||
first === options.specialCommands.SHIFT_SUB_DELAY_TO_PREVIOUS_SUBTITLE_START
) {
const direction =
first === options.specialCommands.SHIFT_SUB_DELAY_TO_NEXT_SUBTITLE_START
? 'next'
: 'previous';
options.shiftSubDelayToAdjacentSubtitle(direction).catch((error) => {
options.showMpvOsd(`Subtitle delay shift failed: ${(error as Error).message}`);
});
return;
}
if (first.startsWith(options.specialCommands.RUNTIME_OPTION_CYCLE_PREFIX)) {
if (!options.hasRuntimeOptionsManager()) return;
const [, idToken, directionToken] = first.split(':');

View File

@@ -0,0 +1,75 @@
import assert from 'node:assert/strict';
import fs from 'node:fs';
import os from 'node:os';
import path from 'node:path';
import test from 'node:test';
import { createJlptVocabularyLookup } from './jlpt-vocab';
// Loads a synthetic JLPT dictionary where only the N5 bank
// (term_meta_bank_5.json) has entries: known terms resolve to 'N5',
// unknown terms resolve to null, and a load-confirmation line is logged.
test('createJlptVocabularyLookup loads JLPT bank entries and resolves known levels', async () => {
  const logs: string[] = [];
  const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-jlpt-dict-'));
  fs.writeFileSync(
    path.join(tempDir, 'term_meta_bank_5.json'),
    JSON.stringify([
      ['猫', 1, { frequency: { displayValue: 1 } }],
      ['犬', 2, { frequency: { displayValue: 2 } }],
    ]),
  );
  // Empty sibling banks so every expected bank file exists.
  fs.writeFileSync(path.join(tempDir, 'term_meta_bank_1.json'), JSON.stringify([]));
  fs.writeFileSync(path.join(tempDir, 'term_meta_bank_2.json'), JSON.stringify([]));
  fs.writeFileSync(path.join(tempDir, 'term_meta_bank_3.json'), JSON.stringify([]));
  fs.writeFileSync(path.join(tempDir, 'term_meta_bank_4.json'), JSON.stringify([]));
  const lookup = await createJlptVocabularyLookup({
    searchPaths: [tempDir],
    log: (message) => {
      logs.push(message);
    },
  });
  assert.equal(lookup('猫'), 'N5');
  assert.equal(lookup('犬'), 'N5');
  assert.equal(lookup('鳥'), null);
  assert.equal(
    logs.some((entry) => entry.includes('JLPT dictionary loaded from')),
    true,
  );
});
// Guards the async migration of the JLPT loader: stubs out the synchronous
// fs APIs so this test fails if the loader regresses to
// readFileSync/statSync/existsSync. The fixture banks are written with
// sync APIs *before* the stubs are installed.
test('createJlptVocabularyLookup does not require synchronous fs APIs', async () => {
  const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-jlpt-dict-'));
  fs.writeFileSync(
    path.join(tempDir, 'term_meta_bank_4.json'),
    JSON.stringify([['見る', 1, { frequency: { displayValue: 3 } }]]),
  );
  fs.writeFileSync(path.join(tempDir, 'term_meta_bank_1.json'), JSON.stringify([]));
  fs.writeFileSync(path.join(tempDir, 'term_meta_bank_2.json'), JSON.stringify([]));
  fs.writeFileSync(path.join(tempDir, 'term_meta_bank_3.json'), JSON.stringify([]));
  fs.writeFileSync(path.join(tempDir, 'term_meta_bank_5.json'), JSON.stringify([]));
  // Keep originals so they can be restored even if the lookup throws.
  const readFileSync = fs.readFileSync;
  const statSync = fs.statSync;
  const existsSync = fs.existsSync;
  (fs as unknown as Record<string, unknown>).readFileSync = () => {
    throw new Error('sync read disabled');
  };
  (fs as unknown as Record<string, unknown>).statSync = () => {
    throw new Error('sync stat disabled');
  };
  (fs as unknown as Record<string, unknown>).existsSync = () => {
    throw new Error('sync exists disabled');
  };
  try {
    const lookup = await createJlptVocabularyLookup({
      searchPaths: [tempDir],
      log: () => undefined,
    });
    // Entry came from bank 4, so it resolves to N4.
    assert.equal(lookup('見る'), 'N4');
  } finally {
    // Restore the stubbed fs functions regardless of outcome.
    (fs as unknown as Record<string, unknown>).readFileSync = readFileSync;
    (fs as unknown as Record<string, unknown>).statSync = statSync;
    (fs as unknown as Record<string, unknown>).existsSync = existsSync;
  }
});

View File

@@ -1,4 +1,4 @@
import * as fs from 'fs';
import * as fs from 'node:fs/promises';
import * as path from 'path';
import type { JlptLevel } from '../../types';
@@ -24,6 +24,17 @@ const JLPT_LEVEL_PRECEDENCE: Record<JlptLevel, number> = {
};
const NOOP_LOOKUP = (): null => null;
const ENTRY_YIELD_INTERVAL = 5000;
/** Checks whether `error` carries the given Node.js errno `code` (e.g. 'ENOENT'). */
function isErrorCode(error: unknown, code: string): boolean {
  return (
    typeof error === 'object' &&
    error !== null &&
    (error as { code?: unknown }).code === code
  );
}
/** Yields one macrotask via setImmediate so bank parsing doesn't block the event loop. */
async function yieldToEventLoop(): Promise<void> {
  const deferred = new Promise<void>((resolve) => {
    setImmediate(() => resolve());
  });
  await deferred;
}
function normalizeJlptTerm(value: string): string {
return value.trim();
@@ -36,12 +47,12 @@ function hasFrequencyDisplayValue(meta: unknown): boolean {
return Object.prototype.hasOwnProperty.call(frequency as Record<string, unknown>, 'displayValue');
}
function addEntriesToMap(
async function addEntriesToMap(
rawEntries: unknown,
level: JlptLevel,
terms: Map<string, JlptLevel>,
log: (message: string) => void,
): void {
): Promise<void> {
const shouldUpdateLevel = (
existingLevel: JlptLevel | undefined,
incomingLevel: JlptLevel,
@@ -53,7 +64,13 @@ function addEntriesToMap(
return;
}
let processedCount = 0;
for (const rawEntry of rawEntries) {
processedCount += 1;
if (processedCount % ENTRY_YIELD_INTERVAL === 0) {
await yieldToEventLoop();
}
if (!Array.isArray(rawEntry)) {
continue;
}
@@ -84,22 +101,31 @@ function addEntriesToMap(
}
}
function collectDictionaryFromPath(
async function collectDictionaryFromPath(
dictionaryPath: string,
log: (message: string) => void,
): Map<string, JlptLevel> {
): Promise<Map<string, JlptLevel>> {
const terms = new Map<string, JlptLevel>();
for (const bank of JLPT_BANK_FILES) {
const bankPath = path.join(dictionaryPath, bank.filename);
if (!fs.existsSync(bankPath)) {
log(`JLPT bank file missing for ${bank.level}: ${bankPath}`);
try {
if (!(await fs.stat(bankPath)).isFile()) {
log(`JLPT bank file missing for ${bank.level}: ${bankPath}`);
continue;
}
} catch (error) {
if (isErrorCode(error, 'ENOENT')) {
log(`JLPT bank file missing for ${bank.level}: ${bankPath}`);
continue;
}
log(`Failed to inspect JLPT bank file ${bankPath}: ${String(error)}`);
continue;
}
let rawText: string;
try {
rawText = fs.readFileSync(bankPath, 'utf-8');
rawText = await fs.readFile(bankPath, 'utf-8');
} catch {
log(`Failed to read JLPT bank file ${bankPath}`);
continue;
@@ -107,6 +133,7 @@ function collectDictionaryFromPath(
let rawEntries: unknown;
try {
await yieldToEventLoop();
rawEntries = JSON.parse(rawText) as unknown;
} catch {
log(`Failed to parse JLPT bank file as JSON: ${bankPath}`);
@@ -119,7 +146,7 @@ function collectDictionaryFromPath(
}
const beforeSize = terms.size;
addEntriesToMap(rawEntries, bank.level, terms, log);
await addEntriesToMap(rawEntries, bank.level, terms, log);
if (terms.size === beforeSize) {
log(`JLPT bank file contained no extractable entries: ${bankPath}`);
}
@@ -137,17 +164,21 @@ export async function createJlptVocabularyLookup(
const resolvedBanks: string[] = [];
for (const dictionaryPath of options.searchPaths) {
attemptedPaths.push(dictionaryPath);
if (!fs.existsSync(dictionaryPath)) {
continue;
}
if (!fs.statSync(dictionaryPath).isDirectory()) {
let isDirectory = false;
try {
isDirectory = (await fs.stat(dictionaryPath)).isDirectory();
} catch (error) {
if (isErrorCode(error, 'ENOENT')) {
continue;
}
options.log(`Failed to inspect JLPT dictionary path ${dictionaryPath}: ${String(error)}`);
continue;
}
if (!isDirectory) continue;
foundDictionaryPathCount += 1;
const terms = collectDictionaryFromPath(dictionaryPath, options.log);
const terms = await collectDictionaryFromPath(dictionaryPath, options.log);
if (terms.size > 0) {
resolvedBanks.push(dictionaryPath);
foundBankCount += 1;

View File

@@ -57,6 +57,26 @@ test('MpvIpcClient handles sub-text property change and broadcasts tokenized sub
assert.equal(events[0]!.isOverlayVisible, false);
});
// A 'media-title' property change caches the title on the client; a later
// 'path' change must clear that cache so a stale title from the previous
// media is not reused for the new file.
test('MpvIpcClient clears cached media title when media path changes', async () => {
  const client = new MpvIpcClient('/tmp/mpv.sock', makeDeps());
  await invokeHandleMessage(client, {
    event: 'property-change',
    name: 'media-title',
    data: '[Jellyfin/direct] Episode 1',
  });
  assert.equal(client.currentMediaTitle, '[Jellyfin/direct] Episode 1');
  await invokeHandleMessage(client, {
    event: 'property-change',
    name: 'path',
    data: '/tmp/new-episode.mkv',
  });
  assert.equal(client.currentVideoPath, '/tmp/new-episode.mkv');
  // The path change resets the cached title to null.
  assert.equal(client.currentMediaTitle, null);
});
test('MpvIpcClient parses JSON line protocol in processBuffer', () => {
const client = new MpvIpcClient('/tmp/mpv.sock', makeDeps());
const seen: Array<Record<string, unknown>> = [];

View File

@@ -134,6 +134,7 @@ export class MpvIpcClient implements MpvClient {
private firstConnection = true;
private hasConnectedOnce = false;
public currentVideoPath = '';
public currentMediaTitle: string | null = null;
public currentTimePos = 0;
public currentSubStart = 0;
public currentSubEnd = 0;
@@ -330,6 +331,7 @@ export class MpvIpcClient implements MpvClient {
this.emit('media-path-change', payload);
},
emitMediaTitleChange: (payload) => {
this.currentMediaTitle = payload.title;
this.emit('media-title-change', payload);
},
emitSubtitleMetricsChange: (patch) => {
@@ -364,6 +366,7 @@ export class MpvIpcClient implements MpvClient {
},
setCurrentVideoPath: (value: string) => {
this.currentVideoPath = value;
this.currentMediaTitle = null;
},
emitSecondarySubtitleVisibility: (payload) => {
this.emit('secondary-subtitle-visibility', payload);

View File

@@ -2,6 +2,7 @@ import { BrowserWindow } from 'electron';
import * as path from 'path';
import { WindowGeometry } from '../../types';
import { createLogger } from '../../logger';
import { IPC_CHANNELS } from '../../shared/ipc/contracts';
const logger = createLogger('main:overlay-window');
const overlayWindowLayerByInstance = new WeakMap<BrowserWindow, OverlayWindowKind>();
@@ -24,6 +25,24 @@ function loadOverlayWindowLayer(window: BrowserWindow, layer: OverlayWindowKind)
export type OverlayWindowKind = 'visible' | 'modal';
/**
 * Detects the Ctrl/Cmd+Y chord (no Alt, no Shift) used to toggle the lookup
 * window from the overlay's before-input-event hook.
 * Matches either the physical KeyY code or a lowercase 'y' key value.
 */
function isLookupWindowToggleInput(input: Electron.Input): boolean {
  const hasPrimaryModifier = Boolean(input.control || input.meta);
  const chordShapeMatches =
    input.type === 'keyDown' && hasPrimaryModifier && !input.alt && !input.shift;
  if (!chordShapeMatches) return false;
  const keyLower = typeof input.key === 'string' ? input.key.toLowerCase() : '';
  return keyLower === 'y' || input.code === 'KeyY';
}
/**
 * Detects the Ctrl/Cmd+Shift+Y chord (no Alt) used to toggle keyboard mode
 * from the overlay's before-input-event hook.
 * Matches either the physical KeyY code or a lowercase 'y' key value.
 */
function isKeyboardModeToggleInput(input: Electron.Input): boolean {
  const hasPrimaryModifier = Boolean(input.control || input.meta);
  const chordShapeMatches =
    input.type === 'keyDown' && hasPrimaryModifier && !input.alt && Boolean(input.shift);
  if (!chordShapeMatches) return false;
  const keyLower = typeof input.key === 'string' ? input.key.toLowerCase() : '';
  return keyLower === 'y' || input.code === 'KeyY';
}
export function updateOverlayWindowBounds(
geometry: WindowGeometry,
window: BrowserWindow | null,
@@ -118,6 +137,16 @@ export function createOverlayWindow(
window.webContents.on('before-input-event', (event, input) => {
if (kind === 'modal') return;
if (!window.isVisible()) return;
if (isKeyboardModeToggleInput(input)) {
event.preventDefault();
window.webContents.send(IPC_CHANNELS.event.keyboardModeToggleRequested);
return;
}
if (isLookupWindowToggleInput(input)) {
event.preventDefault();
window.webContents.send(IPC_CHANNELS.event.lookupWindowToggleRequested);
return;
}
if (!options.tryHandleOverlayShortcutLocalFallback(input)) return;
event.preventDefault();
});

View File

@@ -209,10 +209,73 @@ test('runSubsyncManual constructs ffsubsync command and returns success', async
assert.ok(ffArgs.includes(primaryPath));
assert.ok(ffArgs.includes('--reference-stream'));
assert.ok(ffArgs.includes('0:2'));
const ffOutputFlagIndex = ffArgs.indexOf('-o');
assert.equal(ffOutputFlagIndex >= 0, true);
assert.equal(ffArgs[ffOutputFlagIndex + 1], primaryPath);
assert.equal(sentCommands[0]?.[0], 'sub_add');
assert.deepEqual(sentCommands[1], ['set_property', 'sub-delay', 0]);
});
test('runSubsyncManual writes deterministic _retimed filename when replace is false', async () => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'subsync-ffsubsync-no-replace-'));
const ffsubsyncLogPath = path.join(tmpDir, 'ffsubsync-args.log');
const ffsubsyncPath = path.join(tmpDir, 'ffsubsync.sh');
const ffmpegPath = path.join(tmpDir, 'ffmpeg.sh');
const alassPath = path.join(tmpDir, 'alass.sh');
const videoPath = path.join(tmpDir, 'video.mkv');
const primaryPath = path.join(tmpDir, 'episode.ja.srt');
fs.writeFileSync(videoPath, 'video');
fs.writeFileSync(primaryPath, 'sub');
writeExecutableScript(ffmpegPath, '#!/bin/sh\nexit 0\n');
writeExecutableScript(alassPath, '#!/bin/sh\nexit 0\n');
writeExecutableScript(
ffsubsyncPath,
`#!/bin/sh\n: > "${ffsubsyncLogPath}"\nfor arg in "$@"; do printf '%s\\n' "$arg" >> "${ffsubsyncLogPath}"; done\nout=\"\"\nprev=\"\"\nfor arg in \"$@\"; do\n if [ \"$prev\" = \"-o\" ]; then out=\"$arg\"; fi\n prev=\"$arg\"\ndone\nif [ -n \"$out\" ]; then : > \"$out\"; fi\nexit 0\n`,
);
const deps = makeDeps({
getMpvClient: () => ({
connected: true,
currentAudioStreamIndex: null,
send: () => {},
requestProperty: async (name: string) => {
if (name === 'path') return videoPath;
if (name === 'sid') return 1;
if (name === 'secondary-sid') return null;
if (name === 'track-list') {
return [
{
id: 1,
type: 'sub',
selected: true,
external: true,
'external-filename': primaryPath,
},
];
}
return null;
},
}),
getResolvedConfig: () => ({
defaultMode: 'manual',
alassPath,
ffsubsyncPath,
ffmpegPath,
replace: false,
}),
});
const result = await runSubsyncManual({ engine: 'ffsubsync', sourceTrackId: null }, deps);
assert.equal(result.ok, true);
const ffArgs = fs.readFileSync(ffsubsyncLogPath, 'utf8').trim().split('\n');
const ffOutputFlagIndex = ffArgs.indexOf('-o');
assert.equal(ffOutputFlagIndex >= 0, true);
const outputPath = ffArgs[ffOutputFlagIndex + 1];
assert.equal(outputPath, path.join(tmpDir, 'episode.ja_retimed.srt'));
});
test('runSubsyncManual constructs alass command and returns failure on non-zero exit', async () => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'subsync-alass-'));
const alassLogPath = path.join(tmpDir, 'alass-args.log');
@@ -281,6 +344,76 @@ test('runSubsyncManual constructs alass command and returns failure on non-zero
assert.equal(alassArgs[1], primaryPath);
});
test('runSubsyncManual keeps internal alass source file alive until sync finishes', async () => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'subsync-alass-internal-source-'));
const alassPath = path.join(tmpDir, 'alass.sh');
const ffmpegPath = path.join(tmpDir, 'ffmpeg.sh');
const ffsubsyncPath = path.join(tmpDir, 'ffsubsync.sh');
const videoPath = path.join(tmpDir, 'video.mkv');
const primaryPath = path.join(tmpDir, 'primary.srt');
fs.writeFileSync(videoPath, 'video');
fs.writeFileSync(primaryPath, 'sub');
writeExecutableScript(ffsubsyncPath, '#!/bin/sh\nexit 0\n');
writeExecutableScript(
ffmpegPath,
'#!/bin/sh\nout=""\nfor arg in "$@"; do out="$arg"; done\nif [ -n "$out" ]; then : > "$out"; fi\nexit 0\n',
);
writeExecutableScript(
alassPath,
'#!/bin/sh\nsleep 0.2\nif [ ! -f "$1" ]; then echo "missing reference subtitle" >&2; exit 1; fi\nif [ ! -f "$2" ]; then echo "missing input subtitle" >&2; exit 1; fi\n: > "$3"\nexit 0\n',
);
const sentCommands: Array<Array<string | number>> = [];
const deps = makeDeps({
getMpvClient: () => ({
connected: true,
currentAudioStreamIndex: null,
send: (payload) => {
sentCommands.push(payload.command);
},
requestProperty: async (name: string) => {
if (name === 'path') return videoPath;
if (name === 'sid') return 1;
if (name === 'secondary-sid') return null;
if (name === 'track-list') {
return [
{
id: 1,
type: 'sub',
selected: true,
external: true,
'external-filename': primaryPath,
},
{
id: 2,
type: 'sub',
selected: false,
external: false,
'ff-index': 2,
codec: 'ass',
},
];
}
return null;
},
}),
getResolvedConfig: () => ({
defaultMode: 'manual',
alassPath,
ffsubsyncPath,
ffmpegPath,
}),
});
const result = await runSubsyncManual({ engine: 'alass', sourceTrackId: 2 }, deps);
assert.equal(result.ok, true);
assert.equal(result.message, 'Subtitle synchronized with alass');
assert.equal(sentCommands[0]?.[0], 'sub_add');
assert.deepEqual(sentCommands[1], ['set_property', 'sub-delay', 0]);
});
test('runSubsyncManual resolves string sid values from mpv stream properties', async () => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'subsync-stream-sid-'));
const ffsubsyncPath = path.join(tmpDir, 'ffsubsync.sh');

View File

@@ -215,10 +215,10 @@ function cleanupTemporaryFile(extraction: FileExtractionResult): void {
} catch {}
}
function buildRetimedPath(subPath: string): string {
function buildRetimedPath(subPath: string, replace: boolean): string {
if (replace) return subPath;
const parsed = path.parse(subPath);
const suffix = `_retimed_${Date.now()}`;
return path.join(parsed.dir, `${parsed.name}${suffix}${parsed.ext || '.srt'}`);
return path.join(parsed.dir, `${parsed.name}_retimed${parsed.ext || '.srt'}`);
}
async function runAlassSync(
@@ -265,7 +265,8 @@ async function subsyncToReference(
context.videoPath,
context.primaryTrack,
);
const outputPath = buildRetimedPath(primaryExtraction.path);
const replacePrimary = resolved.replace !== false && !primaryExtraction.temporary;
const outputPath = buildRetimedPath(primaryExtraction.path, replacePrimary);
try {
let result: CommandResult;
@@ -389,7 +390,7 @@ export async function runSubsyncManual(
let sourceExtraction: FileExtractionResult | null = null;
try {
sourceExtraction = await extractSubtitleTrackToFile(ffmpegPath, context.videoPath, sourceTrack);
return subsyncToReference('alass', sourceExtraction.path, context, resolved, client);
return await subsyncToReference('alass', sourceExtraction.path, context, resolved, client);
} finally {
if (sourceExtraction) {
cleanupTemporaryFile(sourceExtraction);

View File

@@ -0,0 +1,122 @@
import assert from 'node:assert/strict';
import test from 'node:test';
import { createShiftSubtitleDelayToAdjacentCueHandler } from './subtitle-delay-shift';
/**
 * Test helper: builds a minimal connected mpv client stub whose
 * requestProperty resolves values straight from the given props map.
 */
function createMpvClient(props: Record<string, unknown>) {
  const requestProperty = async (name: string) => props[name];
  return {
    connected: true,
    requestProperty,
  };
}
test('shift subtitle delay to next cue using active external srt track', async () => {
const commands: Array<Array<string | number>> = [];
const osd: string[] = [];
let loadCount = 0;
const handler = createShiftSubtitleDelayToAdjacentCueHandler({
getMpvClient: () =>
createMpvClient({
'track-list': [
{
type: 'sub',
id: 2,
external: true,
'external-filename': '/tmp/subs.srt',
},
],
sid: 2,
'sub-start': 3.0,
}),
loadSubtitleSourceText: async () => {
loadCount += 1;
return `1
00:00:01,000 --> 00:00:02,000
line-1
2
00:00:03,000 --> 00:00:04,000
line-2
3
00:00:05,000 --> 00:00:06,000
line-3`;
},
sendMpvCommand: (command) => commands.push(command),
showMpvOsd: (text) => osd.push(text),
});
await handler('next');
await handler('next');
assert.equal(loadCount, 1);
assert.equal(commands.length, 2);
const delta = commands[0]?.[2];
assert.equal(commands[0]?.[0], 'add');
assert.equal(commands[0]?.[1], 'sub-delay');
assert.equal(typeof delta, 'number');
assert.equal(Math.abs((delta as number) - 2) < 0.0001, true);
assert.deepEqual(osd, ['Subtitle delay: ${sub-delay}', 'Subtitle delay: ${sub-delay}']);
});
test('shift subtitle delay to previous cue using active external ass track', async () => {
const commands: Array<Array<string | number>> = [];
const handler = createShiftSubtitleDelayToAdjacentCueHandler({
getMpvClient: () =>
createMpvClient({
'track-list': [
{
type: 'sub',
id: 4,
external: true,
'external-filename': '/tmp/subs.ass',
},
],
sid: 4,
'sub-start': 2.0,
}),
loadSubtitleSourceText: async () => `[Events]
Dialogue: 0,0:00:00.50,0:00:01.50,Default,,0,0,0,,line-1
Dialogue: 0,0:00:02.00,0:00:03.00,Default,,0,0,0,,line-2
Dialogue: 0,0:00:04.00,0:00:05.00,Default,,0,0,0,,line-3`,
sendMpvCommand: (command) => commands.push(command),
showMpvOsd: () => {},
});
await handler('previous');
const delta = commands[0]?.[2];
assert.equal(typeof delta, 'number');
assert.equal(Math.abs((delta as number) + 1.5) < 0.0001, true);
});
test('shift subtitle delay throws when no next cue exists', async () => {
const handler = createShiftSubtitleDelayToAdjacentCueHandler({
getMpvClient: () =>
createMpvClient({
'track-list': [
{
type: 'sub',
id: 1,
external: true,
'external-filename': '/tmp/subs.vtt',
},
],
sid: 1,
'sub-start': 5.0,
}),
loadSubtitleSourceText: async () => `WEBVTT
00:00:01.000 --> 00:00:02.000
line-1
00:00:03.000 --> 00:00:04.000
line-2
00:00:05.000 --> 00:00:06.000
line-3`,
sendMpvCommand: () => {},
showMpvOsd: () => {},
});
await assert.rejects(() => handler('next'), /No next subtitle cue found/);
});

View File

@@ -0,0 +1,203 @@
type SubtitleDelayShiftDirection = 'next' | 'previous';
type MpvClientLike = {
connected: boolean;
requestProperty: (name: string) => Promise<unknown>;
};
type MpvSubtitleTrackLike = {
type?: unknown;
id?: unknown;
external?: unknown;
'external-filename'?: unknown;
};
type SubtitleCueCacheEntry = {
starts: number[];
};
type SubtitleDelayShiftDeps = {
getMpvClient: () => MpvClientLike | null;
loadSubtitleSourceText: (source: string) => Promise<string>;
sendMpvCommand: (command: Array<string | number>) => void;
showMpvOsd: (text: string) => void;
};
/**
 * Coerces an mpv track id (integer number or numeric string) to a number.
 *
 * Returns null for anything that is not an integer. Blank strings are
 * rejected explicitly: Number('') and Number('   ') evaluate to 0, so
 * without the guard an empty sid would be misreported as track 0.
 */
function asTrackId(value: unknown): number | null {
  if (typeof value === 'number' && Number.isInteger(value)) return value;
  if (typeof value === 'string') {
    const trimmed = value.trim();
    if (trimmed === '') return null;
    const parsed = Number(trimmed);
    if (Number.isInteger(parsed)) return parsed;
  }
  return null;
}
/**
 * Extracts cue start times (seconds) from SRT/WebVTT content by scanning
 * each line for a "HH:MM:SS,mmm --> ..." timing row. The hour field is
 * optional (WebVTT allows MM:SS.mmm) and short millisecond fields are
 * right-padded, so "5" is read as 500ms.
 */
function parseSrtOrVttStartTimes(content: string): number[] {
  const timingPattern =
    /^\s*(?:(\d{1,2}):)?(\d{2}):(\d{2})[,.](\d{1,3})\s*-->\s*(?:(\d{1,2}):)?(\d{2}):(\d{2})[,.](\d{1,3})/;
  const starts: number[] = [];
  for (const line of content.split(/\r?\n/)) {
    const timing = timingPattern.exec(line);
    if (timing === null) continue;
    const hours = Number(timing[1] || 0);
    const minutes = Number(timing[2] || 0);
    const seconds = Number(timing[3] || 0);
    const millis = Number(String(timing[4]).padEnd(3, '0'));
    starts.push(hours * 3600 + minutes * 60 + seconds + millis / 1000);
  }
  return starts;
}
/**
 * Extracts cue start times (seconds) from ASS/SSA content by scanning each
 * line for a "Dialogue: ...,H:MM:SS.cc,H:MM:SS.cc,..." event row; only the
 * start timestamp (first captured field) is used.
 */
function parseAssStartTimes(content: string): number[] {
  const dialoguePattern =
    /^Dialogue:[^,]*,(\d+:\d{2}:\d{2}\.\d{1,2}),\d+:\d{2}:\d{2}\.\d{1,2},/;
  const starts: number[] = [];
  for (const line of content.split(/\r?\n/)) {
    const match = dialoguePattern.exec(line);
    if (match === null) continue;
    const fields = match[1]!.split(':');
    const secondsField = fields[2];
    if (secondsField === undefined) continue;
    const dotIndex = secondsField.indexOf('.');
    const wholePart = dotIndex === -1 ? secondsField : secondsField.slice(0, dotIndex);
    const fractionDigits = dotIndex === -1 ? '0' : secondsField.slice(dotIndex + 1);
    const total =
      Number(fields[0]) * 3600 +
      Number(fields[1]) * 60 +
      Number(wholePart) +
      Number(`0.${fractionDigits}`);
    starts.push(total);
  }
  return starts;
}
/**
 * Sorts cue start times ascending, drops non-finite/negative values, and
 * collapses near-duplicates (within 0.5ms) so stacked cues at the same
 * timestamp count as a single navigation target.
 */
function normalizeCueStarts(starts: number[]): number[] {
  const NEAR_DUPLICATE_EPSILON = 0.0005;
  const ordered = starts
    .filter((value) => Number.isFinite(value) && value >= 0)
    .sort((a, b) => a - b);
  const result: number[] = [];
  for (const value of ordered) {
    if (result.length === 0 || Math.abs(value - result[result.length - 1]!) > NEAR_DUPLICATE_EPSILON) {
      result.push(value);
    }
  }
  return result;
}
/**
 * Parses cue start times from subtitle text, choosing the parser by the
 * source path's extension (query string stripped): .ass/.ssa try the ASS
 * parser first, everything else tries SRT/VTT first; the other format is
 * used as a fallback when the primary yields nothing.
 * Throws when no cue timings can be recovered at all.
 */
function parseCueStarts(content: string, source: string): number[] {
  const sourceKey = source.toLowerCase().split('?')[0] || '';
  const isAssLike = sourceKey.endsWith('.ass') || sourceKey.endsWith('.ssa');
  const primaryParser = isAssLike ? parseAssStartTimes : parseSrtOrVttStartTimes;
  const fallbackParser = isAssLike ? parseSrtOrVttStartTimes : parseAssStartTimes;
  let starts = primaryParser(content);
  if (starts.length === 0) {
    starts = fallbackParser(content);
  }
  const normalized = normalizeCueStarts(starts);
  if (normalized.length === 0) {
    throw new Error('Could not parse subtitle cue timings from active subtitle source.');
  }
  return normalized;
}
/**
 * Resolves the on-disk path of the currently selected subtitle track from
 * mpv's raw track-list and sid properties.
 * Throws a descriptive error when no track is selected, the track list is
 * malformed, the active track is internal (no file to read), or the
 * external-filename field is blank.
 */
function getActiveSubtitleSource(trackListRaw: unknown, sidRaw: unknown): string {
  const sid = asTrackId(sidRaw);
  if (sid === null) {
    throw new Error('No active subtitle track selected.');
  }
  if (!Array.isArray(trackListRaw)) {
    throw new Error('Could not inspect subtitle track list.');
  }
  let activeTrack: MpvSubtitleTrackLike | undefined;
  for (const entry of trackListRaw) {
    if (!entry || typeof entry !== 'object') continue;
    const candidate = entry as MpvSubtitleTrackLike;
    if (candidate.type === 'sub' && asTrackId(candidate.id) === sid) {
      activeTrack = candidate;
      break;
    }
  }
  if (!activeTrack) {
    throw new Error('No active subtitle track found in mpv track list.');
  }
  if (activeTrack.external !== true) {
    throw new Error('Active subtitle track is internal and has no direct subtitle file source.');
  }
  const rawFilename = activeTrack['external-filename'];
  const source = typeof rawFilename === 'string' ? rawFilename.trim() : '';
  if (!source) {
    throw new Error('Active subtitle track has no external subtitle source path.');
  }
  return source;
}
/**
 * Returns the start time of the cue adjacent to currentStart in the given
 * direction. A 0.5ms epsilon keeps the cue at currentStart itself from
 * matching. Throws when no cue exists in that direction.
 */
function findAdjacentCueStart(
  starts: number[],
  currentStart: number,
  direction: SubtitleDelayShiftDirection,
): number {
  const EPSILON = 0.0005;
  if (direction === 'next') {
    for (const start of starts) {
      if (start > currentStart + EPSILON) return start;
    }
    throw new Error('No next subtitle cue found for active subtitle source.');
  }
  // 'previous': keep the last (highest-index) start strictly before current.
  let previousStart: number | null = null;
  for (const start of starts) {
    if (start < currentStart - EPSILON) previousStart = start;
  }
  if (previousStart !== null) return previousStart;
  throw new Error('No previous subtitle cue found for active subtitle source.');
}
/**
 * Builds the handler that shifts mpv's sub-delay so the cue adjacent to the
 * currently displayed one (next/previous) lines up with the present moment.
 * Cue timings are parsed once per subtitle source path and memoized for the
 * lifetime of the handler; a relative 'add sub-delay' command is sent and an
 * OSD message (with mpv's own ${sub-delay} expansion) is shown afterwards.
 * Throws when mpv is disconnected, the current cue start is unknown, the
 * active track has no readable source, or no adjacent cue exists.
 */
export function createShiftSubtitleDelayToAdjacentCueHandler(deps: SubtitleDelayShiftDeps) {
  const cueStartsBySource = new Map<string, SubtitleCueCacheEntry>();
  // Loads + parses cue starts for a source, reusing the cached result.
  const resolveCueStarts = async (source: string): Promise<number[]> => {
    const cached = cueStartsBySource.get(source);
    if (cached) return cached.starts;
    const content = await deps.loadSubtitleSourceText(source);
    const starts = parseCueStarts(content, source);
    cueStartsBySource.set(source, { starts });
    return starts;
  };
  return async (direction: SubtitleDelayShiftDirection): Promise<void> => {
    const client = deps.getMpvClient();
    if (!client || !client.connected) {
      throw new Error('MPV not connected.');
    }
    const [trackListRaw, sidRaw, subStartRaw] = await Promise.all([
      client.requestProperty('track-list'),
      client.requestProperty('sid'),
      client.requestProperty('sub-start'),
    ]);
    if (typeof subStartRaw !== 'number' || !Number.isFinite(subStartRaw)) {
      throw new Error('Current subtitle start time is unavailable.');
    }
    const source = getActiveSubtitleSource(trackListRaw, sidRaw);
    const cueStarts = await resolveCueStarts(source);
    const targetStart = findAdjacentCueStart(cueStarts, subStartRaw, direction);
    deps.sendMpvCommand(['add', 'sub-delay', targetStart - subStartRaw]);
    deps.showMpvOsd('Subtitle delay: ${sub-delay}');
  };
}

View File

@@ -297,6 +297,97 @@ test('tokenizeSubtitle starts Yomitan frequency lookup and MeCab enrichment in p
assert.equal(result.tokens?.[0]?.frequencyRank, 77);
});
test('tokenizeSubtitle can signal tokenization-ready before enrichment completes', async () => {
const frequencyDeferred = createDeferred<unknown[]>();
const mecabDeferred = createDeferred<null>();
let tokenizationReadyText: string | null = null;
const pendingResult = tokenizeSubtitle(
'猫',
makeDeps({
onTokenizationReady: (text) => {
tokenizationReadyText = text;
},
getFrequencyDictionaryEnabled: () => true,
getYomitanExt: () => ({ id: 'dummy-ext' }) as any,
getYomitanParserWindow: () =>
({
isDestroyed: () => false,
webContents: {
executeJavaScript: async (script: string) => {
if (script.includes('getTermFrequencies')) {
return await frequencyDeferred.promise;
}
return [
{
source: 'scanning-parser',
index: 0,
content: [
[
{
text: '猫',
reading: 'ねこ',
headwords: [[{ term: '猫' }]],
},
],
],
},
];
},
},
}) as unknown as Electron.BrowserWindow,
tokenizeWithMecab: async () => {
return await mecabDeferred.promise;
},
}),
);
await new Promise((resolve) => setTimeout(resolve, 0));
assert.equal(tokenizationReadyText, '猫');
frequencyDeferred.resolve([]);
mecabDeferred.resolve(null);
await pendingResult;
});
test('tokenizeSubtitle appends trailing kana to merged Yomitan readings when headword equals surface', async () => {
const result = await tokenizeSubtitle(
'断じて見ていない',
makeDeps({
getYomitanExt: () => ({ id: 'dummy-ext' }) as any,
getYomitanParserWindow: () =>
({
isDestroyed: () => false,
webContents: {
executeJavaScript: async () => [
{
source: 'scanning-parser',
index: 0,
content: [
[
{ text: '断', reading: 'だん', headwords: [[{ term: '断じて' }]] },
{ text: 'じて', reading: '', headwords: [[{ term: 'じて' }]] },
],
[
{ text: '見', reading: 'み', headwords: [[{ term: '見る' }]] },
{ text: 'ていない', reading: '', headwords: [[{ term: 'ていない' }]] },
],
],
},
],
},
}) as unknown as Electron.BrowserWindow,
}),
);
assert.equal(result.tokens?.length, 2);
assert.equal(result.tokens?.[0]?.surface, '断じて');
assert.equal(result.tokens?.[0]?.reading, 'だんじて');
assert.equal(result.tokens?.[1]?.surface, '見ていない');
assert.equal(result.tokens?.[1]?.reading, 'み');
});
test('tokenizeSubtitle queries headword frequencies with token reading for disambiguation', async () => {
const result = await tokenizeSubtitle(
'鍛えた',
@@ -309,6 +400,11 @@ test('tokenizeSubtitle queries headword frequencies with token reading for disam
webContents: {
executeJavaScript: async (script: string) => {
if (script.includes('getTermFrequencies')) {
assert.equal(
script.includes('"term":"鍛える","reading":null'),
false,
'should not eagerly include term-only fallback pair when reading lookup is present',
);
if (!script.includes('"term":"鍛える","reading":"きた"')) {
return [];
}
@@ -351,6 +447,58 @@ test('tokenizeSubtitle queries headword frequencies with token reading for disam
assert.equal(result.tokens?.[0]?.frequencyRank, 2847);
});
test('tokenizeSubtitle falls back to term-only Yomitan frequency lookup when reading is noisy', async () => {
const result = await tokenizeSubtitle(
'断じて',
makeDeps({
getFrequencyDictionaryEnabled: () => true,
getYomitanExt: () => ({ id: 'dummy-ext' }) as any,
getYomitanParserWindow: () =>
({
isDestroyed: () => false,
webContents: {
executeJavaScript: async (script: string) => {
if (script.includes('getTermFrequencies')) {
if (!script.includes('"term":"断じて","reading":null')) {
return [];
}
return [
{
term: '断じて',
reading: null,
dictionary: 'freq-dict',
frequency: 7082,
displayValue: '7082',
displayValueParsed: true,
},
];
}
return [
{
source: 'scanning-parser',
index: 0,
content: [
[
{
text: '断じて',
reading: 'だん',
headwords: [[{ term: '断じて' }]],
},
],
],
},
];
},
},
}) as unknown as Electron.BrowserWindow,
}),
);
assert.equal(result.tokens?.length, 1);
assert.equal(result.tokens?.[0]?.frequencyRank, 7082);
});
test('tokenizeSubtitle avoids headword term-only fallback rank when reading-specific frequency exists', async () => {
const result = await tokenizeSubtitle(
'無人',
@@ -824,6 +972,34 @@ test('tokenizeSubtitle skips frequency rank when Yomitan token is enriched as pa
assert.equal(result.tokens?.[0]?.frequencyRank, undefined);
});
test('tokenizeSubtitle keeps frequency rank when mecab tags classify token as content-bearing', async () => {
const result = await tokenizeSubtitle(
'ふふ',
makeDepsFromYomitanTokens([{ surface: 'ふふ', reading: '', headword: 'ふふ' }], {
getFrequencyDictionaryEnabled: () => true,
getFrequencyRank: (text) => (text === 'ふふ' ? 3014 : null),
tokenizeWithMecab: async () => [
{
headword: 'ふふ',
surface: 'ふふ',
reading: 'フフ',
startPos: 0,
endPos: 2,
partOfSpeech: PartOfSpeech.verb,
pos1: '動詞',
pos2: '自立',
isMerged: false,
isKnown: false,
isNPlusOneTarget: false,
},
],
}),
);
assert.equal(result.tokens?.length, 1);
assert.equal(result.tokens?.[0]?.frequencyRank, 3014);
});
test('tokenizeSubtitle ignores invalid frequency ranks', async () => {
const result = await tokenizeSubtitle(
'猫',
@@ -995,6 +1171,106 @@ test('tokenizeSubtitle returns null tokens when Yomitan parsing is unavailable',
assert.deepEqual(result, { text: '猫です', tokens: null });
});
test('tokenizeSubtitle skips token payload and annotations when Yomitan parse has no dictionary matches', async () => {
let frequencyRequested = false;
let jlptLookupCalls = 0;
let mecabCalls = 0;
const result = await tokenizeSubtitle(
'これはテスト',
makeDeps({
getFrequencyDictionaryEnabled: () => true,
getYomitanExt: () => ({ id: 'dummy-ext' }) as any,
getYomitanParserWindow: () =>
({
isDestroyed: () => false,
webContents: {
executeJavaScript: async (script: string) => {
if (script.includes('getTermFrequencies')) {
frequencyRequested = true;
return [];
}
return [
{
source: 'scanning-parser',
index: 0,
content: [
[{ text: 'これは', reading: 'これは' }],
[{ text: 'テスト', reading: 'てすと' }],
],
},
];
},
},
}) as unknown as Electron.BrowserWindow,
tokenizeWithMecab: async () => {
mecabCalls += 1;
return null;
},
getJlptLevel: () => {
jlptLookupCalls += 1;
return 'N5';
},
}),
);
assert.deepEqual(result, { text: 'これはテスト', tokens: null });
assert.equal(frequencyRequested, false);
assert.equal(jlptLookupCalls, 0);
assert.equal(mecabCalls, 0);
});
test('tokenizeSubtitle excludes Yomitan token groups without dictionary headwords from annotation paths', async () => {
let jlptLookupCalls = 0;
let frequencyLookupCalls = 0;
const result = await tokenizeSubtitle(
'(ダクネスの荒い息) 猫',
makeDeps({
getFrequencyDictionaryEnabled: () => true,
getYomitanExt: () => ({ id: 'dummy-ext' }) as any,
getYomitanParserWindow: () =>
({
isDestroyed: () => false,
webContents: {
executeJavaScript: async (script: string) => {
if (script.includes('getTermFrequencies')) {
return [];
}
return [
{
source: 'scanning-parser',
index: 0,
content: [
[{ text: '(ダクネスの荒い息)', reading: 'だくねすのあらいいき' }],
[{ text: '猫', reading: 'ねこ', headwords: [[{ term: '猫' }]] }],
],
},
];
},
},
}) as unknown as Electron.BrowserWindow,
getJlptLevel: (text) => {
jlptLookupCalls += 1;
return text === '猫' ? 'N5' : null;
},
getFrequencyRank: () => {
frequencyLookupCalls += 1;
return 12;
},
tokenizeWithMecab: async () => null,
}),
);
assert.equal(result.tokens?.length, 1);
assert.equal(result.tokens?.[0]?.surface, '猫');
assert.equal(result.tokens?.[0]?.headword, '猫');
assert.equal(jlptLookupCalls, 1);
assert.equal(frequencyLookupCalls, 1);
});
test('tokenizeSubtitle returns null tokens when mecab throws', async () => {
const result = await tokenizeSubtitle(
'猫です',
@@ -1008,7 +1284,7 @@ test('tokenizeSubtitle returns null tokens when mecab throws', async () => {
assert.deepEqual(result, { text: '猫です', tokens: null });
});
test('tokenizeSubtitle uses Yomitan parser result when available', async () => {
test('tokenizeSubtitle uses Yomitan parser result when available and drops no-headword groups', async () => {
const parserWindow = {
isDestroyed: () => false,
webContents: {
@@ -1046,13 +1322,10 @@ test('tokenizeSubtitle uses Yomitan parser result when available', async () => {
);
assert.equal(result.text, '猫です');
assert.equal(result.tokens?.length, 2);
assert.equal(result.tokens?.length, 1);
assert.equal(result.tokens?.[0]?.surface, '猫');
assert.equal(result.tokens?.[0]?.reading, 'ねこ');
assert.equal(result.tokens?.[0]?.isKnown, false);
assert.equal(result.tokens?.[1]?.surface, 'です');
assert.equal(result.tokens?.[1]?.reading, 'です');
assert.equal(result.tokens?.[1]?.isKnown, false);
});
test('tokenizeSubtitle logs selected Yomitan groups when debug toggle is enabled', async () => {
@@ -2014,6 +2287,48 @@ test('createTokenizerDepsRuntime checks MeCab availability before first tokenize
assert.equal(second?.[0]?.surface, '仮面');
});
test('createTokenizerDepsRuntime skips known-word lookup for MeCab POS enrichment tokens', async () => {
let knownWordCalls = 0;
const deps = createTokenizerDepsRuntime({
getYomitanExt: () => null,
getYomitanParserWindow: () => null,
setYomitanParserWindow: () => {},
getYomitanParserReadyPromise: () => null,
setYomitanParserReadyPromise: () => {},
getYomitanParserInitPromise: () => null,
setYomitanParserInitPromise: () => {},
isKnownWord: () => {
knownWordCalls += 1;
return true;
},
getKnownWordMatchMode: () => 'headword',
getJlptLevel: () => null,
getMecabTokenizer: () => ({
tokenize: async () => [
{
word: '仮面',
partOfSpeech: PartOfSpeech.noun,
pos1: '名詞',
pos2: '一般',
pos3: '',
pos4: '',
inflectionType: '',
inflectionForm: '',
headword: '仮面',
katakanaReading: 'カメン',
pronunciation: 'カメン',
},
],
}),
});
const tokens = await deps.tokenizeWithMecab('仮面');
assert.equal(knownWordCalls, 0);
assert.equal(tokens?.[0]?.isKnown, false);
});
test('tokenizeSubtitle uses async MeCab enrichment override when provided', async () => {
const result = await tokenizeSubtitle(
'猫',
@@ -2180,7 +2495,6 @@ test('tokenizeSubtitle keeps frequency enrichment while n+1 is disabled', async
assert.equal(frequencyCalls, 1);
});
test('tokenizeSubtitle excludes default non-independent pos2 from N+1 and frequency annotations', async () => {
const result = await tokenizeSubtitle(
'になれば',
@@ -2211,7 +2525,7 @@ test('tokenizeSubtitle excludes default non-independent pos2 from N+1 and freque
assert.equal(result.tokens?.[0]?.isNPlusOneTarget, false);
});
test('tokenizeSubtitle keeps merged token when overlap contains at least one content pos1 tag', async () => {
test('tokenizeSubtitle excludes merged function/content token from frequency highlighting but keeps N+1', async () => {
const result = await tokenizeSubtitle(
'になれば',
makeDepsFromYomitanTokens([{ surface: 'になれば', reading: 'になれば', headword: 'なる' }], {
@@ -2264,7 +2578,7 @@ test('tokenizeSubtitle keeps merged token when overlap contains at least one con
assert.equal(result.tokens?.length, 1);
assert.equal(result.tokens?.[0]?.pos1, '助詞|動詞');
assert.equal(result.tokens?.[0]?.frequencyRank, 13);
assert.equal(result.tokens?.[0]?.frequencyRank, undefined);
assert.equal(result.tokens?.[0]?.isNPlusOneTarget, true);
});

View File

@@ -51,6 +51,7 @@ export interface TokenizerServiceDeps {
getYomitanGroupDebugEnabled?: () => boolean;
tokenizeWithMecab: (text: string) => Promise<MergedToken[] | null>;
enrichTokensWithMecab?: MecabTokenEnrichmentFn;
onTokenizationReady?: (text: string) => void;
}
interface MecabTokenizerLike {
@@ -78,6 +79,7 @@ export interface TokenizerDepsRuntimeOptions {
getMinSentenceWordsForNPlusOne?: () => number;
getYomitanGroupDebugEnabled?: () => boolean;
getMecabTokenizer: () => MecabTokenizerLike | null;
onTokenizationReady?: (text: string) => void;
}
interface TokenizerAnnotationOptions {
@@ -90,13 +92,14 @@ interface TokenizerAnnotationOptions {
pos2Exclusions: ReadonlySet<string>;
}
let parserEnrichmentWorkerRuntimeModulePromise:
| Promise<typeof import('./tokenizer/parser-enrichment-worker-runtime')>
| null = null;
let annotationStageModulePromise: Promise<typeof import('./tokenizer/annotation-stage')> | null = null;
let parserEnrichmentFallbackModulePromise:
| Promise<typeof import('./tokenizer/parser-enrichment-stage')>
| null = null;
let parserEnrichmentWorkerRuntimeModulePromise: Promise<
typeof import('./tokenizer/parser-enrichment-worker-runtime')
> | null = null;
let annotationStageModulePromise: Promise<typeof import('./tokenizer/annotation-stage')> | null =
null;
let parserEnrichmentFallbackModulePromise: Promise<
typeof import('./tokenizer/parser-enrichment-stage')
> | null = null;
const DEFAULT_ANNOTATION_POS1_EXCLUSIONS = resolveAnnotationPos1ExclusionSet(
DEFAULT_ANNOTATION_POS1_EXCLUSION_CONFIG,
);
@@ -104,7 +107,10 @@ const DEFAULT_ANNOTATION_POS2_EXCLUSIONS = resolveAnnotationPos2ExclusionSet(
DEFAULT_ANNOTATION_POS2_EXCLUSION_CONFIG,
);
function getKnownWordLookup(deps: TokenizerServiceDeps, options: TokenizerAnnotationOptions): (text: string) => boolean {
function getKnownWordLookup(
deps: TokenizerServiceDeps,
options: TokenizerAnnotationOptions,
): (text: string) => boolean {
if (!options.nPlusOneEnabled) {
return () => false;
}
@@ -124,7 +130,8 @@ async function enrichTokensWithMecabAsync(
mecabTokens: MergedToken[] | null,
): Promise<MergedToken[]> {
if (!parserEnrichmentWorkerRuntimeModulePromise) {
parserEnrichmentWorkerRuntimeModulePromise = import('./tokenizer/parser-enrichment-worker-runtime');
parserEnrichmentWorkerRuntimeModulePromise =
import('./tokenizer/parser-enrichment-worker-runtime');
}
try {
@@ -183,8 +190,7 @@ export function createTokenizerDepsRuntime(
getNPlusOneEnabled: options.getNPlusOneEnabled,
getJlptEnabled: options.getJlptEnabled,
getFrequencyDictionaryEnabled: options.getFrequencyDictionaryEnabled,
getFrequencyDictionaryMatchMode:
options.getFrequencyDictionaryMatchMode ?? (() => 'headword'),
getFrequencyDictionaryMatchMode: options.getFrequencyDictionaryMatchMode ?? (() => 'headword'),
getFrequencyRank: options.getFrequencyRank,
getMinSentenceWordsForNPlusOne: options.getMinSentenceWordsForNPlusOne ?? (() => 3),
getYomitanGroupDebugEnabled: options.getYomitanGroupDebugEnabled ?? (() => false),
@@ -211,11 +217,11 @@ export function createTokenizerDepsRuntime(
return null;
}
const isKnownWordLookup = options.getNPlusOneEnabled?.() === false ? () => false : options.isKnownWord;
return mergeTokens(rawTokens, isKnownWordLookup, options.getKnownWordMatchMode());
return mergeTokens(rawTokens, options.isKnownWord, options.getKnownWordMatchMode(), false);
},
enrichTokensWithMecab: async (tokens, mecabTokens) =>
enrichTokensWithMecabAsync(tokens, mecabTokens),
onTokenizationReady: options.onTokenizationReady,
};
}
@@ -249,6 +255,50 @@ function normalizeFrequencyLookupText(rawText: string): string {
return rawText.trim().toLowerCase();
}
/**
 * Reports whether the first code point of the string is hiragana, katakana,
 * or one of the kana iteration/sound marks (U+3041–3096, U+309B–309F,
 * U+30A0–30FA, U+30FD–30FF). Empty strings yield false.
 */
function isKanaChar(char: string): boolean {
  const code = char.codePointAt(0);
  if (code === undefined) return false;
  const inHiragana = code >= 0x3041 && code <= 0x3096;
  const inKanaMarks = code >= 0x309b && code <= 0x309f;
  const inKatakana = code >= 0x30a0 && code <= 0x30fa;
  const inKatakanaMarks = code >= 0x30fd && code <= 0x30ff;
  return inHiragana || inKanaMarks || inKatakana || inKatakanaMarks;
}
/**
 * Returns the run of kana characters at the end of the surface form, or ''
 * when the surface has no trailing kana or is kana throughout (an all-kana
 * surface has no kanji stem to append a suffix to).
 */
function getTrailingKanaSuffix(surface: string): string {
  const chars = Array.from(surface);
  let kanaRunLength = 0;
  while (
    kanaRunLength < chars.length &&
    isKanaChar(chars[chars.length - 1 - kanaRunLength]!)
  ) {
    kanaRunLength += 1;
  }
  if (kanaRunLength === 0 || kanaRunLength === chars.length) {
    return '';
  }
  return chars.slice(chars.length - kanaRunLength).join('');
}
function normalizeYomitanMergedReading(token: MergedToken): string {
const reading = token.reading ?? '';
if (!reading || token.headword !== token.surface) {
return reading;
}
const trailingKanaSuffix = getTrailingKanaSuffix(token.surface);
if (!trailingKanaSuffix || reading.endsWith(trailingKanaSuffix)) {
return reading;
}
return `${reading}${trailingKanaSuffix}`;
}
function normalizeSelectedYomitanTokens(tokens: MergedToken[]): MergedToken[] {
  // Shallow-copy each token with its reading run through
  // normalizeYomitanMergedReading; the input array and its tokens are not
  // mutated.
  const normalized: MergedToken[] = [];
  for (const token of tokens) {
    normalized.push({ ...token, reading: normalizeYomitanMergedReading(token) });
  }
  return normalized;
}
function resolveFrequencyLookupText(
token: MergedToken,
matchMode: FrequencyDictionaryMatchMode,
@@ -276,17 +326,19 @@ function buildYomitanFrequencyTermReadingList(
tokens: MergedToken[],
matchMode: FrequencyDictionaryMatchMode,
): Array<{ term: string; reading: string | null }> {
return tokens
.map((token) => {
const term = resolveFrequencyLookupText(token, matchMode).trim();
if (!term) {
return null;
}
const readingRaw =
token.reading && token.reading.trim().length > 0 ? token.reading.trim() : null;
return { term, reading: readingRaw };
})
.filter((pair): pair is { term: string; reading: string | null } => pair !== null);
const termReadingList: Array<{ term: string; reading: string | null }> = [];
for (const token of tokens) {
const term = resolveFrequencyLookupText(token, matchMode).trim();
if (!term) {
continue;
}
const readingRaw =
token.reading && token.reading.trim().length > 0 ? token.reading.trim() : null;
termReadingList.push({ term, reading: readingRaw });
}
return termReadingList;
}
function buildYomitanFrequencyRankMap(
@@ -300,7 +352,8 @@ function buildYomitanFrequencyRankMap(
continue;
}
const dictionaryPriority =
typeof frequency.dictionaryPriority === 'number' && Number.isFinite(frequency.dictionaryPriority)
typeof frequency.dictionaryPriority === 'number' &&
Number.isFinite(frequency.dictionaryPriority)
? Math.max(0, Math.floor(frequency.dictionaryPriority))
: Number.MAX_SAFE_INTEGER;
const current = rankByTerm.get(normalizedTerm);
@@ -427,19 +480,25 @@ async function parseWithYomitanInternalParser(
if (!selectedTokens || selectedTokens.length === 0) {
return null;
}
const normalizedSelectedTokens = normalizeSelectedYomitanTokens(selectedTokens);
if (deps.getYomitanGroupDebugEnabled?.() === true) {
logSelectedYomitanGroups(text, selectedTokens);
logSelectedYomitanGroups(text, normalizedSelectedTokens);
}
deps.onTokenizationReady?.(text);
const frequencyRankPromise: Promise<Map<string, number>> = options.frequencyEnabled
? (async () => {
const frequencyMatchMode = options.frequencyMatchMode;
const termReadingList = buildYomitanFrequencyTermReadingList(
selectedTokens,
normalizedSelectedTokens,
frequencyMatchMode,
);
const yomitanFrequencies = await requestYomitanTermFrequencies(termReadingList, deps, logger);
const yomitanFrequencies = await requestYomitanTermFrequencies(
termReadingList,
deps,
logger,
);
return buildYomitanFrequencyRankMap(yomitanFrequencies);
})()
: Promise.resolve(new Map<string, number>());
@@ -449,19 +508,19 @@ async function parseWithYomitanInternalParser(
try {
const mecabTokens = await deps.tokenizeWithMecab(text);
const enrichTokensWithMecab = deps.enrichTokensWithMecab ?? enrichTokensWithMecabAsync;
return await enrichTokensWithMecab(selectedTokens, mecabTokens);
return await enrichTokensWithMecab(normalizedSelectedTokens, mecabTokens);
} catch (err) {
const error = err as Error;
logger.warn(
'Failed to enrich Yomitan tokens with MeCab POS:',
error.message,
`tokenCount=${selectedTokens.length}`,
`tokenCount=${normalizedSelectedTokens.length}`,
`textLength=${text.length}`,
);
return selectedTokens;
return normalizedSelectedTokens;
}
})()
: Promise.resolve(selectedTokens);
: Promise.resolve(normalizedSelectedTokens);
const [yomitanRankByTerm, enrichedTokens] = await Promise.all([
frequencyRankPromise,

View File

@@ -314,6 +314,26 @@ test('annotateTokens excludes likely kana SFX tokens from frequency when POS tag
assert.equal(result[0]?.frequencyRank, undefined);
});
// A kana-only surface that mecab tags as an independent verb (動詞/自立) is
// content-bearing, so its frequency rank must survive annotation (unlike
// kana SFX tokens, which are excluded in the sibling test above this one).
test('annotateTokens keeps frequency when mecab tags classify token as content-bearing', () => {
const tokens = [
makeToken({
surface: 'ふふ',
headword: 'ふふ',
pos1: '動詞',
pos2: '自立',
frequencyRank: 3014,
startPos: 0,
endPos: 2,
}),
];
const result = annotateTokens(tokens, makeDeps(), {
minSentenceWordsForNPlusOne: 1,
});
assert.equal(result[0]?.frequencyRank, 3014);
});
test('annotateTokens allows previously default-excluded pos2 when removed from effective set', () => {
const tokens = [
makeToken({
@@ -337,7 +357,7 @@ test('annotateTokens allows previously default-excluded pos2 when removed from e
assert.equal(result[0]?.isNPlusOneTarget, true);
});
test('annotateTokens keeps composite tokens when any component pos tag is content-bearing', () => {
test('annotateTokens excludes composite function/content tokens from frequency but keeps N+1 eligible', () => {
const tokens = [
makeToken({
surface: 'になれば',
@@ -354,7 +374,7 @@ test('annotateTokens keeps composite tokens when any component pos tag is conten
minSentenceWordsForNPlusOne: 1,
});
assert.equal(result[0]?.frequencyRank, 5);
assert.equal(result[0]?.frequencyRank, undefined);
assert.equal(result[0]?.isNPlusOneTarget, true);
});

View File

@@ -73,8 +73,9 @@ function isExcludedByTagSet(normalizedTag: string, exclusions: ReadonlySet<strin
if (parts.length === 0) {
return false;
}
// Composite tags like "助詞|名詞" stay eligible unless every component is excluded.
return parts.every((part) => exclusions.has(part));
// Frequency highlighting should be conservative: if any merged component is excluded,
// skip highlighting the whole token to avoid noisy merged fragments.
return parts.some((part) => exclusions.has(part));
}
function resolvePos1Exclusions(options: AnnotationStageOptions): ReadonlySet<string> {

View File

@@ -39,6 +39,30 @@ test('enrichTokensWithMecabPos1 fills missing pos1 using surface-sequence fallba
assert.equal(enriched[0]?.pos1, '助詞');
});
// A merged surface ('これは') overlapping two mecab tokens should get their
// pos1 tags joined with '|' while the token's own partOfSpeech field is left
// untouched (it stays the makeToken default, PartOfSpeech.other).
test('enrichTokensWithMecabPos1 keeps partOfSpeech unchanged and only enriches POS tags', () => {
const tokens = [makeToken({ surface: 'これは', startPos: 0, endPos: 3 })];
const mecabTokens = [
makeToken({
surface: 'これ',
startPos: 0,
endPos: 2,
pos1: '名詞',
partOfSpeech: PartOfSpeech.noun,
}),
makeToken({
surface: 'は',
startPos: 2,
endPos: 3,
pos1: '助詞',
partOfSpeech: PartOfSpeech.particle,
}),
];
const enriched = enrichTokensWithMecabPos1(tokens, mecabTokens);
assert.equal(enriched[0]?.pos1, '名詞|助詞');
assert.equal(enriched[0]?.partOfSpeech, PartOfSpeech.other);
});
test('enrichTokensWithMecabPos1 passes through unchanged when mecab tokens are null or empty', () => {
const tokens = [makeToken({ surface: '猫', startPos: 0, endPos: 1 })];
@@ -48,3 +72,77 @@ test('enrichTokensWithMecabPos1 passes through unchanged when mecab tokens are n
const emptyResult = enrichTokensWithMecabPos1(tokens, []);
assert.strictEqual(emptyResult, tokens);
});
// Performance guard: a mecab token far away from every input token must not
// have its surface re-read once per input token. The getter trips after a
// small number of reads, so a quadratic rescan of the mecab list fails loudly.
test('enrichTokensWithMecabPos1 avoids repeated full scans over distant mecab surfaces', () => {
const tokens = Array.from({ length: 12 }, (_, index) =>
makeToken({ surface: `w${index}`, startPos: index, endPos: index + 1, pos1: '' }),
);
const mecabTokens = tokens.map((token) =>
makeToken({
surface: token.surface,
startPos: token.startPos,
endPos: token.endPos,
pos1: '名詞',
}),
);
let distantSurfaceReads = 0;
const distantToken = makeToken({ surface: '遠', startPos: 500, endPos: 501, pos1: '記号' });
// Instrumented surface accessor: tolerate a bounded number of reads (index
// construction), throw on anything that looks like a per-token rescan.
Object.defineProperty(distantToken, 'surface', {
configurable: true,
get() {
distantSurfaceReads += 1;
if (distantSurfaceReads > 3) {
throw new Error('repeated full scan detected');
}
return '遠';
},
});
mecabTokens.push(distantToken);
const enriched = enrichTokensWithMecabPos1(tokens, mecabTokens);
assert.equal(enriched.length, tokens.length);
for (const token of enriched) {
assert.equal(token.pos1, '名詞');
}
});
// Performance guard: the implementation must not re-filter the full mecab
// candidate list for every input token. Array.prototype.filter is patched
// globally (and restored in `finally`) to count filter passes over an array
// containing the SENTINEL token; more than two such passes fails the test.
test('enrichTokensWithMecabPos1 avoids repeated active-candidate filter scans', () => {
const tokens = Array.from({ length: 8 }, (_, index) =>
makeToken({ surface: `u${index}`, startPos: index, endPos: index + 1, pos1: '' }),
);
const mecabTokens = [
makeToken({ surface: 'SENTINEL', startPos: 0, endPos: 100, pos1: '記号' }),
...tokens.map((token, index) =>
makeToken({
surface: `m${index}`,
startPos: token.startPos,
endPos: token.endPos,
pos1: '名詞',
}),
),
];
let sentinelFilterCalls = 0;
const originalFilter = Array.prototype.filter;
Array.prototype.filter = function filterWithSentinelCheck(
this: unknown[],
...args: any[]
): any[] {
const target = this as Array<{ surface?: string }>;
if (target.some((candidate) => candidate?.surface === 'SENTINEL')) {
sentinelFilterCalls += 1;
if (sentinelFilterCalls > 2) {
throw new Error('repeated active candidate filter scan detected');
}
}
return (originalFilter as (...params: any[]) => any[]).apply(this, args);
} as typeof Array.prototype.filter;
try {
const enriched = enrichTokensWithMecabPos1(tokens, mecabTokens);
assert.equal(enriched.length, tokens.length);
} finally {
Array.prototype.filter = originalFilter;
}
});

View File

@@ -6,6 +6,120 @@ type MecabPosMetadata = {
pos3?: string;
};
// A mecab token projected into a flat, lookup-friendly record. `index` is the
// token's position in the original mecab output and is used as a deterministic
// tie-breaker when distances/overlaps are equal; `start`/`end` are character
// offsets of the surface within the parsed text.
type IndexedMecabToken = {
index: number;
start: number;
end: number;
surface: string;
pos1: string;
pos2?: string;
pos3?: string;
};
// Precomputed indexes over the mecab tokens so per-token matching avoids
// repeated full scans: lookup by exact surface, by whitespace-trimmed
// surface, and by each discrete character position a token covers.
type MecabLookup = {
indexedTokens: IndexedMecabToken[];
byExactSurface: Map<string, IndexedMecabToken[]>;
byTrimmedSurface: Map<string, IndexedMecabToken[]>;
byPosition: Map<number, IndexedMecabToken[]>;
};
function pushMapValue<K, T>(map: Map<K, T[]>, key: K, value: T): void {
  // Append `value` to the bucket stored under `key`, creating the bucket on
  // first insert. Mutates `map` in place.
  const bucket = map.get(key);
  if (bucket === undefined) {
    map.set(key, [value]);
  } else {
    bucket.push(value);
  }
}
function toDiscreteSpan(start: number, end: number): { start: number; end: number } {
  // Snap a possibly-fractional [start, end) span onto integer positions,
  // guaranteeing a width of at least one so that zero-length (or inverted)
  // spans still occupy a position bucket.
  const discreteStart = Math.floor(start);
  const discreteEnd = Math.max(discreteStart + 1, Math.ceil(end));
  return { start: discreteStart, end: discreteEnd };
}
function buildMecabLookup(mecabTokens: MergedToken[]): MecabLookup {
const indexedTokens: IndexedMecabToken[] = [];
for (const [index, token] of mecabTokens.entries()) {
const pos1 = token.pos1;
if (!pos1) {
continue;
}
const surface = token.surface;
const start = token.startPos ?? 0;
const end = token.endPos ?? start + surface.length;
indexedTokens.push({
index,
start,
end,
surface,
pos1,
pos2: token.pos2,
pos3: token.pos3,
});
}
const byExactSurface = new Map<string, IndexedMecabToken[]>();
const byTrimmedSurface = new Map<string, IndexedMecabToken[]>();
const byPosition = new Map<number, IndexedMecabToken[]>();
for (const token of indexedTokens) {
pushMapValue(byExactSurface, token.surface, token);
const trimmedSurface = token.surface.trim();
if (trimmedSurface) {
pushMapValue(byTrimmedSurface, trimmedSurface, token);
}
const discreteSpan = toDiscreteSpan(token.start, token.end);
for (let position = discreteSpan.start; position < discreteSpan.end; position += 1) {
pushMapValue(byPosition, position, token);
}
}
const byStartThenIndexSort = (left: IndexedMecabToken, right: IndexedMecabToken) =>
left.start - right.start || left.index - right.index;
for (const candidates of byExactSurface.values()) {
candidates.sort(byStartThenIndexSort);
}
return {
indexedTokens,
byExactSurface,
byTrimmedSurface,
byPosition,
};
}
function lowerBoundByStart(candidates: IndexedMecabToken[], targetStart: number): number {
  // Binary search: index of the first candidate whose `start` is >=
  // `targetStart`, or candidates.length when none qualifies. `candidates`
  // must already be sorted by `start`.
  let lo = 0;
  let hi = candidates.length;
  while (lo < hi) {
    const mid = (lo + hi) >>> 1;
    if (candidates[mid]!.start >= targetStart) {
      hi = mid;
    } else {
      lo = mid + 1;
    }
  }
  return lo;
}
function lowerBoundByIndex(candidates: IndexedMecabToken[], targetIndex: number): number {
  // Binary search: index of the first candidate whose `index` is >=
  // `targetIndex`, or candidates.length when none qualifies. `candidates`
  // must already be sorted by `index`.
  let lo = 0;
  let hi = candidates.length;
  while (lo < hi) {
    const mid = (lo + hi) >>> 1;
    if (candidates[mid]!.index >= targetIndex) {
      hi = mid;
    } else {
      lo = mid + 1;
    }
  }
  return lo;
}
function joinUniqueTags(values: Array<string | undefined>): string | undefined {
const unique: string[] = [];
for (const value of values) {
@@ -29,87 +143,129 @@ function joinUniqueTags(values: Array<string | undefined>): string | undefined {
return unique.join('|');
}
function pickClosestMecabPosMetadata(
function pickClosestMecabPosMetadataBySurface(
token: MergedToken,
mecabTokens: MergedToken[],
candidates: IndexedMecabToken[] | undefined,
): MecabPosMetadata | null {
if (mecabTokens.length === 0) {
if (!candidates || candidates.length === 0) {
return null;
}
const tokenStart = token.startPos ?? 0;
const tokenEnd = token.endPos ?? tokenStart + token.surface.length;
let bestSurfaceMatchToken: MergedToken | null = null;
let bestSurfaceMatchToken: IndexedMecabToken | null = null;
let bestSurfaceMatchDistance = Number.MAX_SAFE_INTEGER;
let bestSurfaceMatchEndDistance = Number.MAX_SAFE_INTEGER;
let bestSurfaceMatchIndex = Number.MAX_SAFE_INTEGER;
for (const mecabToken of mecabTokens) {
if (!mecabToken.pos1) {
continue;
const nearestStartIndex = lowerBoundByStart(candidates, tokenStart);
let left = nearestStartIndex - 1;
let right = nearestStartIndex;
while (left >= 0 || right < candidates.length) {
const leftDistance =
left >= 0 ? Math.abs(candidates[left]!.start - tokenStart) : Number.MAX_SAFE_INTEGER;
const rightDistance =
right < candidates.length
? Math.abs(candidates[right]!.start - tokenStart)
: Number.MAX_SAFE_INTEGER;
const nearestDistance = Math.min(leftDistance, rightDistance);
if (nearestDistance > bestSurfaceMatchDistance) {
break;
}
if (mecabToken.surface !== token.surface) {
continue;
if (leftDistance === nearestDistance && left >= 0) {
const candidate = candidates[left]!;
const startDistance = Math.abs(candidate.start - tokenStart);
const endDistance = Math.abs(candidate.end - tokenEnd);
if (
startDistance < bestSurfaceMatchDistance ||
(startDistance === bestSurfaceMatchDistance &&
(endDistance < bestSurfaceMatchEndDistance ||
(endDistance === bestSurfaceMatchEndDistance &&
candidate.index < bestSurfaceMatchIndex)))
) {
bestSurfaceMatchDistance = startDistance;
bestSurfaceMatchEndDistance = endDistance;
bestSurfaceMatchIndex = candidate.index;
bestSurfaceMatchToken = candidate;
}
left -= 1;
}
const mecabStart = mecabToken.startPos ?? 0;
const mecabEnd = mecabToken.endPos ?? mecabStart + mecabToken.surface.length;
const startDistance = Math.abs(mecabStart - tokenStart);
const endDistance = Math.abs(mecabEnd - tokenEnd);
if (
startDistance < bestSurfaceMatchDistance ||
(startDistance === bestSurfaceMatchDistance && endDistance < bestSurfaceMatchEndDistance)
) {
bestSurfaceMatchDistance = startDistance;
bestSurfaceMatchEndDistance = endDistance;
bestSurfaceMatchToken = mecabToken;
if (rightDistance === nearestDistance && right < candidates.length) {
const candidate = candidates[right]!;
const startDistance = Math.abs(candidate.start - tokenStart);
const endDistance = Math.abs(candidate.end - tokenEnd);
if (
startDistance < bestSurfaceMatchDistance ||
(startDistance === bestSurfaceMatchDistance &&
(endDistance < bestSurfaceMatchEndDistance ||
(endDistance === bestSurfaceMatchEndDistance &&
candidate.index < bestSurfaceMatchIndex)))
) {
bestSurfaceMatchDistance = startDistance;
bestSurfaceMatchEndDistance = endDistance;
bestSurfaceMatchIndex = candidate.index;
bestSurfaceMatchToken = candidate;
}
right += 1;
}
}
if (bestSurfaceMatchToken) {
if (bestSurfaceMatchToken !== null) {
return {
pos1: bestSurfaceMatchToken.pos1 as string,
pos1: bestSurfaceMatchToken.pos1,
pos2: bestSurfaceMatchToken.pos2,
pos3: bestSurfaceMatchToken.pos3,
};
}
let bestToken: MergedToken | null = null;
return null;
}
function pickClosestMecabPosMetadataByOverlap(
token: MergedToken,
candidates: IndexedMecabToken[],
): MecabPosMetadata | null {
const tokenStart = token.startPos ?? 0;
const tokenEnd = token.endPos ?? tokenStart + token.surface.length;
let bestToken: IndexedMecabToken | null = null;
let bestOverlap = 0;
let bestSpan = 0;
let bestStartDistance = Number.MAX_SAFE_INTEGER;
let bestStart = Number.MAX_SAFE_INTEGER;
const overlappingTokens: MergedToken[] = [];
let bestIndex = Number.MAX_SAFE_INTEGER;
const overlappingTokens: IndexedMecabToken[] = [];
for (const mecabToken of mecabTokens) {
if (!mecabToken.pos1) {
continue;
}
const mecabStart = mecabToken.startPos ?? 0;
const mecabEnd = mecabToken.endPos ?? mecabStart + mecabToken.surface.length;
for (const candidate of candidates) {
const mecabStart = candidate.start;
const mecabEnd = candidate.end;
const overlapStart = Math.max(tokenStart, mecabStart);
const overlapEnd = Math.min(tokenEnd, mecabEnd);
const overlap = Math.max(0, overlapEnd - overlapStart);
if (overlap === 0) {
continue;
}
overlappingTokens.push(mecabToken);
overlappingTokens.push(candidate);
const span = mecabEnd - mecabStart;
const startDistance = Math.abs(mecabStart - tokenStart);
if (
overlap > bestOverlap ||
(overlap === bestOverlap &&
(Math.abs(mecabStart - tokenStart) < bestStartDistance ||
(Math.abs(mecabStart - tokenStart) === bestStartDistance &&
(span > bestSpan || (span === bestSpan && mecabStart < bestStart)))))
(startDistance < bestStartDistance ||
(startDistance === bestStartDistance &&
(span > bestSpan ||
(span === bestSpan &&
(mecabStart < bestStart ||
(mecabStart === bestStart && candidate.index < bestIndex)))))))
) {
bestOverlap = overlap;
bestSpan = span;
bestStartDistance = Math.abs(mecabStart - tokenStart);
bestStartDistance = startDistance;
bestStart = mecabStart;
bestToken = mecabToken;
bestIndex = candidate.index;
bestToken = candidate;
}
}
@@ -117,12 +273,21 @@ function pickClosestMecabPosMetadata(
return null;
}
const overlapPos1 = joinUniqueTags(overlappingTokens.map((token) => token.pos1));
const overlapPos2 = joinUniqueTags(overlappingTokens.map((token) => token.pos2));
const overlapPos3 = joinUniqueTags(overlappingTokens.map((token) => token.pos3));
const overlappingTokensByMecabOrder = overlappingTokens
.slice()
.sort((left, right) => left.index - right.index);
const overlapPos1 = joinUniqueTags(
overlappingTokensByMecabOrder.map((candidate) => candidate.pos1),
);
const overlapPos2 = joinUniqueTags(
overlappingTokensByMecabOrder.map((candidate) => candidate.pos2),
);
const overlapPos3 = joinUniqueTags(
overlappingTokensByMecabOrder.map((candidate) => candidate.pos3),
);
return {
pos1: overlapPos1 ?? (bestToken.pos1 as string),
pos1: overlapPos1 ?? bestToken.pos1,
pos2: overlapPos2 ?? bestToken.pos2,
pos3: overlapPos3 ?? bestToken.pos3,
};
@@ -130,13 +295,9 @@ function pickClosestMecabPosMetadata(
function fillMissingPos1BySurfaceSequence(
tokens: MergedToken[],
mecabTokens: MergedToken[],
byTrimmedSurface: Map<string, IndexedMecabToken[]>,
): MergedToken[] {
const indexedMecabTokens = mecabTokens
.map((token, index) => ({ token, index }))
.filter(({ token }) => token.pos1 && token.surface.trim().length > 0);
if (indexedMecabTokens.length === 0) {
if (byTrimmedSurface.size === 0) {
return tokens;
}
@@ -151,27 +312,13 @@ function fillMissingPos1BySurfaceSequence(
return token;
}
let best: { token: MergedToken; index: number } | null = null;
for (const candidate of indexedMecabTokens) {
if (candidate.token.surface !== surface) {
continue;
}
if (candidate.index < cursor) {
continue;
}
best = { token: candidate.token, index: candidate.index };
break;
const candidates = byTrimmedSurface.get(surface);
if (!candidates || candidates.length === 0) {
return token;
}
if (!best) {
for (const candidate of indexedMecabTokens) {
if (candidate.token.surface !== surface) {
continue;
}
best = { token: candidate.token, index: candidate.index };
break;
}
}
const atOrAfterCursorIndex = lowerBoundByIndex(candidates, cursor);
const best = candidates[atOrAfterCursorIndex] ?? candidates[0];
if (!best) {
return token;
@@ -180,13 +327,41 @@ function fillMissingPos1BySurfaceSequence(
cursor = best.index + 1;
return {
...token,
pos1: best.token.pos1,
pos2: best.token.pos2,
pos3: best.token.pos3,
pos1: best.pos1,
pos2: best.pos2,
pos3: best.pos3,
};
});
}
function collectOverlapCandidatesByPosition(
token: MergedToken,
byPosition: Map<number, IndexedMecabToken[]>,
): IndexedMecabToken[] {
const tokenStart = token.startPos ?? 0;
const tokenEnd = token.endPos ?? tokenStart + token.surface.length;
const discreteSpan = toDiscreteSpan(tokenStart, tokenEnd);
const seen = new Set<number>();
const overlapCandidates: IndexedMecabToken[] = [];
for (let position = discreteSpan.start; position < discreteSpan.end; position += 1) {
const candidatesAtPosition = byPosition.get(position);
if (!candidatesAtPosition) {
continue;
}
for (const candidate of candidatesAtPosition) {
if (seen.has(candidate.index)) {
continue;
}
seen.add(candidate.index);
overlapCandidates.push(candidate);
}
}
return overlapCandidates;
}
export function enrichTokensWithMecabPos1(
tokens: MergedToken[],
mecabTokens: MergedToken[] | null,
@@ -199,12 +374,36 @@ export function enrichTokensWithMecabPos1(
return tokens;
}
const overlapEnriched = tokens.map((token) => {
const lookup = buildMecabLookup(mecabTokens);
if (lookup.indexedTokens.length === 0) {
return tokens;
}
const metadataByTokenIndex = new Map<number, MecabPosMetadata>();
for (const [index, token] of tokens.entries()) {
if (token.pos1) {
return token;
continue;
}
const metadata = pickClosestMecabPosMetadata(token, mecabTokens);
const surfaceMetadata = pickClosestMecabPosMetadataBySurface(
token,
lookup.byExactSurface.get(token.surface),
);
if (surfaceMetadata) {
metadataByTokenIndex.set(index, surfaceMetadata);
continue;
}
const overlapCandidates = collectOverlapCandidatesByPosition(token, lookup.byPosition);
const overlapMetadata = pickClosestMecabPosMetadataByOverlap(token, overlapCandidates);
if (overlapMetadata) {
metadataByTokenIndex.set(index, overlapMetadata);
}
}
const overlapEnriched = tokens.map((token, index) => {
const metadata = metadataByTokenIndex.get(index);
if (!metadata) {
return token;
}
@@ -217,5 +416,5 @@ export function enrichTokensWithMecabPos1(
};
});
return fillMissingPos1BySurfaceSequence(overlapEnriched, mecabTokens);
return fillMissingPos1BySurfaceSequence(overlapEnriched, lookup.byTrimmedSurface);
}

View File

@@ -51,7 +51,7 @@ test('prefers scanning parser when scanning candidate has more than one token',
test('keeps scanning parser candidate when scanning candidate is single token', () => {
const parseResults = [
makeParseItem('scanning-parser', [
[{ text: '俺は公園にいきたい', reading: 'おれはこうえんにいきたい' }],
[{ text: '俺は公園にいきたい', reading: 'おれはこうえんにいきたい', headword: '行きたい' }],
]),
makeParseItem('mecab', [
[{ text: '俺', reading: 'おれ', headword: '俺' }],
@@ -96,3 +96,34 @@ test('returns null when only mecab-source candidates are present', () => {
const tokens = selectYomitanParseTokens(parseResults, () => false, 'headword');
assert.equal(tokens, null);
});
// When no scanning-parser line carries a dictionary headword at all, the
// whole candidate is rejected rather than falling back to surface-as-headword.
test('returns null when scanning parser candidates have no dictionary headwords', () => {
const parseResults = [
makeParseItem('scanning-parser', [
[{ text: 'これは', reading: 'これは' }],
[{ text: 'テスト', reading: 'てすと' }],
]),
];
const tokens = selectYomitanParseTokens(parseResults, () => false, 'headword');
assert.equal(tokens, null);
});
// Mixed case: lines without a headword (e.g. a parenthesized SFX caption)
// are dropped individually while dictionary-backed lines survive.
test('drops scanning parser tokens which have no dictionary headword', () => {
const parseResults = [
makeParseItem('scanning-parser', [
[{ text: '(ダクネスの荒い息)', reading: 'だくねすのあらいいき' }],
[{ text: 'アクア', reading: 'あくあ', headword: 'アクア' }],
[{ text: 'トラウマ', reading: 'とらうま', headword: 'トラウマ' }],
]),
];
const tokens = selectYomitanParseTokens(parseResults, () => false, 'headword');
assert.deepEqual(
tokens?.map((token) => ({ surface: token.surface, headword: token.headword })),
[
{ surface: 'アクア', headword: 'アクア' },
{ surface: 'トラウマ', headword: 'トラウマ' },
],
);
});

View File

@@ -130,6 +130,7 @@ export function mapYomitanParseResultItemToMergedTokens(
const tokens: MergedToken[] = [];
let charOffset = 0;
let validLineCount = 0;
let hasDictionaryMatch = false;
for (const line of content) {
if (!isYomitanParseLine(line)) {
@@ -163,7 +164,13 @@ export function mapYomitanParseResultItemToMergedTokens(
const start = charOffset;
const end = start + combinedSurface.length;
charOffset = end;
const headword = combinedHeadword || combinedSurface;
if (!combinedHeadword) {
// No dictionary-backed headword for this merged unit; skip it entirely so
// downstream keyboard/frequency/JLPT flows only operate on lookup-backed tokens.
continue;
}
hasDictionaryMatch = true;
const headword = combinedHeadword;
tokens.push({
surface: combinedSurface,
@@ -182,7 +189,7 @@ export function mapYomitanParseResultItemToMergedTokens(
});
}
if (validLineCount === 0 || tokens.length === 0) {
if (validLineCount === 0 || tokens.length === 0 || !hasDictionaryMatch) {
return null;
}

View File

@@ -1,6 +1,7 @@
import assert from 'node:assert/strict';
import test from 'node:test';
import {
requestYomitanParseResults,
requestYomitanTermFrequencies,
syncYomitanDefaultAnkiServer,
} from './yomitan-parser-runtime';
@@ -40,18 +41,63 @@ test('syncYomitanDefaultAnkiServer updates default profile server when script re
assert.equal(updated, true);
assert.match(scriptValue, /optionsGetFull/);
assert.match(scriptValue, /setAllSettings/);
assert.match(scriptValue, /profileCurrent/);
assert.match(scriptValue, /forceOverride = false/);
assert.equal(infoLogs.length, 1);
});
test('syncYomitanDefaultAnkiServer returns false when script reports no change', async () => {
test('syncYomitanDefaultAnkiServer returns true when script reports no change', async () => {
const deps = createDeps(async () => ({ updated: false }));
let infoLogCount = 0;
const updated = await syncYomitanDefaultAnkiServer('http://127.0.0.1:8766', deps, {
const synced = await syncYomitanDefaultAnkiServer('http://127.0.0.1:8766', deps, {
error: () => undefined,
info: () => undefined,
info: () => {
infoLogCount += 1;
},
});
assert.equal(updated, false);
assert.equal(synced, true);
assert.equal(infoLogCount, 0);
});
// When the injected script reports matched:false with a reason, the sync is
// treated as blocked: the function returns false and logs the reason once.
test('syncYomitanDefaultAnkiServer returns false when existing non-default server blocks update', async () => {
const deps = createDeps(async () => ({
updated: false,
matched: false,
reason: 'blocked-existing-server',
}));
const infoLogs: string[] = [];
const synced = await syncYomitanDefaultAnkiServer('http://127.0.0.1:8766', deps, {
error: () => undefined,
info: (message) => infoLogs.push(message),
});
assert.equal(synced, false);
assert.equal(infoLogs.length, 1);
assert.match(infoLogs[0] ?? '', /blocked-existing-server/);
});
// The optional { forceOverride: true } argument must be threaded into the
// injected script so a non-default server can still be overwritten.
test('syncYomitanDefaultAnkiServer injects force override when enabled', async () => {
let scriptValue = '';
const deps = createDeps(async (script) => {
scriptValue = script;
return { updated: false, matched: true };
});
const synced = await syncYomitanDefaultAnkiServer(
'http://127.0.0.1:8766',
deps,
{
error: () => undefined,
info: () => undefined,
},
{ forceOverride: true },
);
assert.equal(synced, true);
assert.match(scriptValue, /forceOverride = true/);
});
test('syncYomitanDefaultAnkiServer logs and returns false on script failure', async () => {
@@ -152,6 +198,102 @@ test('requestYomitanTermFrequencies prefers primary rank from displayValue array
assert.equal(result[0]?.frequency, 7141);
});
// Fallback ordering: the reading-qualified lookup must be issued first, and
// only when it returns nothing is a second term-only (reading:null) request
// sent. The stub records every getTermFrequencies script to assert both the
// count (2) and the order of the requests.
test('requestYomitanTermFrequencies requests term-only fallback only after reading miss', async () => {
const frequencyScripts: string[] = [];
const deps = createDeps(async (script) => {
if (script.includes('optionsGetFull')) {
return {
profileCurrent: 0,
profiles: [
{
options: {
scanning: { length: 40 },
dictionaries: [{ name: 'freq-dict', enabled: true, id: 0 }],
},
},
],
};
}
if (!script.includes('getTermFrequencies')) {
return [];
}
frequencyScripts.push(script);
if (script.includes('"term":"断じて","reading":"だん"')) {
return [];
}
if (script.includes('"term":"断じて","reading":null')) {
return [
{
term: '断じて',
reading: null,
dictionary: 'freq-dict',
frequency: 7082,
displayValue: '7082',
displayValueParsed: true,
},
];
}
return [];
});
const result = await requestYomitanTermFrequencies([{ term: '断じて', reading: 'だん' }], deps, {
error: () => undefined,
});
assert.equal(result.length, 1);
assert.equal(result[0]?.frequency, 7082);
assert.equal(frequencyScripts.length, 2);
assert.match(frequencyScripts[0] ?? '', /"term":"断じて","reading":"だん"/);
assert.doesNotMatch(frequencyScripts[0] ?? '', /"term":"断じて","reading":null/);
assert.match(frequencyScripts[1] ?? '', /"term":"断じて","reading":null/);
});
// Complement of the previous test: when the reading-qualified lookup yields
// a hit, exactly one getTermFrequencies request is made and no term-only
// fallback request is issued.
test('requestYomitanTermFrequencies avoids term-only fallback request when reading lookup succeeds', async () => {
const frequencyScripts: string[] = [];
const deps = createDeps(async (script) => {
if (script.includes('optionsGetFull')) {
return {
profileCurrent: 0,
profiles: [
{
options: {
scanning: { length: 40 },
dictionaries: [{ name: 'freq-dict', enabled: true, id: 0 }],
},
},
],
};
}
if (!script.includes('getTermFrequencies')) {
return [];
}
frequencyScripts.push(script);
return [
{
term: '鍛える',
reading: 'きたえる',
dictionary: 'freq-dict',
frequency: 2847,
displayValue: '2847',
displayValueParsed: true,
},
];
});
const result = await requestYomitanTermFrequencies([{ term: '鍛える', reading: 'きた' }], deps, {
error: () => undefined,
});
assert.equal(result.length, 1);
assert.equal(frequencyScripts.length, 1);
assert.match(frequencyScripts[0] ?? '', /"term":"鍛える","reading":"きた"/);
assert.doesNotMatch(frequencyScripts[0] ?? '', /"term":"鍛える","reading":null/);
});
test('requestYomitanTermFrequencies caches profile metadata between calls', async () => {
const scripts: string[] = [];
const deps = createDeps(async (script) => {
@@ -246,3 +388,32 @@ test('requestYomitanTermFrequencies caches repeated term+reading lookups', async
const frequencyCalls = scripts.filter((script) => script.includes('getTermFrequencies')).length;
assert.equal(frequencyCalls, 1);
});
// The parseText request sent to the Yomitan extension must carry
// useMecabParser: false (MeCab enrichment happens on our side instead; see
// the useMecabParser change in yomitan-parser-runtime in this commit).
test('requestYomitanParseResults disables Yomitan MeCab parser path', async () => {
const scripts: string[] = [];
const deps = createDeps(async (script) => {
scripts.push(script);
if (script.includes('optionsGetFull')) {
return {
profileCurrent: 0,
profiles: [
{
options: {
scanning: { length: 40 },
},
},
],
};
}
return [];
});
const result = await requestYomitanParseResults('猫です', deps, {
error: () => undefined,
});
assert.deepEqual(result, []);
const parseScript = scripts.find((script) => script.includes('parseText'));
assert.ok(parseScript, 'expected parseText request script');
assert.match(parseScript ?? '', /useMecabParser:\s*false/);
});

View File

@@ -39,7 +39,10 @@ interface YomitanProfileMetadata {
const DEFAULT_YOMITAN_SCAN_LENGTH = 40;
const yomitanProfileMetadataByWindow = new WeakMap<BrowserWindow, YomitanProfileMetadata>();
const yomitanFrequencyCacheByWindow = new WeakMap<BrowserWindow, Map<string, YomitanTermFrequency[]>>();
const yomitanFrequencyCacheByWindow = new WeakMap<
BrowserWindow,
Map<string, YomitanTermFrequency[]>
>();
function isObject(value: unknown): value is Record<string, unknown> {
return Boolean(value && typeof value === 'object');
@@ -87,7 +90,7 @@ function parsePositiveFrequencyString(value: string): number | null {
const chunks = numericPrefix.split(',');
const normalizedNumber =
chunks.length <= 1
? chunks[0] ?? ''
? (chunks[0] ?? '')
: chunks.slice(1).every((chunk) => /^\d{3}$/.test(chunk))
? chunks.join('')
: (chunks[0] ?? '');
@@ -145,11 +148,7 @@ function toYomitanTermFrequency(value: unknown): YomitanTermFrequency | null {
: Number.MAX_SAFE_INTEGER;
const reading =
value.reading === null
? null
: typeof value.reading === 'string'
? value.reading
: null;
value.reading === null ? null : typeof value.reading === 'string' ? value.reading : null;
const displayValue = typeof displayValueRaw === 'string' ? displayValueRaw : null;
const displayValueParsed = value.displayValueParsed === true;
@@ -164,7 +163,9 @@ function toYomitanTermFrequency(value: unknown): YomitanTermFrequency | null {
};
}
function normalizeTermReadingList(termReadingList: YomitanTermReadingPair[]): YomitanTermReadingPair[] {
function normalizeTermReadingList(
termReadingList: YomitanTermReadingPair[],
): YomitanTermReadingPair[] {
const normalized: YomitanTermReadingPair[] = [];
const seen = new Set<string>();
@@ -174,7 +175,9 @@ function normalizeTermReadingList(termReadingList: YomitanTermReadingPair[]): Yo
continue;
}
const reading =
typeof pair.reading === 'string' && pair.reading.trim().length > 0 ? pair.reading.trim() : null;
typeof pair.reading === 'string' && pair.reading.trim().length > 0
? pair.reading.trim()
: null;
const key = `${term}\u0000${reading ?? ''}`;
if (seen.has(key)) {
continue;
@@ -298,7 +301,9 @@ function groupFrequencyEntriesByPair(
const grouped = new Map<string, YomitanTermFrequency[]>();
for (const entry of entries) {
const reading =
typeof entry.reading === 'string' && entry.reading.trim().length > 0 ? entry.reading.trim() : null;
typeof entry.reading === 'string' && entry.reading.trim().length > 0
? entry.reading.trim()
: null;
const key = makeTermReadingCacheKey(entry.term.trim(), reading);
const existing = grouped.get(key);
if (existing) {
@@ -529,7 +534,7 @@ export async function requestYomitanParseResults(
optionsContext: { index: ${metadata.profileIndex} },
scanLength: ${metadata.scanLength},
useInternalParser: true,
useMecabParser: true
useMecabParser: false
});
})();
`
@@ -564,7 +569,7 @@ export async function requestYomitanParseResults(
optionsContext: { index: profileIndex },
scanLength,
useInternalParser: true,
useMecabParser: true
useMecabParser: false
});
})();
`;
@@ -578,6 +583,144 @@ export async function requestYomitanParseResults(
}
}
/**
 * Fetches term-frequency entries from the Yomitan extension running in the
 * hidden parser window.
 *
 * Two paths:
 *  - Fast path: when `metadata` already carries the enabled-dictionary list,
 *    the injected script calls `getTermFrequencies` directly and results are
 *    normalized with the cached per-dictionary priority map.
 *  - Slow path: with no usable metadata, the injected script first discovers
 *    the current profile's enabled dictionaries (sorted by id) via
 *    `optionsGetFull`, attaches a `dictionaryPriority` to each raw entry, and
 *    the result is validated entry-by-entry with `toYomitanTermFrequency`.
 *
 * @param parserWindow hidden BrowserWindow hosting the Yomitan extension
 * @param termReadingList term/reading pairs to look up
 * @param metadata cached profile metadata, or null to discover in-page
 * @param logger sink for request failures
 * @returns normalized entries ([] when nothing matched or the response shape
 *          was unexpected), or null when the executeJavaScript call failed
 */
async function fetchYomitanTermFrequencies(
  parserWindow: BrowserWindow,
  termReadingList: YomitanTermReadingPair[],
  metadata: YomitanProfileMetadata | null,
  logger: LoggerLike,
): Promise<YomitanTermFrequency[] | null> {
  // Fast path: enabled dictionaries (and their priority order) are already known.
  if (metadata && metadata.dictionaries.length > 0) {
    const script = `
      (async () => {
        const invoke = (action, params) =>
          new Promise((resolve, reject) => {
            chrome.runtime.sendMessage({ action, params }, (response) => {
              if (chrome.runtime.lastError) {
                reject(new Error(chrome.runtime.lastError.message));
                return;
              }
              if (!response || typeof response !== "object") {
                reject(new Error("Invalid response from Yomitan backend"));
                return;
              }
              if (response.error) {
                reject(new Error(response.error.message || "Yomitan backend error"));
                return;
              }
              resolve(response.result);
            });
          });
        return await invoke("getTermFrequencies", {
          termReadingList: ${JSON.stringify(termReadingList)},
          dictionaries: ${JSON.stringify(metadata.dictionaries)}
        });
      })();
    `;
    try {
      const rawResult = await parserWindow.webContents.executeJavaScript(script, true);
      // A non-array response is treated as "no data", not as a failure.
      return Array.isArray(rawResult)
        ? normalizeFrequencyEntriesWithPriority(rawResult, metadata.dictionaryPriorityByName)
        : [];
    } catch (err) {
      logger.error('Yomitan term frequency request failed:', (err as Error).message);
      // null (as opposed to []) tells callers the request itself failed.
      return null;
    }
  }
  // Slow path: discover the current profile's enabled dictionaries inside the
  // page, then query and tag each raw entry with its dictionary priority.
  const script = `
    (async () => {
      const invoke = (action, params) =>
        new Promise((resolve, reject) => {
          chrome.runtime.sendMessage({ action, params }, (response) => {
            if (chrome.runtime.lastError) {
              reject(new Error(chrome.runtime.lastError.message));
              return;
            }
            if (!response || typeof response !== "object") {
              reject(new Error("Invalid response from Yomitan backend"));
              return;
            }
            if (response.error) {
              reject(new Error(response.error.message || "Yomitan backend error"));
              return;
            }
            resolve(response.result);
          });
        });
      const optionsFull = await invoke("optionsGetFull", undefined);
      const profileIndex = optionsFull.profileCurrent;
      const dictionariesRaw = optionsFull.profiles?.[profileIndex]?.options?.dictionaries ?? [];
      const dictionaryEntries = Array.isArray(dictionariesRaw)
        ? dictionariesRaw
            .filter((entry) => entry && typeof entry === "object" && entry.enabled === true && typeof entry.name === "string")
            .map((entry, index) => ({
              name: entry.name,
              id: typeof entry.id === "number" && Number.isFinite(entry.id) ? Math.floor(entry.id) : index
            }))
            .sort((a, b) => a.id - b.id)
        : [];
      const dictionaries = dictionaryEntries.map((entry) => entry.name);
      const dictionaryPriorityByName = dictionaryEntries.reduce((acc, entry, index) => {
        acc[entry.name] = index;
        return acc;
      }, {});
      if (dictionaries.length === 0) {
        return [];
      }
      const rawFrequencies = await invoke("getTermFrequencies", {
        termReadingList: ${JSON.stringify(termReadingList)},
        dictionaries
      });
      if (!Array.isArray(rawFrequencies)) {
        return [];
      }
      return rawFrequencies
        .filter((entry) => entry && typeof entry === "object")
        .map((entry) => ({
          ...entry,
          dictionaryPriority:
            typeof entry.dictionary === "string" && dictionaryPriorityByName[entry.dictionary] !== undefined
              ? dictionaryPriorityByName[entry.dictionary]
              : Number.MAX_SAFE_INTEGER
        }));
    })();
  `;
  try {
    const rawResult = await parserWindow.webContents.executeJavaScript(script, true);
    // Entries that fail validation are dropped rather than failing the batch.
    return Array.isArray(rawResult)
      ? rawResult
          .map((entry) => toYomitanTermFrequency(entry))
          .filter((entry): entry is YomitanTermFrequency => entry !== null)
      : [];
  } catch (err) {
    logger.error('Yomitan term frequency request failed:', (err as Error).message);
    return null;
  }
}
/**
 * Records fetched frequency entries in the per-window cache, one slot per
 * requested term/reading pair. A pair is cached with the entries grouped
 * under its exact term+reading key when present; otherwise with all entries
 * sharing just the term; otherwise with an empty list (negative cache).
 */
function cacheFrequencyEntriesForPairs(
  frequencyCache: Map<string, YomitanTermFrequency[]>,
  termReadingList: YomitanTermReadingPair[],
  fetchedEntries: YomitanTermFrequency[],
): void {
  const byPair = groupFrequencyEntriesByPair(fetchedEntries);
  const byTerm = groupFrequencyEntriesByTerm(fetchedEntries);
  termReadingList.forEach((pair) => {
    const cacheKey = makeTermReadingCacheKey(pair.term, pair.reading);
    const entriesForPair = byPair.get(cacheKey) ?? byTerm.get(pair.term) ?? [];
    frequencyCache.set(cacheKey, entriesForPair);
  });
}
export async function requestYomitanTermFrequencies(
termReadingList: YomitanTermReadingPair[],
deps: YomitanParserRuntimeDeps,
@@ -622,159 +765,98 @@ export async function requestYomitanTermFrequencies(
return buildCachedResult();
}
if (metadata && metadata.dictionaries.length > 0) {
const script = `
(async () => {
const invoke = (action, params) =>
new Promise((resolve, reject) => {
chrome.runtime.sendMessage({ action, params }, (response) => {
if (chrome.runtime.lastError) {
reject(new Error(chrome.runtime.lastError.message));
return;
}
if (!response || typeof response !== "object") {
reject(new Error("Invalid response from Yomitan backend"));
return;
}
if (response.error) {
reject(new Error(response.error.message || "Yomitan backend error"));
return;
}
resolve(response.result);
});
});
const fetchedEntries = await fetchYomitanTermFrequencies(
parserWindow,
missingTermReadingList,
metadata,
logger,
);
if (fetchedEntries === null) {
return buildCachedResult();
}
return await invoke("getTermFrequencies", {
termReadingList: ${JSON.stringify(missingTermReadingList)},
dictionaries: ${JSON.stringify(metadata.dictionaries)}
});
})();
`;
cacheFrequencyEntriesForPairs(frequencyCache, missingTermReadingList, fetchedEntries);
try {
const rawResult = await parserWindow.webContents.executeJavaScript(script, true);
const fetchedEntries = Array.isArray(rawResult)
? normalizeFrequencyEntriesWithPriority(rawResult, metadata.dictionaryPriorityByName)
: [];
const groupedByPair = groupFrequencyEntriesByPair(fetchedEntries);
const groupedByTerm = groupFrequencyEntriesByTerm(fetchedEntries);
const missingTerms = new Set(missingTermReadingList.map((pair) => pair.term));
for (const pair of missingTermReadingList) {
const fallbackTermReadingList = normalizeTermReadingList(
missingTermReadingList
.filter((pair) => pair.reading !== null)
.map((pair) => {
const key = makeTermReadingCacheKey(pair.term, pair.reading);
const exactEntries = groupedByPair.get(key);
const termEntries = groupedByTerm.get(pair.term) ?? [];
frequencyCache.set(key, exactEntries ?? termEntries);
}
const cachedEntries = frequencyCache.get(key);
if (cachedEntries && cachedEntries.length > 0) {
return null;
}
const cachedResult = buildCachedResult();
const unmatchedEntries = fetchedEntries.filter((entry) => !missingTerms.has(entry.term.trim()));
return [...cachedResult, ...unmatchedEntries];
} catch (err) {
logger.error('Yomitan term frequency request failed:', (err as Error).message);
const fallbackKey = makeTermReadingCacheKey(pair.term, null);
const cachedFallback = frequencyCache.get(fallbackKey);
if (cachedFallback && cachedFallback.length > 0) {
frequencyCache.set(key, cachedFallback);
return null;
}
return { term: pair.term, reading: null };
})
.filter((pair): pair is { term: string; reading: null } => pair !== null),
).filter((pair) => !frequencyCache.has(makeTermReadingCacheKey(pair.term, pair.reading)));
let fallbackFetchedEntries: YomitanTermFrequency[] = [];
if (fallbackTermReadingList.length > 0) {
const fallbackFetchResult = await fetchYomitanTermFrequencies(
parserWindow,
fallbackTermReadingList,
metadata,
logger,
);
if (fallbackFetchResult !== null) {
fallbackFetchedEntries = fallbackFetchResult;
cacheFrequencyEntriesForPairs(
frequencyCache,
fallbackTermReadingList,
fallbackFetchedEntries,
);
}
return buildCachedResult();
}
const script = `
(async () => {
const invoke = (action, params) =>
new Promise((resolve, reject) => {
chrome.runtime.sendMessage({ action, params }, (response) => {
if (chrome.runtime.lastError) {
reject(new Error(chrome.runtime.lastError.message));
return;
}
if (!response || typeof response !== "object") {
reject(new Error("Invalid response from Yomitan backend"));
return;
}
if (response.error) {
reject(new Error(response.error.message || "Yomitan backend error"));
return;
}
resolve(response.result);
});
});
const optionsFull = await invoke("optionsGetFull", undefined);
const profileIndex = optionsFull.profileCurrent;
const dictionariesRaw = optionsFull.profiles?.[profileIndex]?.options?.dictionaries ?? [];
const dictionaryEntries = Array.isArray(dictionariesRaw)
? dictionariesRaw
.filter((entry) => entry && typeof entry === "object" && entry.enabled === true && typeof entry.name === "string")
.map((entry, index) => ({
name: entry.name,
id: typeof entry.id === "number" && Number.isFinite(entry.id) ? Math.floor(entry.id) : index
}))
.sort((a, b) => a.id - b.id)
: [];
const dictionaries = dictionaryEntries.map((entry) => entry.name);
const dictionaryPriorityByName = dictionaryEntries.reduce((acc, entry, index) => {
acc[entry.name] = index;
return acc;
}, {});
if (dictionaries.length === 0) {
return [];
}
const rawFrequencies = await invoke("getTermFrequencies", {
termReadingList: ${JSON.stringify(missingTermReadingList)},
dictionaries
});
if (!Array.isArray(rawFrequencies)) {
return [];
}
return rawFrequencies
.filter((entry) => entry && typeof entry === "object")
.map((entry) => ({
...entry,
dictionaryPriority:
typeof entry.dictionary === "string" && dictionaryPriorityByName[entry.dictionary] !== undefined
? dictionaryPriorityByName[entry.dictionary]
: Number.MAX_SAFE_INTEGER
}));
})();
`;
try {
const rawResult = await parserWindow.webContents.executeJavaScript(script, true);
const fetchedEntries = Array.isArray(rawResult)
? rawResult
.map((entry) => toYomitanTermFrequency(entry))
.filter((entry): entry is YomitanTermFrequency => entry !== null)
: [];
const groupedByPair = groupFrequencyEntriesByPair(fetchedEntries);
const groupedByTerm = groupFrequencyEntriesByTerm(fetchedEntries);
const missingTerms = new Set(missingTermReadingList.map((pair) => pair.term));
for (const pair of missingTermReadingList) {
if (pair.reading === null) {
continue;
}
const key = makeTermReadingCacheKey(pair.term, pair.reading);
const exactEntries = groupedByPair.get(key);
const termEntries = groupedByTerm.get(pair.term) ?? [];
frequencyCache.set(key, exactEntries ?? termEntries);
const cachedEntries = frequencyCache.get(key);
if (cachedEntries && cachedEntries.length > 0) {
continue;
}
const fallbackEntries = frequencyCache.get(makeTermReadingCacheKey(pair.term, null));
if (fallbackEntries && fallbackEntries.length > 0) {
frequencyCache.set(key, fallbackEntries);
}
}
const cachedResult = buildCachedResult();
const unmatchedEntries = fetchedEntries.filter((entry) => !missingTerms.has(entry.term.trim()));
return [...cachedResult, ...unmatchedEntries];
} catch (err) {
logger.error('Yomitan term frequency request failed:', (err as Error).message);
return buildCachedResult();
}
const allFetchedEntries = [...fetchedEntries, ...fallbackFetchedEntries];
const queriedTerms = new Set(
[...missingTermReadingList, ...fallbackTermReadingList].map((pair) => pair.term),
);
const cachedResult = buildCachedResult();
const unmatchedEntries = allFetchedEntries.filter(
(entry) => !queriedTerms.has(entry.term.trim()),
);
return [...cachedResult, ...unmatchedEntries];
}
export async function syncYomitanDefaultAnkiServer(
serverUrl: string,
deps: YomitanParserRuntimeDeps,
logger: LoggerLike,
options?: {
forceOverride?: boolean;
},
): Promise<boolean> {
const normalizedTargetServer = serverUrl.trim();
if (!normalizedTargetServer) {
return false;
}
const forceOverride = options?.forceOverride === true;
const isReady = await ensureYomitanParserWindow(deps, logger);
const parserWindow = deps.getYomitanParserWindow();
@@ -804,35 +886,42 @@ export async function syncYomitanDefaultAnkiServer(
});
const targetServer = ${JSON.stringify(normalizedTargetServer)};
const forceOverride = ${forceOverride ? 'true' : 'false'};
const optionsFull = await invoke("optionsGetFull", undefined);
const profiles = Array.isArray(optionsFull.profiles) ? optionsFull.profiles : [];
if (profiles.length === 0) {
return { updated: false, reason: "no-profiles" };
}
const defaultProfile = profiles[0];
if (!defaultProfile || typeof defaultProfile !== "object") {
const profileCurrent = Number.isInteger(optionsFull.profileCurrent)
? optionsFull.profileCurrent
: 0;
const targetProfile = profiles[profileCurrent];
if (!targetProfile || typeof targetProfile !== "object") {
return { updated: false, reason: "invalid-default-profile" };
}
defaultProfile.options = defaultProfile.options && typeof defaultProfile.options === "object"
? defaultProfile.options
targetProfile.options = targetProfile.options && typeof targetProfile.options === "object"
? targetProfile.options
: {};
defaultProfile.options.anki = defaultProfile.options.anki && typeof defaultProfile.options.anki === "object"
? defaultProfile.options.anki
targetProfile.options.anki = targetProfile.options.anki && typeof targetProfile.options.anki === "object"
? targetProfile.options.anki
: {};
const currentServerRaw = defaultProfile.options.anki.server;
const currentServerRaw = targetProfile.options.anki.server;
const currentServer = typeof currentServerRaw === "string" ? currentServerRaw.trim() : "";
const canReplaceDefault =
currentServer.length === 0 || currentServer === "http://127.0.0.1:8765";
if (!canReplaceDefault || currentServer === targetServer) {
return { updated: false, reason: "no-change", currentServer, targetServer };
if (currentServer === targetServer) {
return { updated: false, matched: true, reason: "already-target", currentServer, targetServer };
}
const canReplaceCurrent =
forceOverride || currentServer.length === 0 || currentServer === "http://127.0.0.1:8765";
if (!canReplaceCurrent) {
return { updated: false, matched: false, reason: "blocked-existing-server", currentServer, targetServer };
}
defaultProfile.options.anki.server = targetServer;
targetProfile.options.anki.server = targetServer;
await invoke("setAllSettings", { value: optionsFull, source: "subminer" });
return { updated: true, currentServer, targetServer };
return { updated: true, matched: true, currentServer, targetServer };
})();
`;
@@ -846,7 +935,29 @@ export async function syncYomitanDefaultAnkiServer(
logger.info?.(`Updated Yomitan default profile Anki server to ${normalizedTargetServer}`);
return true;
}
return false;
const matchedWithoutUpdate =
isObject(result) &&
result.updated === false &&
(result as { matched?: unknown }).matched === true;
if (matchedWithoutUpdate) {
return true;
}
const blockedByExistingServer =
isObject(result) &&
result.updated === false &&
(result as { matched?: unknown }).matched === false &&
typeof (result as { reason?: unknown }).reason === 'string';
if (blockedByExistingServer) {
logger.info?.(
`Skipped syncing Yomitan Anki server (reason=${String((result as { reason: string }).reason)})`,
);
return false;
}
const checkedWithoutUpdate =
typeof result === 'object' &&
result !== null &&
(result as { updated?: unknown }).updated === false;
return checkedWithoutUpdate;
} catch (err) {
logger.error('Failed to sync Yomitan default profile Anki server:', (err as Error).message);
return false;

View File

@@ -0,0 +1,53 @@
import * as fs from 'fs';
import * as path from 'path';
// Extension scripts watched for drift between the bundled source copy and the
// installed target copy. shouldCopyYomitanExtension() forces a re-copy when
// any of these differ, even if the manifest versions match.
const YOMITAN_SYNC_SCRIPT_PATHS = [
  path.join('js', 'app', 'popup.js'),
  path.join('js', 'display', 'popup-main.js'),
  path.join('js', 'display', 'display.js'),
  path.join('js', 'display', 'display-audio.js'),
];
/**
 * Reads the `version` field from a manifest.json file.
 * Returns null when the file cannot be read, is not valid JSON, or the
 * `version` field is missing / not a string.
 */
function readManifestVersion(manifestPath: string): string | null {
  let manifest: { version?: unknown };
  try {
    manifest = JSON.parse(fs.readFileSync(manifestPath, 'utf-8')) as { version?: unknown };
  } catch {
    return null;
  }
  const { version } = manifest;
  return typeof version === 'string' ? version : null;
}
/** True only when both files exist and have byte-identical contents. */
function areFilesEqual(sourcePath: string, targetPath: string): boolean {
  const bothExist = fs.existsSync(sourcePath) && fs.existsSync(targetPath);
  if (!bothExist) {
    return false;
  }
  try {
    const sourceBytes = fs.readFileSync(sourcePath);
    const targetBytes = fs.readFileSync(targetPath);
    return sourceBytes.equals(targetBytes);
  } catch {
    // Any read failure counts as "not equal" so callers fall back to copying.
    return false;
  }
}
/**
 * Decides whether the bundled Yomitan extension must be (re)copied into the
 * user-data extensions directory. A copy is required when the target is
 * missing, when either manifest is absent or unreadable, when the manifest
 * versions differ, or when any watched runtime script has drifted from the
 * bundled copy.
 */
export function shouldCopyYomitanExtension(sourceDir: string, targetDir: string): boolean {
  if (!fs.existsSync(targetDir)) {
    return true;
  }
  const sourceManifest = path.join(sourceDir, 'manifest.json');
  const targetManifest = path.join(targetDir, 'manifest.json');
  if (!fs.existsSync(sourceManifest) || !fs.existsSync(targetManifest)) {
    return true;
  }
  const sourceVersion = readManifestVersion(sourceManifest);
  const targetVersion = readManifestVersion(targetManifest);
  const versionsMatch =
    sourceVersion !== null && targetVersion !== null && sourceVersion === targetVersion;
  if (!versionsMatch) {
    return true;
  }
  // Matching versions are not sufficient: the watched scripts can change
  // independently, so re-copy whenever any of them differ.
  return YOMITAN_SYNC_SCRIPT_PATHS.some(
    (relativePath) =>
      !areFilesEqual(path.join(sourceDir, relativePath), path.join(targetDir, relativePath)),
  );
}

View File

@@ -0,0 +1,52 @@
import assert from 'node:assert/strict';
import fs from 'node:fs';
import os from 'node:os';
import path from 'node:path';
import test from 'node:test';
import { shouldCopyYomitanExtension } from './yomitan-extension-copy';
/** Test helper: writes `content` to `filePath`, creating parent directories first. */
function writeFile(filePath: string, content: string): void {
  const parentDir = path.dirname(filePath);
  fs.mkdirSync(parentDir, { recursive: true });
  fs.writeFileSync(filePath, content, 'utf-8');
}
test('shouldCopyYomitanExtension detects popup runtime script drift', () => {
  const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), 'yomitan-copy-test-'));
  const sourceDir = path.join(tempRoot, 'source');
  const targetDir = path.join(tempRoot, 'target');
  writeFile(path.join(sourceDir, 'manifest.json'), JSON.stringify({ version: '1.0.0' }));
  writeFile(path.join(targetDir, 'manifest.json'), JSON.stringify({ version: '1.0.0' }));
  writeFile(path.join(sourceDir, 'js', 'app', 'popup.js'), 'same-popup-script');
  writeFile(path.join(targetDir, 'js', 'app', 'popup.js'), 'same-popup-script');
  // Keep the remaining watched scripts identical on both sides so the
  // assertion below can only pass because popup-main.js drifted — not because
  // a watched script is missing (missing files also count as drift).
  writeFile(path.join(sourceDir, 'js', 'display', 'display.js'), 'same-display');
  writeFile(path.join(targetDir, 'js', 'display', 'display.js'), 'same-display');
  writeFile(path.join(sourceDir, 'js', 'display', 'display-audio.js'), 'same-display-audio');
  writeFile(path.join(targetDir, 'js', 'display', 'display-audio.js'), 'same-display-audio');
  writeFile(path.join(sourceDir, 'js', 'display', 'popup-main.js'), 'source-popup-main');
  writeFile(path.join(targetDir, 'js', 'display', 'popup-main.js'), 'target-popup-main');
  assert.equal(shouldCopyYomitanExtension(sourceDir, targetDir), true);
});
test('shouldCopyYomitanExtension skips copy when versions and watched scripts match', () => {
  const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), 'yomitan-copy-test-'));
  const sourceDir = path.join(tempRoot, 'source');
  const targetDir = path.join(tempRoot, 'target');
  // Mirror the manifest and every watched script identically on both sides.
  const identicalFiles: Array<[string[], string]> = [
    [['manifest.json'], JSON.stringify({ version: '1.0.0' })],
    [['js', 'app', 'popup.js'], 'same-popup-script'],
    [['js', 'display', 'popup-main.js'], 'same-popup-main'],
    [['js', 'display', 'display.js'], 'same-display'],
    [['js', 'display', 'display-audio.js'], 'same-display-audio'],
  ];
  for (const [segments, content] of identicalFiles) {
    writeFile(path.join(sourceDir, ...segments), content);
    writeFile(path.join(targetDir, ...segments), content);
  }
  assert.equal(shouldCopyYomitanExtension(sourceDir, targetDir), false);
});

View File

@@ -2,6 +2,7 @@ import { BrowserWindow, Extension, session } from 'electron';
import * as fs from 'fs';
import * as path from 'path';
import { createLogger } from '../../logger';
import { shouldCopyYomitanExtension } from './yomitan-extension-copy';
const logger = createLogger('main:yomitan-extension-loader');
@@ -22,27 +23,7 @@ function ensureExtensionCopy(sourceDir: string, userDataPath: string): string {
const extensionsRoot = path.join(userDataPath, 'extensions');
const targetDir = path.join(extensionsRoot, 'yomitan');
const sourceManifest = path.join(sourceDir, 'manifest.json');
const targetManifest = path.join(targetDir, 'manifest.json');
let shouldCopy = !fs.existsSync(targetDir);
if (!shouldCopy && fs.existsSync(sourceManifest) && fs.existsSync(targetManifest)) {
try {
const sourceVersion = (
JSON.parse(fs.readFileSync(sourceManifest, 'utf-8')) as {
version: string;
}
).version;
const targetVersion = (
JSON.parse(fs.readFileSync(targetManifest, 'utf-8')) as {
version: string;
}
).version;
shouldCopy = sourceVersion !== targetVersion;
} catch {
shouldCopy = true;
}
}
const shouldCopy = shouldCopyYomitanExtension(sourceDir, targetDir);
if (shouldCopy) {
fs.mkdirSync(extensionsRoot, { recursive: true });

View File

@@ -33,7 +33,13 @@ test('sanitizeBackgroundEnv marks background child and keeps warning suppression
test('shouldDetachBackgroundLaunch only for first background invocation', () => {
assert.equal(shouldDetachBackgroundLaunch(['--background'], {}), true);
assert.equal(shouldDetachBackgroundLaunch(['--background'], { SUBMINER_BACKGROUND_CHILD: '1' }), false);
assert.equal(shouldDetachBackgroundLaunch(['--background'], { ELECTRON_RUN_AS_NODE: '1' }), false);
assert.equal(
shouldDetachBackgroundLaunch(['--background'], { SUBMINER_BACKGROUND_CHILD: '1' }),
false,
);
assert.equal(
shouldDetachBackgroundLaunch(['--background'], { ELECTRON_RUN_AS_NODE: '1' }),
false,
);
assert.equal(shouldDetachBackgroundLaunch(['--start'], {}), false);
});

View File

@@ -331,6 +331,7 @@ import {
copyCurrentSubtitle as copyCurrentSubtitleCore,
createConfigHotReloadRuntime,
createDiscordPresenceService,
createShiftSubtitleDelayToAdjacentCueHandler,
createFieldGroupingOverlayRuntime,
createOverlayContentMeasurementStore,
createOverlayManager,
@@ -853,21 +854,36 @@ const subsyncRuntime = createMainSubsyncRuntime(buildMainSubsyncRuntimeMainDepsH
let autoPlayReadySignalMediaPath: string | null = null;
let autoPlayReadySignalGeneration = 0;
function maybeSignalPluginAutoplayReady(payload: SubtitleData): void {
function maybeSignalPluginAutoplayReady(
payload: SubtitleData,
options?: { forceWhilePaused?: boolean },
): void {
if (!payload.text.trim()) {
return;
}
const mediaPath = appState.currentMediaPath;
if (!mediaPath) {
const mediaPath =
appState.currentMediaPath?.trim() ||
appState.mpvClient?.currentVideoPath?.trim() ||
'__unknown__';
const duplicateMediaSignal = autoPlayReadySignalMediaPath === mediaPath;
const allowDuplicateWhilePaused =
options?.forceWhilePaused === true && appState.playbackPaused !== false;
if (duplicateMediaSignal && !allowDuplicateWhilePaused) {
return;
}
if (autoPlayReadySignalMediaPath === mediaPath) {
const signalPluginAutoplayReady = (): void => {
logger.debug(`[autoplay-ready] signaling mpv for media: ${mediaPath}`);
sendMpvCommandRuntime(appState.mpvClient, ['script-message', 'subminer-autoplay-ready']);
};
if (duplicateMediaSignal && allowDuplicateWhilePaused) {
// Keep re-notifying the plugin while paused (for startup visibility sync), but
// do not run local unpause fallback on duplicates to avoid resuming user-paused playback.
signalPluginAutoplayReady();
return;
}
autoPlayReadySignalMediaPath = mediaPath;
const playbackGeneration = ++autoPlayReadySignalGeneration;
logger.debug(`[autoplay-ready] signaling mpv for media: ${mediaPath}`);
sendMpvCommandRuntime(appState.mpvClient, ['script-message', 'subminer-autoplay-ready']);
signalPluginAutoplayReady();
const isPlaybackPaused = async (client: {
requestProperty: (property: string) => Promise<unknown>;
}): Promise<boolean> => {
@@ -882,7 +898,9 @@ function maybeSignalPluginAutoplayReady(payload: SubtitleData): void {
if (typeof pauseProperty === 'number') {
return pauseProperty !== 0;
}
logger.debug(`[autoplay-ready] unrecognized pause property for media ${mediaPath}: ${String(pauseProperty)}`);
logger.debug(
`[autoplay-ready] unrecognized pause property for media ${mediaPath}: ${String(pauseProperty)}`,
);
} catch (error) {
logger.debug(
`[autoplay-ready] failed to read pause property for media ${mediaPath}: ${(error as Error).message}`,
@@ -891,55 +909,52 @@ function maybeSignalPluginAutoplayReady(payload: SubtitleData): void {
return true;
};
// Fallback: unpause directly in case plugin readiness handler is unavailable/outdated.
void (async () => {
const mpvClient = appState.mpvClient;
if (!mpvClient?.connected) {
logger.debug('[autoplay-ready] skipped unpause fallback; mpv not connected');
return;
}
// Fallback: repeatedly try to release pause for a short window in case startup
// gate arming and tokenization-ready signal arrive out of order.
const maxReleaseAttempts = options?.forceWhilePaused === true ? 14 : 3;
const releaseRetryDelayMs = 200;
const attemptRelease = (attempt: number): void => {
void (async () => {
if (
autoPlayReadySignalMediaPath !== mediaPath ||
playbackGeneration !== autoPlayReadySignalGeneration
) {
return;
}
const shouldUnpause = await isPlaybackPaused(mpvClient);
logger.debug(`[autoplay-ready] mpv paused before fallback for ${mediaPath}: ${shouldUnpause}`);
if (!shouldUnpause) {
logger.debug('[autoplay-ready] mpv already playing; no fallback unpause needed');
return;
}
mpvClient.send({ command: ['set_property', 'pause', false] });
setTimeout(() => {
void (async () => {
if (
autoPlayReadySignalMediaPath !== mediaPath ||
playbackGeneration !== autoPlayReadySignalGeneration
) {
return;
const mpvClient = appState.mpvClient;
if (!mpvClient?.connected) {
if (attempt < maxReleaseAttempts) {
setTimeout(() => attemptRelease(attempt + 1), releaseRetryDelayMs);
}
return;
}
const followupClient = appState.mpvClient;
if (!followupClient?.connected) {
return;
const shouldUnpause = await isPlaybackPaused(mpvClient);
logger.debug(
`[autoplay-ready] mpv paused before fallback attempt ${attempt} for ${mediaPath}: ${shouldUnpause}`,
);
if (!shouldUnpause) {
if (attempt === 0) {
logger.debug('[autoplay-ready] mpv already playing; no fallback unpause needed');
}
return;
}
const shouldUnpauseFollowup = await isPlaybackPaused(followupClient);
if (!shouldUnpauseFollowup) {
return;
}
followupClient.send({ command: ['set_property', 'pause', false] });
})();
}, 500);
logger.debug('[autoplay-ready] issued direct mpv unpause fallback');
})();
signalPluginAutoplayReady();
mpvClient.send({ command: ['set_property', 'pause', false] });
if (attempt < maxReleaseAttempts) {
setTimeout(() => attemptRelease(attempt + 1), releaseRetryDelayMs);
}
})();
};
attemptRelease(0);
}
let appTray: Tray | null = null;
const buildSubtitleProcessingControllerMainDepsHandler =
createBuildSubtitleProcessingControllerMainDepsHandler({
tokenizeSubtitle: async (text: string) => {
if (getOverlayWindows().length === 0 && !subtitleWsService.hasClients()) {
return null;
}
return await tokenizeSubtitle(text);
},
emitSubtitle: (payload) => {
@@ -950,7 +965,6 @@ const buildSubtitleProcessingControllerMainDepsHandler =
topX: getResolvedConfig().subtitleStyle.frequencyDictionary.topX,
mode: getResolvedConfig().subtitleStyle.frequencyDictionary.mode,
});
maybeSignalPluginAutoplayReady(payload);
},
logDebug: (message) => {
logger.debug(`[subtitle-processing] ${message}`);
@@ -1353,6 +1367,23 @@ function getRuntimeBooleanOption(
return typeof value === 'boolean' ? value : fallback;
}
/**
 * MeCab warmup is only worthwhile when at least one annotation feature that
 * consumes its output is enabled: N+1 highlighting, JLPT tagging, or
 * frequency highlighting. Each toggle can be overridden at runtime, falling
 * back to the resolved config value.
 */
function shouldInitializeMecabForAnnotations(): boolean {
  const config = getResolvedConfig();
  const annotationToggles: Array<[string, boolean]> = [
    ['subtitle.annotation.nPlusOne', config.ankiConnect.nPlusOne.highlightEnabled],
    ['subtitle.annotation.jlpt', config.subtitleStyle.enableJlpt],
    ['subtitle.annotation.frequency', config.subtitleStyle.frequencyDictionary.enabled],
  ];
  return annotationToggles
    .map(([optionKey, fallback]) => getRuntimeBooleanOption(optionKey, fallback))
    .some((enabled) => enabled);
}
const {
getResolvedJellyfinConfig,
getJellyfinClientInfo,
@@ -2320,9 +2351,7 @@ const {
ensureImmersionTrackerStarted();
},
updateCurrentMediaPath: (path) => {
if (appState.currentMediaPath !== path) {
autoPlayReadySignalMediaPath = null;
}
autoPlayReadySignalMediaPath = null;
if (path) {
ensureImmersionTrackerStarted();
}
@@ -2428,6 +2457,9 @@ const {
getFrequencyRank: (text) => appState.frequencyRankLookup(text),
getYomitanGroupDebugEnabled: () => appState.overlayDebugVisualizationEnabled,
getMecabTokenizer: () => appState.mecabTokenizer,
onTokenizationReady: (text) => {
maybeSignalPluginAutoplayReady({ text, tokens: null }, { forceWhilePaused: true });
},
},
createTokenizerRuntimeDeps: (deps) =>
createTokenizerDepsRuntime(deps as Parameters<typeof createTokenizerDepsRuntime>[0]),
@@ -2469,7 +2501,10 @@ const {
if (startupWarmups.lowPowerMode) {
return false;
}
return startupWarmups.mecab;
if (!startupWarmups.mecab) {
return false;
}
return shouldInitializeMecabForAnnotations();
},
shouldWarmupYomitanExtension: () => getResolvedConfig().startupWarmups.yomitanExtension,
shouldWarmupSubtitleDictionaries: () => {
@@ -2609,7 +2644,7 @@ async function syncYomitanDefaultProfileAnkiServer(): Promise<void> {
return;
}
const updated = await syncYomitanDefaultAnkiServerCore(
const synced = await syncYomitanDefaultAnkiServerCore(
targetUrl,
{
getYomitanExt: () => appState.yomitanExt,
@@ -2634,10 +2669,12 @@ async function syncYomitanDefaultProfileAnkiServer(): Promise<void> {
logger.info(message, ...args);
},
},
{
forceOverride: getResolvedConfig().ankiConnect.proxy?.enabled === true,
},
);
if (updated) {
logger.info(`Yomitan default profile Anki server set to ${targetUrl}`);
if (synced) {
lastSyncedYomitanAnkiServer = targetUrl;
}
}
@@ -2925,6 +2962,30 @@ const appendClipboardVideoToQueueHandler = createAppendClipboardVideoToQueueHand
appendClipboardVideoToQueueMainDeps,
);
// Wires the "shift subtitle delay to the adjacent cue" feature to this
// process's mpv client, OSD, and subtitle-source loading.
const shiftSubtitleDelayToAdjacentCueHandler = createShiftSubtitleDelayToAdjacentCueHandler({
  getMpvClient: () => appState.mpvClient,
  // Loads raw subtitle text from an http(s) URL, a file:// URL, or a plain path.
  loadSubtitleSourceText: async (source) => {
    if (/^https?:\/\//i.test(source)) {
      // Abort the download after 4s so the keybinding stays responsive.
      const controller = new AbortController();
      const timeoutId = setTimeout(() => controller.abort(), 4000);
      try {
        const response = await fetch(source, { signal: controller.signal });
        if (!response.ok) {
          throw new Error(`Failed to download subtitle source (${response.status})`);
        }
        return await response.text();
      } finally {
        clearTimeout(timeoutId);
      }
    }
    // NOTE(review): decodeURI(new URL(...).pathname) leaves Windows file URLs
    // with a leading slash (e.g. "/C:/..."); url.fileURLToPath would be more
    // robust — confirm whether file:// sources can originate on Windows here.
    const filePath = source.startsWith('file://') ? decodeURI(new URL(source).pathname) : source;
    return fs.promises.readFile(filePath, 'utf8');
  },
  sendMpvCommand: (command) => sendMpvCommandRuntime(appState.mpvClient, command),
  showMpvOsd: (text) => showMpvOsd(text),
});
const {
handleMpvCommandFromIpc: handleMpvCommandFromIpcHandler,
runSubsyncManualFromIpc: runSubsyncManualFromIpcHandler,
@@ -2945,6 +3006,8 @@ const {
showMpvOsd: (text: string) => showMpvOsd(text),
replayCurrentSubtitle: () => replayCurrentSubtitleRuntime(appState.mpvClient),
playNextSubtitle: () => playNextSubtitleRuntime(appState.mpvClient),
shiftSubDelayToAdjacentSubtitle: (direction) =>
shiftSubtitleDelayToAdjacentCueHandler(direction),
sendMpvCommand: (rawCommand: (string | number)[]) =>
sendMpvCommandRuntime(appState.mpvClient, rawCommand),
isMpvConnected: () => Boolean(appState.mpvClient && appState.mpvClient.connected),

View File

@@ -180,6 +180,7 @@ export interface MpvCommandRuntimeServiceDepsParams {
showMpvOsd: HandleMpvCommandFromIpcOptions['showMpvOsd'];
mpvReplaySubtitle: HandleMpvCommandFromIpcOptions['mpvReplaySubtitle'];
mpvPlayNextSubtitle: HandleMpvCommandFromIpcOptions['mpvPlayNextSubtitle'];
shiftSubDelayToAdjacentSubtitle: HandleMpvCommandFromIpcOptions['shiftSubDelayToAdjacentSubtitle'];
mpvSendCommand: HandleMpvCommandFromIpcOptions['mpvSendCommand'];
isMpvConnected: HandleMpvCommandFromIpcOptions['isMpvConnected'];
hasRuntimeOptionsManager: HandleMpvCommandFromIpcOptions['hasRuntimeOptionsManager'];
@@ -328,6 +329,7 @@ export function createMpvCommandRuntimeServiceDeps(
showMpvOsd: params.showMpvOsd,
mpvReplaySubtitle: params.mpvReplaySubtitle,
mpvPlayNextSubtitle: params.mpvPlayNextSubtitle,
shiftSubDelayToAdjacentSubtitle: params.shiftSubDelayToAdjacentSubtitle,
mpvSendCommand: params.mpvSendCommand,
isMpvConnected: params.isMpvConnected,
hasRuntimeOptionsManager: params.hasRuntimeOptionsManager,

View File

@@ -10,6 +10,7 @@ export interface MpvCommandFromIpcRuntimeDeps {
showMpvOsd: (text: string) => void;
replayCurrentSubtitle: () => void;
playNextSubtitle: () => void;
shiftSubDelayToAdjacentSubtitle: (direction: 'next' | 'previous') => Promise<void>;
sendMpvCommand: (command: (string | number)[]) => void;
isMpvConnected: () => boolean;
hasRuntimeOptionsManager: () => boolean;
@@ -29,6 +30,8 @@ export function handleMpvCommandFromIpcRuntime(
showMpvOsd: deps.showMpvOsd,
mpvReplaySubtitle: deps.replayCurrentSubtitle,
mpvPlayNextSubtitle: deps.playNextSubtitle,
shiftSubDelayToAdjacentSubtitle: (direction) =>
deps.shiftSubDelayToAdjacentSubtitle(direction),
mpvSendCommand: deps.sendMpvCommand,
isMpvConnected: deps.isMpvConnected,
hasRuntimeOptionsManager: deps.hasRuntimeOptionsManager,

Some files were not shown because too many files have changed in this diff Show More