diff --git a/backlog/completed/task-336 - Fix-Hyprland-fullscreen-overlay-downward-offset.md b/backlog/completed/task-336 - Fix-Hyprland-fullscreen-overlay-downward-offset.md new file mode 100644 index 00000000..2a583cdf --- /dev/null +++ b/backlog/completed/task-336 - Fix-Hyprland-fullscreen-overlay-downward-offset.md @@ -0,0 +1,76 @@ +--- +id: TASK-336 +title: Fix Hyprland fullscreen overlay downward offset +status: Done +assignee: [] +created_date: '2026-05-04 05:42' +updated_date: '2026-05-04 06:10' +labels: + - linux + - hyprland + - overlay + - bug +dependencies: [] +references: + - src/window-trackers/hyprland-tracker.ts + - src/core/services/overlay-window-bounds.ts + - src/main/runtime/linux-mpv-fullscreen-overlay-refresh.ts +priority: medium +--- + +## Description + + +SubMiner visible overlay is slightly below mpv when mpv is fullscreen on Linux Hyprland. Align overlay bounds with mpv fullscreen client/monitor bounds. + + +## Acceptance Criteria + +- [x] #1 Hyprland fullscreen mpv overlay uses top-aligned geometry instead of inheriting a downward offset. +- [x] #2 Regression coverage captures the fullscreen Hyprland geometry case. +- [x] #3 Targeted tests pass. + + +## Implementation Notes + + +Added follow-up Hyprland placement handling after the fullscreenClient geometry fix. SubMiner overlay/stats windows now get stable titles and, on Hyprland, are resolved from `hyprctl -j clients` by current PID/title, then set floating before bounds are applied. The stats overlay reapplies bounds after showing because Hyprland cannot see the hidden window before it is mapped. + +2026-05-04 follow-up: offset remains after removing pinning. User reports stats modal still has a top gap from mpv in Hyprland fullscreen. Need inspect exact stats overlay CSS/window bounds after float-only placement. + +2026-05-04 follow-up fix: stats CSS already had zero body margin, so the remaining gap points at native Hyprland placement after float-only handling. 
Added exact `movewindowpixel`/`resizewindowpixel` Hyprland dispatches using the same tracked mpv bounds passed to Electron. + +2026-05-04 second follow-up: live `hyprctl -j clients` showed the SubMiner client was already full monitor size at `[0,0]`, so the remaining visible top strip was inside Electron's transparent stats surface rather than compositor geometry. Made the stats overlay BrowserWindow opaque with the stats base background. Also prevented page titles from overwriting the stable SubMiner overlay/stats titles used for Hyprland client matching. + +2026-05-04 third follow-up: user confirmed native overlay placement is correct and the remaining gap is stats-page-specific. Made stats overlay mode paint an opaque full-viewport root/background and constrained the stats app to `h-screen` with an internal scrolling main pane, so the overlay page itself covers the mpv frame from y=0. + +2026-05-04 fourth follow-up: live Hyprland data showed mpv and SubMiner shared the same outer geometry while stats content still rendered lower. Stats window placement now compensates for Electron/Wayland content insets using `getContentBounds()` versus `getBounds()`, then sends the adjusted outer bounds to Hyprland exact placement so the content area, not just the native surface, aligns to mpv. + +2026-05-04 fifth follow-up: user confirmed the offset is Hyprland-fullscreen-only and not present while mpv is windowed. Added Hyprland `setprop` decoration cleanup during exact overlay placement (`rounding 0`, `border_size 0`, `no_shadow 1`, `no_blur 1`, `decorate 0`) because fullscreen mpv has square fullscreen edges while a floating SubMiner stats window can retain Hyprland floating-window decoration. + + +## Final Summary + + +Summary: +- Treated Hyprland `fullscreenClient` as a fullscreen signal when resolving mpv overlay geometry. +- Added Hyprland window placement handling so SubMiner overlay/stats windows are set floating before bounds are applied. 
+- Added exact Hyprland move/resize dispatches so floating overlay/stats windows are force-aligned to the tracked mpv bounds. +- Gave overlay/stats windows stable titles for Hyprland client matching, and reapplied stats bounds after show. +- Locked overlay/stats window titles against page title changes and made the stats overlay window opaque so mpv cannot show through transparent Electron insets. +- Made the stats overlay page paint an opaque full-viewport background and added CSS regression coverage for overlay mode. +- Compensated stats overlay outer placement for Electron/Wayland content insets. +- Disabled Hyprland floating-window decoration for exact overlay placement over fullscreen mpv. +- Added regression coverage for the 28px fullscreen geometry shape and Hyprland placement dispatches. +- Added a changelog fragment for the overlay fix. + +Verification: +- `bun test src/core/services/hyprland-window-placement.test.ts src/core/services/overlay-window-config.test.ts src/core/services/stats-window.test.ts src/core/services/overlay-window-bounds.test.ts src/window-trackers/hyprland-tracker.test.ts` +- `bun run typecheck` +- `bun run changelog:lint` +- `bun run test:fast` +- `bun test stats/src/styles/globals.test.ts stats/src/lib/api-client.test.ts src/core/services/stats-window.test.ts` +- `bun run build:stats` +- `bun test src/core/services/stats-window.test.ts src/core/services/hyprland-window-placement.test.ts stats/src/styles/globals.test.ts` +- `bun test src/core/services/hyprland-window-placement.test.ts src/core/services/stats-window.test.ts stats/src/styles/globals.test.ts` + diff --git a/backlog/completed/task-339 - Stop-pinning-Hyprland-overlay-windows.md b/backlog/completed/task-339 - Stop-pinning-Hyprland-overlay-windows.md new file mode 100644 index 00000000..242316b0 --- /dev/null +++ b/backlog/completed/task-339 - Stop-pinning-Hyprland-overlay-windows.md @@ -0,0 +1,53 @@ +--- +id: TASK-339 +title: Stop pinning Hyprland overlay windows +status: 
Done +assignee: [] +created_date: '2026-05-04 06:07' +updated_date: '2026-05-04 06:09' +labels: + - linux + - hyprland + - overlay + - bug +dependencies: [] +references: + - src/core/services/hyprland-window-placement.ts + - src/core/services/overlay-window.ts + - src/core/services/stats-window.ts +priority: high +--- + +## Description + + +Recent Hyprland placement fix pins SubMiner overlay/stats windows, making them follow across workspaces instead of staying attached to mpv. Keep the float-for-bounds behavior, but never pin overlay windows. + + +## Acceptance Criteria + +- [x] #1 Hyprland placement dispatches set floating state only and does not dispatch pin. +- [x] #2 Regression coverage proves pinned clients are unpinned or at least not re-pinned by SubMiner. +- [x] #3 Targeted tests and typecheck pass. + + +## Implementation Notes + + +Changed Hyprland placement dispatch construction so unpinned overlay windows only get `setfloating`; pinned overlay windows get a single `pin` dispatch to toggle the bad prior pinned state off. This preserves floating placement for bounds while keeping overlay windows workspace-local with mpv. + + +## Final Summary + + +Summary: +- Stopped re-pinning Hyprland overlay/stats windows during placement. +- Added cleanup behavior for previously pinned SubMiner windows by toggling pin only when Hyprland reports `pinned: true`. +- Updated regression coverage and added a changelog fragment. 
+ +Verification: +- `bun test src/core/services/hyprland-window-placement.test.ts src/core/services/overlay-window-config.test.ts src/core/services/stats-window.test.ts src/core/services/overlay-window-bounds.test.ts src/window-trackers/hyprland-tracker.test.ts` +- `bun run typecheck` +- `bun run changelog:lint` +- `bun run test:fast` + diff --git a/backlog/tasks/task-304 - Fix-N1-sentence-boundary-counting-across-Yomitan-punctuation-gaps.md b/backlog/tasks/task-304 - Fix-N1-sentence-boundary-counting-across-Yomitan-punctuation-gaps.md new file mode 100644 index 00000000..9e7e3803 --- /dev/null +++ b/backlog/tasks/task-304 - Fix-N1-sentence-boundary-counting-across-Yomitan-punctuation-gaps.md @@ -0,0 +1,27 @@ +--- +id: TASK-304 +title: Fix N+1 sentence boundary counting across Yomitan punctuation gaps +status: In Progress +assignee: [] +created_date: '2026-04-26 05:33' +labels: + - bug + - tokenizer + - annotations +dependencies: [] +priority: medium +--- + +## Description + + +N+1 target selection should respect sentence-ending punctuation from the original subtitle text even when Yomitan token output omits punctuation tokens. Current behavior can treat multiple subtitle sentences as one token span and incorrectly satisfy the minimum content-token threshold. + + +## Acceptance Criteria + +- [ ] #1 A subtitle like `てんめ!ふざけんなよ!` does not mark `ふざけん`/similar single-content-token second sentence as N+1 when the minimum sentence word count is 3. +- [ ] #2 N+1 sentence segmentation uses original subtitle text offsets or equivalent source-boundary data, not only punctuation tokens returned by Yomitan. +- [ ] #3 Existing annotation exclusion behavior for particles/grammar tokens remains unchanged. +- [ ] #4 Regression tests cover Yomitan-style token streams where punctuation is absent from the token list. 
+ diff --git a/backlog/tasks/task-305 - Use-Yomitan-word-classes-for-subtitle-token-POS-filtering.md b/backlog/tasks/task-305 - Use-Yomitan-word-classes-for-subtitle-token-POS-filtering.md new file mode 100644 index 00000000..aa19d61f --- /dev/null +++ b/backlog/tasks/task-305 - Use-Yomitan-word-classes-for-subtitle-token-POS-filtering.md @@ -0,0 +1,64 @@ +--- +id: TASK-305 +title: Use Yomitan word classes for subtitle token POS filtering +status: Done +assignee: + - Codex +created_date: '2026-04-26 05:56' +updated_date: '2026-05-02 22:47' +labels: + - tokenizer + - yomitan +dependencies: [] +priority: medium +--- + +## Description + + +Subtitle annotation filtering currently uses Yomitan token spans, then enriches those spans by running MeCab over the full normalized subtitle line. Add support for carrying Yomitan headword wordClasses from termsFind into SubMiner tokens so dictionary-backed tokens can provide coarse POS/tag metadata without vendored Yomitan changes. MeCab whole-line enrichment should remain a fallback/source of detailed POS data when Yomitan classes are absent. + + +## Acceptance Criteria + +- [x] #1 Yomitan scanner tokens preserve matched headword wordClasses when termsFind returns them. +- [x] #2 Subtitle tokenization maps recognized Yomitan wordClasses to coarse PartOfSpeech/POS metadata before annotation filtering. +- [x] #3 Whole-line MeCab enrichment remains available for missing or more detailed POS metadata and does not break existing subtitle annotation behavior. +- [x] #4 Focused tokenizer tests cover wordClasses extraction and POS mapping. + + +## Implementation Plan + + +1. Add focused regression coverage for Yomitan scanner wordClasses payload and subtitle POS mapping. +2. Extend the app-owned Yomitan scanner payload to carry matched headword wordClasses when present. +3. Map recognized Yomitan wordClasses to SubMiner coarse PartOfSpeech/POS metadata before annotation filtering. +4. 
Keep MeCab whole-line enrichment as fallback/detail-fill for missing POS fields. +5. Run focused tokenizer tests and typecheck. + +2026-05-02 review follow-up: inspect latest CodeRabbit review on PR #57, classify each finding as actionable/not actionable, patch scoped issues, run focused verification, then update final notes. User request to address/assess the review is the approval for this follow-up. + + +## Implementation Notes + + +Implemented app-only wordClasses extraction from termsFind results; no vendored Yomitan changes required. Recognized classes currently map prt, aux, v*, adj-i/adj-ix, adj-na, and noun-like classes to SubMiner POS metadata. MeCab enrichment now skips only tokens with complete pos1/pos2/pos3 and otherwise fills missing fields while preserving existing coarse pos1. Verification: bun test src/core/services/tokenizer/yomitan-parser-runtime.test.ts src/core/services/tokenizer.test.ts; bun run typecheck. + +2026-05-02 CodeRabbit latest review assessment: only current actionable finding was in src/core/services/tokenizer/annotation-stage.test.ts, where a kana-only regression fixture used mixed-script/punctuation surface text. Earlier CodeRabbit findings in this PR were already marked addressed by prior commits. Patched the fixture to use pure-kana surface/headword and renamed the test to match the exercised behavior. Verification: bun test src/core/services/tokenizer/annotation-stage.test.ts; bun run typecheck. + + +## Final Summary + + +Implemented app-only Yomitan wordClasses support for subtitle token annotation filtering. The scanner carries matched headword wordClasses from termsFind results, tokenizer maps recognized classes into SubMiner coarse POS metadata before annotation, and MeCab whole-line enrichment continues to fill missing detailed POS fields without requiring vendored Yomitan changes. + +2026-05-02 CodeRabbit follow-up: +- Assessed the latest CodeRabbit review on PR #57. 
Only one new actionable finding remained: the kana-only N+1 regression test used a mixed/punctuated surface. +- Updated the fixture in src/core/services/tokenizer/annotation-stage.test.ts to use a pure-kana unknown target and renamed the test accordingly. + +Tests run: +- bun test src/core/services/tokenizer/annotation-stage.test.ts +- bun run typecheck + +Note: earlier CodeRabbit findings on this PR were already marked addressed in prior commits; no further latest-review issues were left unresolved in this pass. + diff --git a/backlog/tasks/task-306 - Fix-Hyprland-fullscreen-overlay-geometry-and-hover-pause.md b/backlog/tasks/task-306 - Fix-Hyprland-fullscreen-overlay-geometry-and-hover-pause.md new file mode 100644 index 00000000..f2e6a76e --- /dev/null +++ b/backlog/tasks/task-306 - Fix-Hyprland-fullscreen-overlay-geometry-and-hover-pause.md @@ -0,0 +1,33 @@ +--- +id: TASK-306 +title: Fix Hyprland fullscreen overlay geometry and hover pause +status: Done +assignee: [] +created_date: '2026-04-27 01:44' +labels: + - linux + - hyprland + - overlay + - bug +dependencies: [] +priority: high +--- + +## Description + + + +Overlay should track mpv geometry through Hyprland fullscreen transitions, stay above fullscreen video, and keep primary subtitle hover pause working after fullscreen/toggle cycles. + +Implemented by observing mpv fullscreen property changes in addition to Hyprland geometry events, then refreshing visible overlay bounds/layering on Linux. + + + +## Acceptance Criteria + + + +- [x] #1 Hyprland tracker reacts to fullscreen/window state changes with updated geometry. +- [x] #2 Visible overlay is re-layered above mpv after Hyprland fullscreen geometry updates. +- [x] #3 Primary subtitle hover pause remains active after overlay geometry changes or visible overlay toggle cycles. 
+ diff --git a/backlog/tasks/task-307 - Exclude-kana-only-words-from-N1-subtitle-targets.md b/backlog/tasks/task-307 - Exclude-kana-only-words-from-N1-subtitle-targets.md new file mode 100644 index 00000000..78e4de50 --- /dev/null +++ b/backlog/tasks/task-307 - Exclude-kana-only-words-from-N1-subtitle-targets.md @@ -0,0 +1,58 @@ +--- +id: TASK-307 +title: Exclude kana-only words from N+1 subtitle targets +status: Done +assignee: + - codex +created_date: '2026-04-27 01:52' +updated_date: '2026-04-27 01:57' +labels: + - tokenizer + - annotations +dependencies: [] +priority: medium +--- + +## Description + + +Subtitle N+1 annotation is over-targeting kana-only or hiragana/katakana tokens that collapse to dictionary words. Adjust targeting so kana-only tokens are not selected as N+1 candidates, while preserving tokenization/hover behavior and other annotation metadata where existing filters allow it. + + +## Acceptance Criteria + +- [x] #1 Kana-only subtitle tokens are not marked as N+1 targets. +- [x] #2 Kanji or mixed lexical tokens can still be marked as N+1 targets when they are the single unknown candidate in a sentence. +- [x] #3 Regression coverage demonstrates the kana-only N+1 exclusion. + + +## Implementation Plan + + +1. Add a failing regression in `src/core/services/tokenizer.test.ts` showing a kana-only Yomitan token is not selected as the single N+1 target, while a mixed lexical token in the same style still can be targeted. +2. Implement the smallest filter in `src/token-merger.ts`: N+1 candidate selection rejects tokens whose surface is entirely kana; word-count behavior remains governed by existing annotation/POS filters. +3. Run the focused tokenizer tests, then update task acceptance criteria/final summary. + + +## Implementation Notes + + +Implemented a surface-level kana-only guard in N+1 candidate selection. 
Kept existing word-count/POS filtering behavior intact; updated tokenizer and annotation-stage expectations where old tests intentionally allowed kana-only N+1 targets. + + +## Final Summary + + +Summary: +- Added kana-only surface detection to `isNPlusOneCandidateToken` so hiragana/katakana-only subtitle tokens are not selected as N+1 targets. +- Added/updated tokenizer and annotation-stage regressions for kana-only targets while preserving non-kana N+1 behavior. +- Added changelog fragment `changes/307-kana-nplusone-targets.md`. + +Verification: +- `bun test src/core/services/tokenizer.test.ts --test-name-pattern "kana-only N\+1"` failed before the fix with `true !== false`. +- `bun test src/core/services/tokenizer/annotation-stage.test.ts src/core/services/tokenizer.test.ts` passed. +- `bun run typecheck` passed. +- `bun run test:fast` passed. +- `bun run changelog:lint` passed. +- `bunx prettier --check src/core/services/tokenizer.test.ts src/core/services/tokenizer/annotation-stage.test.ts src/token-merger.ts changes/307-kana-nplusone-targets.md` passed. + diff --git a/backlog/tasks/task-308 - Restore-persistent-JLPT-subtitle-underlines.md b/backlog/tasks/task-308 - Restore-persistent-JLPT-subtitle-underlines.md new file mode 100644 index 00000000..3ca96fd8 --- /dev/null +++ b/backlog/tasks/task-308 - Restore-persistent-JLPT-subtitle-underlines.md @@ -0,0 +1,54 @@ +--- +id: TASK-308 +title: Restore persistent JLPT subtitle underlines +status: Done +assignee: + - Codex +created_date: '2026-04-27 02:03' +updated_date: '2026-04-27 02:07' +labels: + - overlay + - jlpt + - renderer +dependencies: [] +priority: medium +--- + +## Description + + +JLPT tagging currently exposes the JLPT level on hover, but the persistent subtitle underline is missing. When JLPT annotation is enabled and a rendered subtitle token has a JLPT level, users should see the configured JLPT color underline without needing to hover. 
+ + +## Acceptance Criteria + +- [x] #1 JLPT-tagged subtitle tokens render a persistent underline for N1-N5 levels when JLPT tagging is enabled. +- [x] #2 Hover and keyboard-selected JLPT labels continue to appear for tagged tokens. +- [x] #3 Higher-priority annotation colors such as known words, N+1, names, and frequency styling are not overridden by JLPT text color. +- [x] #4 Regression coverage verifies the CSS contract for persistent JLPT underlines. + + +## Implementation Plan + + +1. Add a focused renderer CSS regression asserting each `word-jlpt-n*` class provides persistent underline decoration while preserving existing typography constraints. +2. Run the focused renderer test to confirm the regression fails before production changes. +3. Restore underline CSS for JLPT classes without broadening JLPT text-color precedence over known/N+1/name/frequency tokens. +4. Re-run the focused renderer test and update acceptance criteria/task notes. + + +## Implementation Notes + + +Verified red/green regression: tightened `src/renderer/subtitle-render.test.ts` first failed because base `word-jlpt-n*` selectors had no underline decoration, then passed after moving JLPT underline decoration to unconditional base selectors while leaving JLPT text color priority-scoped. + +Checks: `bun test src/renderer/subtitle-render.test.ts`; `bun run changelog:lint`; `bun run typecheck`. + + +## Final Summary + + +Restored persistent JLPT subtitle underlines by adding underline decoration to each base `word-jlpt-n*` renderer CSS class. JLPT text color remains in the existing priority-scoped selectors, so known/N+1/name/frequency coloring is not overridden while the underline still appears on any JLPT-tagged token. + +Updated renderer CSS regression coverage to assert underline decoration for N1-N5 and added a fixed changelog fragment. Verified with `bun test src/renderer/subtitle-render.test.ts`, `bun run changelog:lint`, and `bun run typecheck`. 
+ diff --git a/backlog/tasks/task-309 - Accept-modified-follow-up-digits-for-multi-line-sentence-mining.md b/backlog/tasks/task-309 - Accept-modified-follow-up-digits-for-multi-line-sentence-mining.md new file mode 100644 index 00000000..078d151d --- /dev/null +++ b/backlog/tasks/task-309 - Accept-modified-follow-up-digits-for-multi-line-sentence-mining.md @@ -0,0 +1,57 @@ +--- +id: TASK-309 +title: Accept modified follow-up digits for multi-line sentence mining +status: Done +assignee: + - '@codex' +created_date: '2026-04-27 20:06' +updated_date: '2026-04-27 20:15' +labels: + - bug + - linux + - shortcuts +dependencies: [] +priority: high +--- + +## Description + + + +On Linux, `Ctrl+Shift+S` starts multi-line sentence-card mining, but the follow-up digit is not accepted and the prompt times out. Restore reliable digit capture for the multi-mine flow, including the common case where the original shortcut modifiers are still held briefly while pressing the digit. + + + +## Acceptance Criteria + + + +- [x] #1 `Ctrl+Shift+S` followed by a number-row digit creates a counted `mineSentenceMultiple` request instead of timing out. +- [x] #2 Follow-up digit capture works when the user has not fully released `Ctrl`/`Shift` after the starter shortcut. +- [x] #3 Regression coverage includes renderer session bindings and mpv plugin numeric selection. + + +## Implementation Notes + + + +Backlog MCP unavailable in this session, so this task is tracked via repo-local backlog files. + +Implemented renderer digit extraction from `KeyboardEvent.code` for pending numeric selection, so shifted number-row events such as `Ctrl+Shift+Digit3` still dispatch count `3`. Updated the mpv plugin session-binding numeric selector to register bare digits plus the starter shortcut modifier combinations, so plugin-owned `Ctrl+Shift+S` can accept a follow-up digit before the modifiers are fully released. 
+ +Verification: + +- `bun test src/renderer/handlers/keyboard.test.ts src/core/services/overlay-shortcut-handler.test.ts src/core/services/overlay-window.test.ts` +- `bun run test:plugin:src` +- `bun run changelog:lint` +- `bun x prettier --check src/renderer/handlers/keyboard.ts src/renderer/handlers/keyboard.test.ts package.json 'changes/309-multi-mine-modified-digits.md' 'backlog/tasks/task-309 - Accept-modified-follow-up-digits-for-multi-line-sentence-mining.md'` + + + +## Final Summary + + + +Restored multi-line sentence-card digit capture for the case where `Ctrl`/`Shift` are still held after `Ctrl+Shift+S`. The renderer now accepts digits by physical `Digit1`-`Digit9`/`Numpad1`-`Numpad9` code during pending numeric selection, and the mpv plugin registers the matching modified digit bindings for session-binding numeric prompts. + + diff --git a/backlog/tasks/task-310 - Suppress-N1-highlight-for-kana-only-candidate-sentences.md b/backlog/tasks/task-310 - Suppress-N1-highlight-for-kana-only-candidate-sentences.md new file mode 100644 index 00000000..46093703 --- /dev/null +++ b/backlog/tasks/task-310 - Suppress-N1-highlight-for-kana-only-candidate-sentences.md @@ -0,0 +1,58 @@ +--- +id: TASK-310 +title: Suppress N+1 highlight for kana-only candidate sentences +status: Done +assignee: + - Codex +created_date: '2026-04-28 06:55' +updated_date: '2026-04-28 07:04' +labels: + - tokenizer + - n+1 +dependencies: [] +priority: medium +--- + +## Description + + +Reduce noisy N+1 subtitle annotations when the only unknown candidates in a sentence are kana-only hiragana or katakana words, such as mostly-kana subtitle lines where highlighting a particle/helper-like token is low value. + + +## Acceptance Criteria + +- [x] #1 N+1 annotation does not mark a kana-only unknown target when all N+1 candidates in the sentence are kana-only. +- [x] #2 N+1 annotation continues to mark kanji or mixed-script unknown targets in otherwise eligible sentences. 
+- [x] #3 A focused regression test covers the kana-only candidate case. +- [x] #4 N+1 minimum sentence word count excludes tokens stripped by the subtitle annotation filter, so filtered grammar/noise tokens cannot satisfy minSentenceWords. + + +## Implementation Plan + + +1. Keep the existing N+1 target eligibility guard: kana-only subtitle surfaces do not become N+1 targets. +2. Add a focused regression in src/core/services/tokenizer/annotation-stage.test.ts proving annotation-filtered tokens do not count toward ankiConnect.nPlusOne.minSentenceWords. +3. Verify the new regression fails before code changes. +4. Patch src/token-merger.ts so the N+1 minimum sentence word count uses the same subtitle-annotation eligibility filter as annotation rendering, excluding filtered particles/auxiliaries/noise from the count. +5. Re-run focused tokenizer tests, then update TASK-310 acceptance criteria and final notes. + + +## Implementation Notes + + +Initial context: current token-merger has an existing surface-level kana-only guard in isNPlusOneCandidateToken, added in commit 9e4ad907. Need to decide whether to broaden behavior to lookup/headword forms or verify current behavior only. + +Implemented by treating kana-only N+1 candidates as kana-only even when their token surface includes surrounding subtitle punctuation such as ellipsis or dashes. Focused regression was red before the token-merger change: スイッチ… was marked true, then passed after the guard update. test:env initially hit an unrelated immersion-tracker active_days timing/order failure and Bun follow-on loader error; the failing test passed in isolation and the full test:env rerun passed. + +Reopened for follow-up scope: minSentenceWords must count annotation-eligible tokens only, not tokens stripped from annotation metadata. 
+ +Implemented follow-up minSentenceWords behavior: unknown tokens filtered from N+1 targeting no longer contribute to sentence length; known eligible tokens and true N+1 candidates still count. + + +## Final Summary + + +Changed N+1 sentence-length counting so minSentenceWords only counts known eligible words and actual N+1 target candidates. Unknown tokens filtered from N+1 targeting, including kana-only unknowns, no longer pad a sentence into eligibility. Existing annotation-filtered particles/auxiliaries remain excluded. Added regression coverage for the filtered unknown padding case while preserving kanji/mixed-script target behavior. + +Verification: new regression failed before implementation; `bun test src/core/services/tokenizer/annotation-stage.test.ts -t "N\\+1"` pass; full `bun test src/core/services/tokenizer/annotation-stage.test.ts` pass; `bun test src/core/services/tokenizer.test.ts -t "N\\+1"` pass; `bun run typecheck` pass. + diff --git a/backlog/tasks/task-311 - Suppress-auxiliary-inflection-fragments-from-subtitle-annotations.md b/backlog/tasks/task-311 - Suppress-auxiliary-inflection-fragments-from-subtitle-annotations.md new file mode 100644 index 00000000..e34a8302 --- /dev/null +++ b/backlog/tasks/task-311 - Suppress-auxiliary-inflection-fragments-from-subtitle-annotations.md @@ -0,0 +1,43 @@ +--- +id: TASK-311 +title: Suppress auxiliary inflection fragments from subtitle annotations +status: Done +assignee: [] +created_date: '2026-05-02 09:07' +updated_date: '2026-05-02 09:10' +labels: + - tokenizer + - annotations + - bug +dependencies: [] +priority: medium +--- + +## Description + + +Suppress standalone Japanese auxiliary/inflection subtitle fragments such as `れる` and `れた` from frequency/JLPT/N+1/known annotation styling while keeping lexical verbs such as `くれ` / `くれる` annotatable. Tokens must remain hoverable; only annotation metadata should be stripped. 
+ + +## Acceptance Criteria + +- [x] #1 `れる` and `れた`-style standalone helper fragments render as plain hoverable subtitle tokens. +- [x] #2 Lexical verbs like `くれ` / `くれる` remain eligible for annotation. +- [x] #3 Regression tests cover unit filter behavior and tokenizer integration. + + +## Implementation Notes + + +Implemented with TDD. Added failing coverage first for standalone `れる`/`れた` auxiliary fragments and a lexical `くれ`/`くれる` guard. Updated the shared subtitle annotation filter to strip annotation metadata for kana-only auxiliary inflection fragments identified by MeCab POS (`助動詞` only, or `動詞/接尾` with optional trailing `助動詞`) while preserving lexical `くれ` as `くれる` when tagged `動詞/自立`. Added tokenizer integration coverage for `れた` and neighboring lexical N+1 behavior. + + +## Final Summary + + +Suppressed annotation metadata for standalone auxiliary inflection fragments such as `れる` and `れた` in subtitle tokens, leaving them hoverable but plain. Preserved lexical `くれ` -> `くれる` verb metadata when MeCab tags it as `動詞/自立`. + +Added unit and tokenizer regression coverage, plus a release fragment in `changes/311-auxiliary-inflection-annotation-filter.md`. + +Validation: targeted annotation/tokenizer tests passed; `bun run typecheck` passed; `bun run changelog:lint` passed. `bun run test:fast` was attempted twice and failed in unrelated `src/core/services/subsync.test.ts` cross-file state (`window.electronAPI` undefined), while `bun test src/core/services/subsync.test.ts` passes by itself. 
+ diff --git a/backlog/tasks/task-312 - Suppress-ja-nai-explanatory-ending-subtitle-annotations.md b/backlog/tasks/task-312 - Suppress-ja-nai-explanatory-ending-subtitle-annotations.md new file mode 100644 index 00000000..987a822d --- /dev/null +++ b/backlog/tasks/task-312 - Suppress-ja-nai-explanatory-ending-subtitle-annotations.md @@ -0,0 +1,42 @@ +--- +id: TASK-312 +title: Suppress ja-nai explanatory ending subtitle annotations +status: Done +assignee: [] +created_date: '2026-05-02 09:55' +updated_date: '2026-05-02 10:03' +labels: + - tokenizer + - annotations + - bug +dependencies: [] +priority: medium +--- + +## Description + + +Suppress subtitle annotation styling for grammar-only explanatory endings like `じゃない` and `じゃないですか` while preserving nearby lexical content annotations. + + +## Acceptance Criteria + +- [x] #1 `じゃない` and `じゃないですか`-style endings render as plain hoverable subtitle tokens. +- [x] #2 The reported phrase `みたいなのあるじゃないですか` does not annotate `じゃない`/`じゃないですか` as lexical/frequency content. +- [x] #3 Regression tests cover unit filter behavior and tokenizer integration without suppressing lexical content tokens. +- [x] #4 Standalone polite copula endings such as `です` / `ですよ` render as plain hoverable subtitle tokens even if POS metadata is missing or too lexical. + + +## Implementation Notes + + +Added failing coverage first for `じゃない` / `じゃないですか` and `ですよ` leaking annotation metadata when POS metadata is missing or too lexical. Implemented term-family exclusions in the shared subtitle annotation filter for the `じゃない` explanatory family and polite copula suffix endings (`ですか`, `ですね`, `ですよ`, `ですな`). Kept bare `です` term-only behavior unchanged to preserve existing no-POS frequency tests; POS-tagged `です` is already stripped by the grammar POS exclusion path. + + +## Final Summary + + +Suppressed subtitle annotation metadata for grammar-only endings like `じゃないですか` and `ですよ`, while preserving nearby lexical content annotations. 
Added unit and tokenizer regression coverage for the reported `みたいなのあるじゃないですか` and `感じですよ` shapes, plus changelog fragment `changes/312-grammar-ending-annotation-filter.md`. + +Validation: `bun test src/core/services/tokenizer/annotation-stage.test.ts`; `bun test src/core/services/tokenizer.test.ts`; `bun run typecheck`; `bun run changelog:lint`; `git diff --check`. + diff --git a/backlog/tasks/task-315 - Suppress-annotations-for-standalone-じゃない-and-です-ending-tokens.md b/backlog/tasks/task-315 - Suppress-annotations-for-standalone-じゃない-and-です-ending-tokens.md new file mode 100644 index 00000000..ed32daef --- /dev/null +++ b/backlog/tasks/task-315 - Suppress-annotations-for-standalone-じゃない-and-です-ending-tokens.md @@ -0,0 +1,67 @@ +--- +id: TASK-315 +title: Suppress annotations for standalone じゃない and です ending tokens +status: Done +assignee: + - codex +created_date: '2026-05-03 00:02' +updated_date: '2026-05-03 06:05' +labels: + - bug + - tokenizer +dependencies: [] +priority: medium +--- + +## Description + + +Standalone `じゃない` grammar ending tokens should not display or persist subtitle annotations even if a dictionary assigns a rank or JLPT/known match. User observed `じゃない` still being marked frequent in overlay after tokenization produced it as a dictionary word. + + +## Acceptance Criteria + +- [x] #1 `じゃない` and `です` ending tokens have known-word, N+1, frequency, and JLPT annotation metadata cleared in subtitle annotation output. +- [x] #2 Common polite/question variants such as `じゃないですか` and `ですよ` remain excluded when tokenized as a single ending token. +- [x] #3 Regression coverage proves same-line Yomitan segments split content from trailing grammar endings so the content word can be annotated without coloring the ending. +- [x] #4 Auxiliary-only helper spans such as `てく` + `れた` in `ベアトリスがいてくれたから` have known-word, N+1, frequency, and JLPT annotation metadata cleared. 
+- [x] #5 Hard-coded grammar-ending phrase permutations are replaced by shared pattern matching, with parser selection and subtitle annotation filtering using the same grammar-ending classifier. + + +## Implementation Plan + + +1. Add a focused regression for `ベアトリスがいてくれたから` where Yomitan tokens include auxiliary-only `てく` and `れた` with pre-ranked/known/JLPT metadata candidates. +2. Run the targeted test to verify the regression fails before production changes. +3. Patch the shared subtitle annotation filter so kana-only auxiliary helper spans made only of grammar POS components are excluded while preserving lexical content tokens. +4. Re-run targeted tokenizer/annotation tests, then run SubMiner change verification classifier/verifier for the touched files. +5. Update TASK-315 acceptance criteria, notes, and final summary with commands and outcomes. + +Replace explicit standalone grammar-ending permutations with a compact shared matcher used by parser selection and annotation filtering. + +Add regression tests first for non-enumerated polite copula / ja-nai variants so the matcher behavior is proven, then refactor implementation and verify targeted lanes. + + +## Implementation Notes + + +Implemented as one focused tokenizer fix. Parser selection now splits dictionary-backed same-line grammar ending segments (`です`, `じゃない*`) from preceding content so annotation styling can apply only to the content token. Shared subtitle annotation filtering now treats bare `です` like the existing `ですか/ですよ/...` copula endings. + +2026-05-03: Reopened for approved add-on covering auxiliary-only `てく` + `れた` helper highlighting report. + +2026-05-03: Added regression coverage for `ベアトリスがいてくれたから` where Yomitan emits `てく` + `れた` and MeCab enrichment tags `てく` as `助詞|動詞` / `接続助詞|非自立`. The regression initially failed because `てく` kept `isKnown: true` and `jlptLevel: N4`. Added a shared-filter helper for kana-only particle+non-independent-verb helper spans, preserving lexical `自立` verbs. 
Verification: `bun test src/core/services/tokenizer/annotation-stage.test.ts`, `bun test src/core/services/tokenizer.test.ts`, `bun test src/core/services/tokenizer/parser-selection-stage.test.ts`, `bun x prettier --check ...`, and `bun run typecheck` passed. SubMiner verifier core lane passed typecheck but `bun run test:fast` failed on unrelated existing cross-suite issues: `window.electronAPI` undefined in `src/renderer/handlers/keyboard.ts` during `src/core/services/subsync.test.ts`, followed by Bun `node:test` nested-test cascade. + +2026-05-03: Reopened for follow-up requested by user: remove hard-coded standalone grammar-ending permutation list and lean on pattern/POS filtering where possible. + +2026-05-03: Added shared `grammar-ending.ts` matcher for polite copula, negative copula, and explanatory endings. Parser selection now uses the standalone-ending matcher instead of `STANDALONE_GRAMMAR_ENDINGS`. Shared subtitle filter now uses the same grammar classifier instead of generated phrase sets. Removed stale duplicate subtitle-exclusion helpers from `annotation-stage.ts`; annotation-stage continues to delegate subtitle exclusion to the shared filter. Verification passed: targeted tokenizer/parser/annotation tests, Prettier check, `bun run typecheck`, `bun run test:fast`, `bun run test:env`, `bun run build`, and `bun run test:smoke:dist`. `bun run changelog:lint` remains blocked by pre-existing malformed fragment `changes/319-interjection-annotation-filter.md`. + + +## Final Summary + + +Replaced grammar-ending phrase permutations with shared pattern matching. `parser-selection-stage.ts` now splits standalone grammar endings through `grammar-ending.ts` instead of `STANDALONE_GRAMMAR_ENDINGS`; `subtitle-annotation-filter.ts` uses the same classifier for polite copula, negative copula, and explanatory endings instead of generated exact phrase sets. 
+ +Kept exclusion ownership cleaner: subtitle annotation exclusion remains in the shared filter, while `annotation-stage.ts` no longer carries stale duplicate subtitle-exclusion constants/helpers. Added regressions for pattern coverage including `ではないですか` splitting and no-POS grammar-ending annotation clearing. + +Verification passed: targeted tokenizer/parser/annotation tests, Prettier check, `bun run typecheck`, `bun run test:fast`, `bun run test:env`, `bun run build`, and `bun run test:smoke:dist`. `bun run changelog:lint` is blocked by pre-existing malformed `changes/319-interjection-annotation-filter.md`; new fragment `changes/321-grammar-ending-pattern-filter.md` uses the current metadata format. + diff --git a/backlog/tasks/task-316 - Fix-macOS-launcher-playback-exit-with-background-stats-daemon.md b/backlog/tasks/task-316 - Fix-macOS-launcher-playback-exit-with-background-stats-daemon.md new file mode 100644 index 00000000..48139e41 --- /dev/null +++ b/backlog/tasks/task-316 - Fix-macOS-launcher-playback-exit-with-background-stats-daemon.md @@ -0,0 +1,68 @@ +--- +id: TASK-316 +title: Fix macOS launcher playback exit with background stats daemon +status: Done +assignee: + - '@Codex' +created_date: '2026-05-03 00:32' +updated_date: '2026-05-03 00:36' +labels: + - bug + - macos + - mpv + - stats + - runtime +dependencies: [] +priority: high +--- + +## Description + + +Launching a video on macOS when SubMiner is not already running should not leave the regular SubMiner app/tray alive after mpv closes. A separately running background stats daemon must remain non-blocking and must not be used as a foreground app dependency during playback startup/shutdown. + + +## Acceptance Criteria + +- [x] #1 Closing a launcher/plugin-managed mpv session exits the launcher-started regular SubMiner app/tray after mpv closes. +- [x] #2 Explicit background/no-argument app launches still remain alive as before. 
+- [x] #3 A live background stats daemon is ignored by normal in-app stats server routing during regular app startup/playback, so the regular app never depends on or connects to that background daemon. +- [x] #4 Regression coverage demonstrates the managed playback shutdown and stats-daemon isolation behavior. + + +## Implementation Plan + + +1. Add failing regressions first: stats routing should ignore a live foreign background daemon for normal app URL/server startup, and managed playback disconnect should request app quit directly without reconnecting or depending on overlay/youtube disconnect guards. +2. Implement the narrow runtime changes in `src/main/runtime/stats-server-routing.ts` and, if needed, mpv disconnect plumbing in `src/core/services/mpv.ts` / event deps. +3. Preserve explicit persistent background/no-arg behavior by keeping `--managed-playback` as the only playback-exit marker. +4. Run focused tests (`stats-server-routing`, mpv client/protocol/event tests), then typecheck if focused checks pass. +5. Update changelog and task acceptance/final notes. + + +## Implementation Notes + + +Implemented regular app stats routing isolation from live background daemon state and explicit managed-playback quit-on-disconnect wiring in main mpv event deps. Existing `MpvIpcClient` socket-close managed playback quit path remains covered. + +`bun run test:fast` was attempted after focused verification. It failed in the broad `test:core:src` lane with Bun/node:test nested-test runner errors across many unrelated files and one transient subsync renderer API failure; rerunning the concrete subsync failure alone passed. Focused runtime tests, typecheck, and changelog lint remain green. + + +## Final Summary + + +Summary: +- Regular app stats server routing no longer returns or depends on a live background daemon URL; it validates/cleans state, then uses the local app stats server path. 
+- Managed playback is now explicitly treated as a quit-on-disconnect launch mode in main mpv event deps, in addition to the existing mpv socket-close quit request. +- Added regressions for background daemon isolation and managed playback quit-on-disconnect classification. +- Added changelog fragment `changes/316-macos-playback-stats-daemon.md`. + +Verification: +- `bun test src/main/runtime/stats-server-routing.test.ts src/core/services/mpv.test.ts src/core/services/mpv-protocol.test.ts src/main/runtime/mpv-client-event-bindings.test.ts src/main/runtime/mpv-main-event-bindings.test.ts src/main/runtime/mpv-main-event-main-deps.test.ts` +- `bun run typecheck` +- `bun run changelog:lint` +- `bun test src/core/services/subsync.test.ts --test-name-pattern "deterministic _retimed"` + +Blocked broader gate: +- `bun run test:fast` failed in `test:core:src` with Bun/node:test nested-test runner errors across unrelated files; the concrete subsync failure from that run passed when isolated. + diff --git a/backlog/tasks/task-318 - Keep-JLPT-underline-color-fixed-after-lookup-selection.md b/backlog/tasks/task-318 - Keep-JLPT-underline-color-fixed-after-lookup-selection.md new file mode 100644 index 00000000..15a8d5e3 --- /dev/null +++ b/backlog/tasks/task-318 - Keep-JLPT-underline-color-fixed-after-lookup-selection.md @@ -0,0 +1,53 @@ +--- +id: TASK-318 +title: Keep JLPT underline color fixed after lookup selection +status: Done +assignee: + - '@Codex' +created_date: '2026-05-03 03:17' +updated_date: '2026-05-03 03:19' +labels: + - overlay + - jlpt + - renderer +dependencies: [] +priority: medium +--- + +## Description + + +Looking up a subtitle token can leave browser/Yomitan selection styling active. If that token has a JLPT class and another annotation class, the underline must remain the JLPT level color because underline color represents static JLPT classification, not the currently active annotation or lookup state. 
+ + +## Acceptance Criteria + +- [x] #1 JLPT subtitle underlines retain their configured N1-N5 color after lookup/selection styling is applied. +- [x] #2 JLPT tokens that also have known, N+1, name, or frequency annotation classes keep their annotation text color behavior without changing the JLPT underline color. +- [x] #3 Renderer regression coverage verifies the CSS contract for the combined JLPT plus annotation case. + + +## Implementation Plan + + +1. Add a focused CSS regression in `src/renderer/subtitle-render.test.ts` for JLPT tokens combined with higher-priority annotation classes and lookup/selection styling. +2. Run the focused renderer test and confirm it fails because selection rules do not lock `text-decoration-color`. +3. Update `src/renderer/style.css` to explicitly preserve JLPT underline decoration color in lookup/selection state selectors without changing text color priority. +4. Re-run the focused renderer test, then run the smallest relevant verification gate. + + +## Implementation Notes + + +Verified TDD red/green for renderer CSS contract: `bun test src/renderer/subtitle-render.test.ts` first failed because `word-jlpt-n1::selection` lock was missing, then passed after adding explicit JLPT `text-decoration-color` selection rules. Also ran `bun run changelog:lint` and `bun run typecheck` successfully. + + +## Final Summary + + +Fixed JLPT subtitle underline color drift after dictionary lookup/selection by adding explicit `::selection` decoration-color locks for N1-N5 token classes in `src/renderer/style.css`. This preserves the JLPT underline as static classification while leaving known/N+1/name/frequency text color priority intact. + +Added renderer CSS regression coverage for the JLPT selection lock and a user-visible changelog fragment. + +Checks: `bun test src/renderer/subtitle-render.test.ts`; `bun run changelog:lint`; `bun run typecheck`. 
+ diff --git a/backlog/tasks/task-319 - Suppress-annotations-for-expressive-interjection-subtitles.md b/backlog/tasks/task-319 - Suppress-annotations-for-expressive-interjection-subtitles.md new file mode 100644 index 00000000..420bf4d7 --- /dev/null +++ b/backlog/tasks/task-319 - Suppress-annotations-for-expressive-interjection-subtitles.md @@ -0,0 +1,58 @@ +--- +id: TASK-319 +title: Suppress annotations for expressive interjection subtitles +status: Done +assignee: + - Codex +created_date: '2026-05-03 03:18' +updated_date: '2026-05-03 03:20' +labels: + - bug + - subtitle-annotations +dependencies: [] +references: + - src/core/services/tokenizer/subtitle-annotation-filter.ts + - src/core/services/tokenizer/annotation-stage.test.ts +priority: medium +--- + +## Description + + +Interjection-only subtitle tokens such as ハァ and はっ should remain hoverable as tokens but must not receive known, N+1, frequency, or JLPT annotation styling. Current behavior can still annotate these forms when dictionary/POS metadata does not trip the existing exclusion gate. + + +## Acceptance Criteria + +- [x] #1 Standalone ハァ/はっ-style interjection tokens have annotation metadata cleared even when dictionary metadata exists. +- [x] #2 Filtering remains scoped so content-bearing non-interjection tokens still receive annotations. +- [x] #3 Regression coverage exercises the reported subtitle pattern: ハァ… / (ガーフィール)はっ! + + +## Implementation Plan + + +1. Add failing regression coverage around annotation filtering for the reported interjection forms, including katakana ハァ and small-tsu はっ with surrounding subtitle punctuation/name text. +2. Tighten the shared subtitle annotation exclusion gate so expressive kana interjections clear annotation metadata without relying only on MeCab pos1=感動詞. +3. Run the focused tokenizer/annotation tests, then update acceptance criteria and notes. 
+ + +## Implementation Notes + + +Implemented via shared subtitle annotation exclusion term normalization: added はぁ so katakana ハァ normalizes into the existing term gate. Existing small-tsu kana SFX logic already covers はっ. Regression confirms both reported forms clear known/N+1/frequency/JLPT metadata while a normal noun keeps frequency annotation. + + +## Final Summary + + +Summary: +- Added a regression for the reported subtitle pattern ハァ… / (ガーフィール)はっ!, with annotation metadata present on both interjection tokens. +- Extended the shared subtitle annotation exclusion term set so ハァ normalizes to はぁ and is stripped of annotation styling. Existing はっ handling remains covered by small-tsu kana SFX filtering. +- Added a change fragment for the user-visible bug fix. + +Verification: +- bun test src/core/services/tokenizer/annotation-stage.test.ts +- bun test src/core/services/tokenizer/annotation-stage.test.ts src/core/services/tokenizer.test.ts src/renderer/subtitle-render.test.ts +- bun run typecheck + diff --git a/backlog/tasks/task-320 - Refresh-current-subtitle-known-word-highlight-after-successful-mining.md b/backlog/tasks/task-320 - Refresh-current-subtitle-known-word-highlight-after-successful-mining.md new file mode 100644 index 00000000..be8f57e2 --- /dev/null +++ b/backlog/tasks/task-320 - Refresh-current-subtitle-known-word-highlight-after-successful-mining.md @@ -0,0 +1,58 @@ +--- +id: TASK-320 +title: Refresh current subtitle known-word highlight after successful mining +status: Done +assignee: + - Codex +created_date: '2026-05-03 03:22' +updated_date: '2026-05-03 03:29' +labels: + - bug + - anki + - subtitle-annotations +dependencies: [] +priority: medium +--- + +## Description + + +After a sentence card is mined successfully, the mined word is added to the known-word cache and future subtitle appearances render as known. 
The currently displayed subtitle must also be refreshed immediately so the mined word turns known-color without waiting for a later cue. + + +## Acceptance Criteria + +- [x] #1 Successful sentence-card mining refreshes the current displayed subtitle so newly mined known words render immediately. +- [x] #2 Unsuccessful/no-op mining does not refresh the current subtitle. +- [x] #3 Regression coverage verifies the successful and unsuccessful mining paths. + + +## Implementation Plan + + +1. Add a regression test around AnkiIntegration known-word cache appends: when mined note info changes known words, a callback fires. +2. Make KnownWordCacheManager.appendFromNoteInfo report whether it changed the immediate known-word cache. +3. Add an AnkiIntegration known-word-cache-updated callback and invoke it after successful immediate append. +4. Wire main process callback to subtitleProcessingController.refreshCurrentSubtitle(appState.currentSubText), forcing active-line retokenization after popup/proxy or local mining updates the known-word cache. +5. Add a changelog fragment and run targeted tests plus typecheck. + + +## Implementation Notes + + +Implemented generic known-word-cache update notification instead of shortcut-only refresh. KnownWordCacheManager.appendFromNoteInfo now returns whether in-memory known words changed; AnkiIntegration notifies a callback after successful append. Main process wires that callback to subtitleProcessingController.refreshCurrentSubtitle(appState.currentSubText), forcing retokenization without using stale prefetch/cache data. Added regression coverage in anki-integration.test.ts. + + +## Final Summary + + +Summary: +- Added a known-word-cache update callback on AnkiIntegration and wired it in the main process to refresh the current subtitle after mined note info changes known words. 
+- Made KnownWordCacheManager.appendFromNoteInfo report whether it changed the known-word cache, so refresh only happens after an actual immediate known-word append. +- Added regression coverage proving mined note info updates known words and emits the update notification. + +Verification: +- bun test src/anki-integration.test.ts src/anki-integration/known-word-cache.test.ts src/main/runtime/anki-actions.test.ts src/main/runtime/anki-actions-main-deps.test.ts +- bun run typecheck +- bun run changelog:lint is currently blocked by pre-existing invalid metadata in changes/319-interjection-annotation-filter.md. + diff --git a/backlog/tasks/task-321 - Preserve-word-audio-during-manual-clipboard-card-updates.md b/backlog/tasks/task-321 - Preserve-word-audio-during-manual-clipboard-card-updates.md new file mode 100644 index 00000000..431bbeb0 --- /dev/null +++ b/backlog/tasks/task-321 - Preserve-word-audio-during-manual-clipboard-card-updates.md @@ -0,0 +1,63 @@ +--- +id: TASK-321 +title: Preserve word audio during manual clipboard card updates +status: Done +assignee: + - '@Codex' +created_date: '2026-05-03 06:22' +updated_date: '2026-05-03 06:23' +labels: + - anki + - mining +dependencies: [] +priority: medium +--- + +## Description + + +Manual Ctrl+Shift+C/Ctrl+V card updates on already-mined cards should refresh the sentence content and generated sentence media without removing or replacing the existing word/expression audio. The word is unchanged in this flow, so the configured word audio field must be left untouched while sentence audio retains the forced-overwrite behavior from TASK-299. + + +## Acceptance Criteria + +- [x] #1 Manual clipboard subtitle update replaces the resolved sentence audio field with newly generated sentence audio. +- [x] #2 Manual clipboard subtitle update does not include the configured word/expression audio field in Anki field updates. 
+- [x] #3 Animated image generation still uses the existing word audio duration for lead-in sync when configured. +- [x] #4 A regression test covers preserving word/expression audio during manual clipboard update. + + +## Implementation Plan + + +1. Update the focused manual clipboard card update regression so generated audio is written only to the resolved sentence audio field and the configured word/expression audio field is absent from updateNoteFields payloads. +2. Run the focused test and confirm it fails for the existing TASK-299 behavior. +3. Change CardCreationService.updateLastAddedFromClipboard to stop merging/updating expression audio while preserving forced overwrite for sentence audio. +4. Run the focused test; then run adjacent Anki card-creation tests if the focused gate passes. +5. Update task acceptance criteria/final notes with verification results. + + +## Implementation Notes + + +Implemented narrow manual clipboard update change in CardCreationService.updateLastAddedFromClipboard: generated audio now force-overwrites only the resolved sentence audio field and no longer writes the configured word/expression audio field. Animated AVIF lead-in still runs from the original note info before image generation, preserving existing word-audio sync behavior. + + +## Final Summary + + +Summary: +- Manual Ctrl+Shift+C/Ctrl+V card updates now leave the configured word/expression audio field untouched while force-replacing the resolved sentence audio field. +- Updated the regression test to assert the Anki update payload omits ExpressionAudio and only merges SentenceAudio with forced overwrite. +- Updated docs-site behavior notes and added a changelog fragment for the sentence-only manual audio replacement behavior. 
+ +Verification: +- bun test src/anki-integration/card-creation-manual-update.test.ts src/anki-integration/card-creation.test.ts src/anki-integration/animated-image-sync.test.ts +- bun run typecheck +- bun run docs:test +- bun run docs:build +- git diff --check -- src/anki-integration/card-creation.ts src/anki-integration/card-creation-manual-update.test.ts docs-site/mining-workflow.md docs-site/anki-integration.md docs-site/configuration.md changes/322-preserve-word-audio-manual-update.md + +Blocked gate: +- bun run changelog:lint is blocked by pre-existing malformed changes/319-interjection-annotation-filter.md, which is outside this task's files. + diff --git a/backlog/tasks/task-322 - Fix-failing-CI-checks-on-PR-57.md b/backlog/tasks/task-322 - Fix-failing-CI-checks-on-PR-57.md new file mode 100644 index 00000000..6fd6ea1d --- /dev/null +++ b/backlog/tasks/task-322 - Fix-failing-CI-checks-on-PR-57.md @@ -0,0 +1,58 @@ +--- +id: TASK-322 +title: Fix failing CI checks on PR 57 +status: Done +assignee: + - codex +created_date: '2026-05-03 06:27' +updated_date: '2026-05-03 06:31' +labels: + - ci + - bug +dependencies: [] +references: + - 'https://github.com/ksyasuda/SubMiner/pull/57' +priority: high +--- + +## Description + + +Investigate and fix failing GitHub Actions checks on PR #57 (`tokenizer-updates`). Scope: use CI logs to identify root cause, apply focused local fix, and verify with relevant local checks. + + +## Acceptance Criteria + +- [x] #1 Failing GitHub Actions check root cause is identified from logs. +- [x] #2 A focused code/test/docs fix is applied locally. +- [x] #3 Relevant local verification passes or blocked reason is documented. +- [x] #4 PR checks are rechecked or next CI action is documented. +- [x] #5 Actionable CodeRabbit PR comments are inspected and addressed or documented as non-actionable. + + +## Implementation Plan + + +1. 
Fix CI changelog lint by adding a valid `type` frontmatter value to `changes/319-interjection-annotation-filter.md`. +2. Address unresolved CodeRabbit threads: + - `scripts/test-plugin-session-bindings.lua`: make `.tmp` creation portable across Unix/Windows shells. + - `src/core/services/tokenizer.ts`: pass `TokenizerAnnotationOptions` through `stripSubtitleAnnotationMetadata` paths so `sourceText` is honored. + - `src/main/runtime/mpv-main-event-main-deps.ts`: align overlay-runtime quit-on-disconnect predicate with `hasInitialPlaybackQuitOnDisconnectArg`. + - `src/renderer/handlers/mouse.test.ts`: make `elementFromPoint` stubs coordinate-sensitive. +3. Run focused checks: `bun run changelog:lint`, relevant tokenizer/main/mouse tests, and plugin Lua test path if available. +4. Recheck PR checks/comments after local verification. + + +## Implementation Notes + + +CI root cause: GitHub Actions `build-test-audit` failed during `bun run changelog:lint`; `changes/319-interjection-annotation-filter.md` must declare `type` as one of `added`, `changed`, `fixed`, `docs`, `internal`. Scope expanded by user to also address CodeRabbit comments on PR #57. + +Implemented CI changelog metadata fix and unresolved CodeRabbit feedback locally. Full verification run: `bun run changelog:lint`, focused tests, `bun run typecheck`, `bun run test:fast`, `bun run test:env`, `bun run build`, `bun run test:smoke:dist`, `bun run format:check:src`. Rechecked PR checks: remote `build-test-audit` still shows the old failing run until this branch is pushed; CodeRabbit remains pending remotely until review reruns. + + +## Final Summary + + +Fixed PR #57 CI failure by converting `changes/319-interjection-annotation-filter.md` to valid changelog fragment metadata. 
Addressed unresolved CodeRabbit feedback by making plugin test `.tmp` creation portable, threading tokenizer annotation options through metadata stripping, aligning quit-on-disconnect predicates for Jellyfin playback, and strengthening mouse hit-test assertions. Also formatted two existing PR files required by the source format gate. Verification passed locally: changelog lint, focused tests, typecheck, test:fast, test:env, build, smoke dist, and format check. Remote PR checks still show the previous failed `build-test-audit` run until these local changes are pushed. + diff --git a/backlog/tasks/task-323 - Fix-macOS-overlay-hiding-while-mpv-remains-active.md b/backlog/tasks/task-323 - Fix-macOS-overlay-hiding-while-mpv-remains-active.md new file mode 100644 index 00000000..d2a480e6 --- /dev/null +++ b/backlog/tasks/task-323 - Fix-macOS-overlay-hiding-while-mpv-remains-active.md @@ -0,0 +1,60 @@ +--- +id: TASK-323 +title: Fix macOS overlay hiding while mpv remains active +status: Done +assignee: + - '@codex' +created_date: '2026-05-03 07:41' +updated_date: '2026-05-03 07:48' +labels: + - bug + - macos + - overlay +dependencies: [] +references: + - src/core/services/overlay-visibility.ts + - src/window-trackers/macos-tracker.ts +priority: high +--- + +## Description + + +macOS visible overlay can hide/reload during normal playback even while mpv, or the overlay over mpv, remains the active viewing surface. The fix should preserve overlay visibility and subtitle continuity during transient macOS focus/tracker flaps, while still hiding the overlay when the tracked mpv window is genuinely unavailable or another app is brought forward. + + +## Acceptance Criteria + +- [x] #1 When the macOS tracker has recent valid mpv geometry, transient focus/helper misses do not hide the visible overlay or force a reload. +- [x] #2 The overlay still hides when the tracked mpv window is genuinely lost beyond the existing tracking grace behavior. 
+- [x] #3 A regression test covers the macOS active-playback case where mpv/overlay focus is preserved despite a transient non-tracking state. +- [x] #4 Relevant docs or task notes are updated if behavior or verification guidance changes. + + +## Implementation Plan + + +1. Add a failing regression in `src/core/services/overlay-visibility.test.ts`: on macOS, after the overlay is visible/tracked, a transient tracker state with `isTracking() === false` but non-null `getGeometry()` keeps the overlay visible, updates bounds, and does not call `hide()` or loading OSD. +2. Implement the minimal macOS preserve path in `src/core/services/overlay-visibility.ts`, mirroring the existing Windows transient non-minimized branch but without Windows z-order binding. +3. Preserve existing startup/lost-window behavior: `windowTracker: null` and `isTracking() === false` with `getGeometry() === null` still hide and show the first loading OSD. +4. Run focused tests for `src/core/services/overlay-visibility.test.ts`; then typecheck or the repo runtime verification lane if the focused patch passes. +5. Update TASK-323 notes/acceptance criteria with verification results. + + +## Implementation Notes + + +Added a macOS overlay visibility regression for transient tracker loss with retained geometry. The test failed first because the old path marked tracker-not-ready and hid the overlay. Implemented a scoped preserve path in `src/core/services/overlay-visibility.ts`: macOS now keeps the visible overlay alive only when the tracker still has retained geometry; true loss with null geometry still hides and emits the existing loading OSD behavior. Added changelog fragment `changes/323-macos-overlay-tracker-flaps.md`. 
+ +Verification: `bun test src/core/services/overlay-visibility.test.ts` passed after the fix; `bun test src/window-trackers/macos-tracker.test.ts src/core/services/overlay-visibility.test.ts` passed; `bun run typecheck` passed; `bun run test:env` passed; isolated `bun test src/core/services/subsync.test.ts` passed; `bun run build` passed; `bun run test:smoke:dist` passed; `bun run changelog:lint` passed. `bun run test:fast` failed twice in an unrelated broad-suite interaction where `src/renderer/handlers/keyboard.ts` tried to use missing `window.electronAPI` while `src/core/services/subsync.test.ts` was running, followed by Bun node:test nested-test cascade errors. + + +## Final Summary + + +Fixed the macOS visible-overlay hide/reload path during normal playback by preserving the overlay when the tracker briefly reports non-tracking but still has retained mpv geometry. The overlay visibility service now treats that macOS state like a transient tracker flap: it keeps bounds/layer/order refreshed and leaves the overlay click-through instead of hiding or showing the loading OSD. True macOS loss remains unchanged: no tracker or null geometry still hides the overlay and uses the existing loading behavior. + +Added regression coverage in `src/core/services/overlay-visibility.test.ts` for the active-playback case and added changelog fragment `changes/323-macos-overlay-tracker-flaps.md`. + +Verification passed: focused overlay tests, macOS tracker + overlay tests, typecheck, `test:env`, isolated `subsync.test.ts`, build, dist smoke, and changelog lint. Full `test:fast` remains blocked by an unrelated broad-suite interaction where renderer keyboard state fires without `window.electronAPI` during `subsync.test.ts`, then Bun reports node:test cascade errors. 
+ diff --git a/backlog/tasks/task-324 - Fix-mpv-playlist-changes-re-running-app-warmups.md b/backlog/tasks/task-324 - Fix-mpv-playlist-changes-re-running-app-warmups.md new file mode 100644 index 00000000..f7a9f62d --- /dev/null +++ b/backlog/tasks/task-324 - Fix-mpv-playlist-changes-re-running-app-warmups.md @@ -0,0 +1,63 @@ +--- +id: TASK-324 +title: Fix mpv playlist changes re-running app warmups +status: Done +assignee: [] +created_date: '2026-05-03 07:48' +updated_date: '2026-05-03 07:52' +labels: + - bug + - mpv + - overlay +dependencies: [] +references: + - launcher/ + - src/core/services/mpv.ts + - src/main/runtime/ +priority: medium +--- + +## Description + + +When moving to the next or previous mpv playlist entry, SubMiner should reconnect the existing app/runtime to mpv instead of treating the new video like a fresh app startup. Re-running startup warmups or creating another app session after the first video can interfere with overlay behavior. + + +## Acceptance Criteria + +- [x] #1 Changing to next or previous mpv playlist item reuses the existing app/runtime instead of launching a new app session. +- [x] #2 Startup warmups are not repeated for playlist item changes after the first app startup. +- [x] #3 Overlay behavior remains available after playlist navigation. +- [x] #4 Regression test covers the playlist-change/reconnect path. + + +## Implementation Plan + + +1. Reproduce the plugin auto-start regression with a failing Lua start-gate test. +2. Update mpv plugin auto-start handling so playlist/file changes with an already-running overlay reuse the existing app path and do not re-arm pause-until-ready warmup. +3. Add changelog fragment and run plugin/launcher verification. + + +## Implementation Notes + + +RED: `lua scripts/test-plugin-start-gate.lua` failed after changing the duplicate pause-until-ready auto-start expectations; it showed the loading gate was armed twice while overlay was already running. 
+ +GREEN: `plugin/subminer/process.lua` now disarms any old ready gate and only reasserts visible overlay state when auto-start fires while `state.overlay_running` is already true. + + +## Final Summary + + +Summary: +- Updated the mpv Lua plugin auto-start reuse path so a file/playlist load with an already-running overlay no longer re-arms the pause-until-ready tokenization gate. +- Kept the existing app/control command reuse behavior: subsequent auto-starts reassert visible/hidden overlay state without issuing another `--start` subprocess. +- Added a changelog fragment for the mpv playlist overlay reuse fix. + +Tests: +- `lua scripts/test-plugin-start-gate.lua` (red before fix, green after) +- `bun run test:plugin:src` +- `bun run changelog:lint` +- `bun run test:launcher:env:src` + diff --git a/backlog/tasks/task-325 - Keep-JLPT-underline-color-fixed-with-combined-lookup-annotations.md b/backlog/tasks/task-325 - Keep-JLPT-underline-color-fixed-with-combined-lookup-annotations.md new file mode 100644 index 00000000..51ed982e --- /dev/null +++ b/backlog/tasks/task-325 - Keep-JLPT-underline-color-fixed-with-combined-lookup-annotations.md @@ -0,0 +1,56 @@ +--- +id: TASK-325 +title: Keep JLPT underline color fixed with combined lookup annotations +status: Done +assignee: + - '@Codex' +created_date: '2026-05-04 00:25' +updated_date: '2026-05-04 00:28' +labels: + - overlay + - jlpt + - renderer +dependencies: [] +references: + - TASK-318 + - TASK-308 +priority: medium +--- + +## Description + + +Yomitan lookup on a subtitle token that has a JLPT level plus another annotation such as frequency or known-word highlighting can make the JLPT underline take the other annotation color. The underline must always remain the token's JLPT level color; other annotation classes may still control text color. + + +## Acceptance Criteria + +- [x] #1 A JLPT token combined with frequency styling keeps its underline set to the configured JLPT level color during lookup/selection styling. 
+- [x] #2 A JLPT token combined with known-word styling keeps its underline set to the configured JLPT level color during lookup/selection styling. +- [x] #3 Regression coverage exercises combined JLPT plus non-JLPT annotation selectors, including character span selection/hover styling used by lookup. + + +## Implementation Plan + + +1. Add focused renderer CSS regression coverage for combined `word-jlpt-n*` plus known/frequency classes, including `.c::selection`/`.c:hover` lookup paths. +2. Run `bun test src/renderer/subtitle-render.test.ts` and confirm the new assertion fails on the current CSS. +3. Update `src/renderer/style.css` so JLPT decoration color is locked on the token and child character spans without changing text color priority for known/frequency/name/N+1 annotations. +4. Re-run the focused renderer test, then run typecheck/changelog checks as scope requires. + + +## Implementation Notes + + +Added red/green renderer CSS regression for combined JLPT plus known/N+1/frequency annotation classes and character hover lookup paths. Current CSS failed before the lock selectors were added; focused test passes after the CSS change. + + +## Final Summary + + +Fixed JLPT underline color drift for tokens that also carry known-word, N+1, or frequency annotation classes. The renderer CSS now explicitly locks the underline decoration color for combined JLPT annotation selectors, hover, character hover, and selection states while preserving the existing text color priority for other annotations. + +Added renderer regression coverage for combined JLPT plus non-JLPT annotation selectors and lookup character hover paths. Added a user-visible changelog fragment. + +Checks: `bun test src/renderer/subtitle-render.test.ts`; `bun run changelog:lint`; `bun run typecheck`; `bun run format:check:src`. 
+ diff --git a/backlog/tasks/task-326 - Fix-AniList-post-watch-update-after-skipped-completion-threshold.md b/backlog/tasks/task-326 - Fix-AniList-post-watch-update-after-skipped-completion-threshold.md new file mode 100644 index 00000000..33487b28 --- /dev/null +++ b/backlog/tasks/task-326 - Fix-AniList-post-watch-update-after-skipped-completion-threshold.md @@ -0,0 +1,32 @@ +--- +id: TASK-326 +title: Fix AniList post-watch update after skipped completion threshold +status: In Progress +assignee: [] +created_date: '2026-05-04 00:33' +labels: + - anilist + - bug +dependencies: [] +priority: high +--- + +## Description + + +AniList episode progress should sync reliably when playback reaches or passes the watched trigger point, even if mpv progress events jump over the exact threshold. Investigate why a completed watched episode did not update AniList and fix the root cause for post-watch tracking. + + +## Acceptance Criteria + +- [x] #1 When playback moves from before the completion threshold to any later position at or beyond the threshold, AniList queues or sends the episode progress update once. +- [x] #2 If playback is already past the completion threshold and the update has not yet been recorded for the current media/episode, AniList still queues or sends the update. +- [x] #3 AniList progress updates remain deduplicated for the same media/episode watch completion. +- [x] #4 A regression test covers the skipped-threshold or already-past-threshold case. + + +## Notes + +- Fixed mpv `time-pos` ordering so post-watch checks read the fresh playback position after seeks. +- Wired manual mark-watched to run a forced AniList post-watch sync after the local watched mark succeeds. +- Added regressions for time-position ordering, manual watched sync, forced post-watch updates, and the Little Witch Academia filename parse. 
diff --git a/backlog/tasks/task-327 - Restore-stats-daemon-deferral-when-launching-playback.md b/backlog/tasks/task-327 - Restore-stats-daemon-deferral-when-launching-playback.md new file mode 100644 index 00000000..9753af94 --- /dev/null +++ b/backlog/tasks/task-327 - Restore-stats-daemon-deferral-when-launching-playback.md @@ -0,0 +1,67 @@ +--- +id: TASK-327 +title: Restore stats daemon deferral when launching playback +status: Done +assignee: + - '@Codex' +created_date: '2026-05-04 01:15' +updated_date: '2026-05-04 01:17' +labels: + - bug + - stats + - runtime +dependencies: [] +priority: high +--- + +## Description + + +Launching a video while a background stats daemon is already running must not fail with stats.serverPort already in use. Normal in-app stats startup should reuse the live daemon URL instead of binding a second stats server, while preserving managed playback shutdown behavior from TASK-316. + + +## Acceptance Criteria + +- [x] #1 A live background stats daemon from another process causes in-app stats URL resolution to return the daemon URL without starting a local stats server. +- [x] #2 Dead or stale daemon state is removed and local stats startup still works. +- [x] #3 Managed playback shutdown behavior remains covered by existing tests. +- [x] #4 Focused regression tests pass. + + +## Implementation Plan + + +1. Update `src/main/runtime/stats-server-routing.test.ts` first so a live foreign daemon must return its daemon URL and skip local server startup. +2. Run the focused routing test to confirm the regression fails red. +3. Update `src/main/runtime/stats-server-routing.ts` to return `{ source: 'background' }` for live foreign daemon state, clear stale/self-owned state, and keep local startup fallback unchanged. +4. Run focused stats routing tests plus managed playback tests touched by TASK-316. +5. Update changelog and task acceptance/final notes. 
+ + +## Implementation Notes + + +Implemented via TDD: first changed `stats-server-routing.test.ts` to require live foreign daemon deferral and observed the expected red failure. Then restored `stats-server-routing.ts` to return the daemon URL with `source: 'background'` when daemon state belongs to a live other process. Stale/dead and self-owned stale cleanup paths remain local fallback. + +Verification passed: `bun test src/main/runtime/stats-server-routing.test.ts`; focused runtime suite for stats daemon + TASK-316 managed playback files; `bun run typecheck`; `bun run test:fast`. + +`bun run changelog:lint` is blocked by pre-existing unrelated `changes/326-anilist-time-position-post-watch.md` missing valid `type` metadata; `changes/327-stats-daemon-deferral.md` follows the expected fragment format. + + +## Final Summary + + +Summary: +- Restored in-app stats startup deferral to a live background stats daemon from another process, returning the daemon URL and skipping local stats server binding. +- Kept stale/dead daemon cleanup and local stats startup fallback behavior intact. +- Added a changelog fragment for the restored port-conflict fix. + +Verification: +- `bun test src/main/runtime/stats-server-routing.test.ts` +- `bun test src/main/runtime/stats-server-routing.test.ts src/core/services/mpv.test.ts src/core/services/mpv-protocol.test.ts src/main/runtime/mpv-client-event-bindings.test.ts src/main/runtime/mpv-main-event-bindings.test.ts src/main/runtime/mpv-main-event-main-deps.test.ts src/main/runtime/stats-cli-command.test.ts src/stats-daemon-control.test.ts` +- `bun run typecheck` +- `bun run test:fast` + +Blocked check: +- `bun run changelog:lint` fails on unrelated pre-existing `changes/326-anilist-time-position-post-watch.md` metadata, not this change. 
+ diff --git a/backlog/tasks/task-328 - Keep-subtitle-prefetch-running-after-immediate-cached-annotation-render.md b/backlog/tasks/task-328 - Keep-subtitle-prefetch-running-after-immediate-cached-annotation-render.md new file mode 100644 index 00000000..cf09a4d0 --- /dev/null +++ b/backlog/tasks/task-328 - Keep-subtitle-prefetch-running-after-immediate-cached-annotation-render.md @@ -0,0 +1,64 @@ +--- +id: TASK-328 +title: Keep subtitle prefetch running after immediate cached annotation render +status: Done +assignee: + - codex +created_date: '2026-05-04 01:26' +updated_date: '2026-05-04 01:30' +labels: [] +dependencies: [] +references: + - >- + /home/sudacode/projects/japanese/SubMiner/src/main/runtime/mpv-main-event-actions.ts + - /home/sudacode/projects/japanese/SubMiner/src/main.ts + - >- + /home/sudacode/projects/japanese/SubMiner/src/core/services/subtitle-processing-controller.ts + - >- + /home/sudacode/projects/japanese/SubMiner/backlog/completed/task-197 - + Eliminate-per-line-plain-subtitle-flash-on-prefetch-cache-hit.md + - >- + /home/sudacode/projects/japanese/SubMiner/backlog/completed/task-196 - + Fix-subtitle-prefetch-cache-key-mismatch-and-active-cue-window.md +priority: high +--- + +## Description + + +Cached subtitle annotation hits should render annotated subtitles immediately without starving the subtitle prefetcher. Current evidence: the mpv subtitle-change path emits the cached payload before forwarding the subtitle change; in the runtime, the cached emit resumes prefetch, then the forwarded change pauses it, and no async controller emit follows on a cache hit to resume it again. + + +## Acceptance Criteria + +- [x] #1 Cached subtitle annotation payloads still render immediately without a plain subtitle flash. +- [x] #2 A cache-hit subtitle-change event leaves subtitle prefetch eligible to continue after the immediate annotated emit. 
+- [x] #3 Cache-miss subtitle-change behavior still shows plain text immediately while async annotation processing runs. +- [x] #4 Regression coverage proves the cache-hit ordering that prevents prefetch from staying paused. + + +## Implementation Plan + + +1. Add a focused regression test in `src/main/runtime/mpv-main-event-actions.test.ts` proving cache-hit subtitle changes pause live prefetch work before emitting the immediate annotated payload, so the emit resumes prefetch last. +2. Change `createHandleMpvSubtitleChangeHandler` ordering in `src/main/runtime/mpv-main-event-actions.ts`: set current text, consume cache, forward `onSubtitleChange(text)`, then emit cached payload or plain fallback, then refresh Discord presence. +3. Preserve existing behavior: cache hits emit annotated payload synchronously; cache misses emit `{ text, tokens: null }` synchronously. +4. Run focused tests for `mpv-main-event-actions`; run adjacent controller/prefetch tests if ordering touches cache assumptions. +5. Update TASK-328 acceptance criteria and add a changelog fragment if the repo requires one for this user-visible fix. + + +## Implementation Notes + + +Red/green: added cache-hit ordering regression in `src/main/runtime/mpv-main-event-actions.test.ts`; first run failed with actual order `emit:annotated` before `process:line`. Fix narrows ordering change to cache hits only: cache hit calls `onSubtitleChange` before immediate annotated emit; cache miss keeps plain broadcast before processing. + +Verification: `bun test src/main/runtime/mpv-main-event-actions.test.ts` passed; `bun test src/core/services/subtitle-processing-controller.test.ts` passed; `bun test src/core/services/subtitle-prefetch.test.ts` passed; combined targeted test command passed 35 tests; `bun run typecheck` passed. `bun run changelog:lint` blocked by unrelated pre-existing `changes/326-anilist-time-position-post-watch.md` missing a valid `type` metadata line. 
+ + +## Final Summary + + +Fixed the subtitle cache-hit ordering that could leave subtitle prefetch paused after an immediate annotated render. Cache hits now forward the subtitle change first, then emit the cached annotated payload, so the runtime pause happens before the emit path resumes prefetch. Cache misses keep the previous plain-subtitle-first path so fallback text still appears immediately while tokenization runs. + +Added a regression test for the cache-hit ordering and a changelog fragment for the overlay fix. Verified with targeted subtitle runtime/controller/prefetch tests and `bun run typecheck`; changelog lint is blocked by an unrelated existing malformed fragment for TASK-326. + diff --git a/backlog/tasks/task-331 - Address-unresolved-CodeRabbit-comments-on-PR-57.md b/backlog/tasks/task-331 - Address-unresolved-CodeRabbit-comments-on-PR-57.md new file mode 100644 index 00000000..1aacb8e7 --- /dev/null +++ b/backlog/tasks/task-331 - Address-unresolved-CodeRabbit-comments-on-PR-57.md @@ -0,0 +1,37 @@ +--- +id: TASK-331 +title: Address unresolved CodeRabbit comments on PR 57 +status: Done +assignee: + - codex +created_date: '2026-05-04 03:21' +updated_date: '2026-05-04 03:27' +labels: + - pr-feedback + - coderabbit +dependencies: [] +references: + - 'https://github.com/ksyasuda/SubMiner/pull/57' +priority: medium +--- + +## Description + + +Assess and fix unresolved CodeRabbit review comments on PR #57 after rebasing tokenizer-updates. Scope includes manual clipboard SentenceAudio guard, tokenizer standalone particle blacklist, AniList guessit fallback confidence, startup gate duplicate auto-start, and small regression-test hardening where applicable. + + +## Acceptance Criteria + +- [x] #1 Each unresolved CodeRabbit comment is either fixed or explicitly assessed as not applicable against current code. +- [x] #2 Regression tests cover behavior changes where practical. +- [x] #3 Relevant focused tests and typecheck pass. 
+ + +## Implementation Notes + + +Fixed all verified actionable CodeRabbit comments from PR #57: manual clipboard updates no longer fall back to ExpressionAudio when SentenceAudio is absent, connective particle phrases no longer suppress lexical verb readings like 立って, guessit output only borrows parser season/episode from non-low-confidence parses, duplicate auto-start no longer releases an active pause-until-ready gate, JLPT CSS tests block text-decoration shorthand underlines, post-watch update rejection logging is covered, and duplicate quit-on-disconnect predicate code is shared. + +Verification: bun test src/anki-integration/card-creation-manual-update.test.ts src/core/services/tokenizer/annotation-stage.test.ts src/core/services/anilist/anilist-updater.test.ts src/main/runtime/mpv-main-event-actions.test.ts src/renderer/subtitle-render.test.ts; lua scripts/test-plugin-start-gate.lua; bun run typecheck; bun run test:fast. + diff --git a/backlog/tasks/task-332 - Fix-subtitle-frequency-annotation-missing-ranks-shown-in-Yomitan-popup.md b/backlog/tasks/task-332 - Fix-subtitle-frequency-annotation-missing-ranks-shown-in-Yomitan-popup.md new file mode 100644 index 00000000..53fc6882 --- /dev/null +++ b/backlog/tasks/task-332 - Fix-subtitle-frequency-annotation-missing-ranks-shown-in-Yomitan-popup.md @@ -0,0 +1,60 @@ +--- +id: TASK-332 +title: Fix subtitle frequency annotation missing ranks shown in Yomitan popup +status: Done +assignee: + - Codex +created_date: '2026-05-04 03:29' +updated_date: '2026-05-04 03:41' +labels: + - bug + - tokenizer +dependencies: [] +priority: medium +--- + +## Description + + +Subtitle frequency highlighting can miss a token even when the Yomitan popup shows a rank within the configured threshold. Reproduced with `第二走者とアンカーは\n中継地点に速やかに移動!`: Yomitan popup shows `第二` JPDB rank 1820, but SubMiner tokenizer output has no `frequencyRank` for `第二`, so renderer cannot annotate it. 
+ + +## Acceptance Criteria + +- [x] #1 `第二` in `第二走者とアンカーは\n中継地点に速やかに移動!` receives the Yomitan rank shown by the popup when frequency highlighting is enabled. +- [x] #2 Regression test covers the Yomitan scan/frequency ingestion path for exact popup-derived ranks. +- [x] #3 Existing tokenizer frequency tests continue to pass. + + +## Implementation Plan + + +1. Reproduce and inspect the missing `第二` rank path with tokenizer probes and focused tests. +2. Preserve exact Yomitan scan frequency ranks when the matching frequency entry omits reading metadata but has the same exact term. +3. Allow ranked ordinal prefix-noun tokens (`第` + numeric noun, e.g. `第二`) through annotation POS filtering while keeping standalone prefixes excluded. +4. Verify with focused tokenizer/runtime/annotation tests, typecheck, changelog lint, and a live-style Yomitan profile probe. + + +## Implementation Notes + + +Root-cause probe against temp copy of Yomitan profile: tokenizer returns no frequencyRank for `第二`; renderer config `topX` is 10000, so render threshold is not the blocker. + +User approved implementation plan on 2026-05-04. + +Verification: `bun test src/core/services/tokenizer.test.ts src/core/services/tokenizer/yomitan-parser-runtime.test.ts src/core/services/tokenizer/annotation-stage.test.ts` passed (192 tests). + +Verification: `bun run typecheck` passed. + +Verification: `bun run changelog:lint` passed. + +Verification: `bun run get-frequency:electron -- --yomitan-user-data /tmp/subminer-yomitan-probe-909423 "第二走者とアンカーは\\n中継地点に速やかに移動!"` produced `第二` with `frequencyRank: 1820`. + +Finalization check: implementation plan updated to reflect the discovered POS-filter root cause and completed solution. + + +## Final Summary + + +Fixed subtitle frequency annotation for `第二` by allowing ranked ordinal prefix-noun compounds through annotation POS filtering. Also made scan rank matching tolerate exact frequency entries where one side omits reading metadata. 
Verified with tokenizer/runtime/annotation tests, typecheck, changelog lint, and a live-style Yomitan profile probe showing `第二` now receives frequencyRank 1820. + diff --git a/backlog/tasks/task-333 - Suppress-aru-subtitle-annotations.md b/backlog/tasks/task-333 - Suppress-aru-subtitle-annotations.md new file mode 100644 index 00000000..293fc8f1 --- /dev/null +++ b/backlog/tasks/task-333 - Suppress-aru-subtitle-annotations.md @@ -0,0 +1,53 @@ +--- +id: TASK-333 +title: Suppress aru subtitle annotations +status: Done +assignee: [] +created_date: '2026-05-04 04:39' +updated_date: '2026-05-04 05:02' +labels: + - tokenizer + - annotations + - bug +dependencies: [] +priority: medium +--- + +## Description + + +Add `ある` / `有る` to the subtitle annotation suppression path so `aru` tokens remain hoverable and never receive N+1, JLPT, frequency, or name-match annotation metadata. Known-word highlighting is special: if a filtered `aru` token is known and known highlighting is enabled, it should still render as known. + + +## Acceptance Criteria + +- [x] #1 `ある` and kanji headword/surface variants such as `有る` are excluded by the subtitle annotation filter. +- [x] #2 Annotation stripping clears N+1, JLPT, frequency, and name metadata for `aru` tokens while preserving token hover data. +- [x] #3 Known-word highlighting still applies to filtered tokens, including `aru`, when known-word lookup marks them known. +- [x] #4 Regression coverage fails before the fix and passes after. + + +## Implementation Plan + + +1. Add `ある`/`有る`/`在る` to the shared subtitle annotation hard-exclusion terms. +2. Preserve/recompute known-word status for filtered tokens while stripping N+1, JLPT, frequency, and name metadata. +3. Add RED/GREEN unit and tokenizer regression coverage, plus a changelog fragment. +4. Run targeted tests and full handoff gate. + + +## Implementation Notes + + +TDD path: added failing annotation-stage coverage first. 
Initial implementation made targeted tests pass, then broader tokenizer coverage revealed an older fixture expecting `ある` to remain lexical; updated that integration expectation to the new requested behavior. Follow-up correction: known-word highlighting is the lone annotation exception for filtered tokens, so the strip path now preserves known state and `annotateTokens` recomputes known status for filtered tokens while still clearing N+1/JLPT/frequency/name metadata. + + +## Final Summary + + +Suppressed non-known subtitle annotations for `aru` existence verbs by adding `ある`, `有る`, and `在る` to the shared hard-exclusion list. Corrected the filtered-token path so known-word highlighting still applies whenever known highlighting is enabled; filtered tokens now keep/gain `isKnown` but still lose N+1, JLPT, frequency, and name metadata. + +Added and updated annotation-stage and tokenizer regression coverage for `aru`, particles, helper fragments, interjections, and other filtered known tokens. Added `changes/333-aru-annotation-filter.md`. + +Validation passed: RED failures observed before implementation/correction; `bun test src/core/services/tokenizer/annotation-stage.test.ts`; `bun test src/core/services/tokenizer.test.ts`; `bun run typecheck`; `bun run format:check:src`; `bun run changelog:lint`; `bun run test:fast`; `bun run test:env`; `bun run build`; `bun run test:smoke:dist`. 
+ diff --git a/backlog/tasks/task-334 - Assess-and-address-PR-57-latest-CodeRabbit-comments.md b/backlog/tasks/task-334 - Assess-and-address-PR-57-latest-CodeRabbit-comments.md new file mode 100644 index 00000000..e1ef92a3 --- /dev/null +++ b/backlog/tasks/task-334 - Assess-and-address-PR-57-latest-CodeRabbit-comments.md @@ -0,0 +1,53 @@ +--- +id: TASK-334 +title: Assess and address PR 57 latest CodeRabbit comments +status: Done +assignee: + - '@codex' +created_date: '2026-05-04 05:03' +updated_date: '2026-05-04 05:07' +labels: + - pr-feedback + - coderabbit +dependencies: [] +references: + - 'https://github.com/ksyasuda/SubMiner/pull/57' +priority: medium +--- + +## Description + + +Assess the latest CodeRabbit review on PR #57 submitted 2026-05-04 and fix verified issues. Current scope: AniList post-watch duplicate-write race, known-word cache mutation return value, and manual-mark AniList rejection isolation with regression coverage. + + +## Acceptance Criteria + +- [x] #1 Each latest CodeRabbit comment is either fixed or explicitly assessed as not applicable against current code. +- [x] #2 Regression tests cover behavior changes where practical. +- [x] #3 Relevant focused tests and typecheck pass, or any blocked verification is documented. + + +## Implementation Plan + + +1. Verify each latest CodeRabbit finding against current code. +2. Update known-word cache append return semantics so cache clears are reported as mutations when state existed. +3. Acquire AniList post-watch in-flight before async gating and release in finally. +4. Isolate manual-mark AniList callback failures in IPC and add a rejection-path regression test. +5. Run focused tests for touched areas plus typecheck; document any blocked verification. + + +## Implementation Notes + + +Verified latest CodeRabbit review submitted 2026-05-04 on PR #57. 
Fixed all three current items: known-word cache mutation return after cache reset, AniList post-watch concurrent in-flight race, and manual watched mark isolation from AniList callback failures. Added regression tests for each path and a changelog fragment. + + +## Final Summary + + +Fixed latest PR #57 CodeRabbit feedback by reporting known-word cache clears as mutations during immediate append, acquiring AniList post-watch in-flight before awaited gates to prevent duplicate writes, and isolating manual watched mark success from AniList post-watch callback failures. Added focused regression coverage in known-word cache, AniList post-watch, and IPC tests, plus a changelog fragment. + +Verification: bun test src/anki-integration/known-word-cache.test.ts; bun test src/main/runtime/anilist-post-watch.test.ts; bun test src/core/services/ipc.test.ts; bun run typecheck; bun run format:check:src; bun run changelog:lint; bun run test:fast; bun run test:env; bun run build; bun run test:smoke:dist. + diff --git a/backlog/tasks/task-335 - Fix-Linux-AniList-setup-gate-using-stored-keyring-token.md b/backlog/tasks/task-335 - Fix-Linux-AniList-setup-gate-using-stored-keyring-token.md new file mode 100644 index 00000000..d1ebca86 --- /dev/null +++ b/backlog/tasks/task-335 - Fix-Linux-AniList-setup-gate-using-stored-keyring-token.md @@ -0,0 +1,39 @@ +--- +id: TASK-335 +title: Fix Linux AniList setup gate using stored keyring token +status: Done +assignee: [] +created_date: '2026-05-04 05:26' +updated_date: '2026-05-04 05:30' +labels: + - anilist + - bug + - linux +dependencies: [] +priority: high +--- + +## Description + + +AniList setup page reopens on Linux video launch even when the token exists in secret storage and post-watch updates can use it. Investigate setup gating versus update token refresh paths and make them agree on stored-token availability. 
+ + +## Acceptance Criteria + +- [x] #1 Launching a video on Linux with an AniList token available in secret storage does not show the AniList setup page just because config accessToken is empty. +- [x] #2 If secret storage load fails, setup/errors surface the underlying storage problem instead of behaving like an empty token. +- [x] #3 Regression coverage exercises the setup-gate token availability path and preserves post-watch update token behavior. + + +## Implementation Notes + + +Patched AniList setup callback to require successful token persistence before caching/closing the setup flow. Patched config reload auth refresh to pass allowSetupPrompt:false so normal startup/playback reloads do not open AniList setup UI. Added regression coverage around persistence failure and non-prompting config refresh. + + +## Final Summary + + +Fixed AniList setup/login flow so failed encrypted token persistence no longer reports success or seeds only an in-memory token. Config reload now refreshes AniList auth state without opening the setup window during playback, reducing repeated Linux setup prompts when safeStorage/keyring resolution fails. + diff --git a/backlog/tasks/task-337 - Fix-transient-Linux-safeStorage-failure-poisoning-AniList-token-store.md b/backlog/tasks/task-337 - Fix-transient-Linux-safeStorage-failure-poisoning-AniList-token-store.md new file mode 100644 index 00000000..6e534600 --- /dev/null +++ b/backlog/tasks/task-337 - Fix-transient-Linux-safeStorage-failure-poisoning-AniList-token-store.md @@ -0,0 +1,39 @@ +--- +id: TASK-337 +title: Fix transient Linux safeStorage failure poisoning AniList token store +status: Done +assignee: [] +created_date: '2026-05-04 05:51' +updated_date: '2026-05-04 05:52' +labels: + - anilist + - bug + - linux +dependencies: [] +priority: high +--- + +## Description + + +AniList token store memoizes a false safeStorage availability result. 
On Linux this can happen before Electron/keyring readiness, causing later post-watch updates and setup saves to report missing login/encryption unavailable even after the keyring is available. + + +## Acceptance Criteria + +- [x] #1 A transient safeStorage unavailable result does not prevent a later stored AniList token load once encryption is available. +- [x] #2 A transient safeStorage unavailable result does not prevent a later AniList token save once encryption is available. +- [x] #3 Regression coverage protects the retry behavior. + + +## Implementation Notes + + +Changed AniList token store safeStorage probe to memoize successful probes only. Failed probes now return false without poisoning later load/save attempts, covering Linux startup windows where Electron safeStorage/keyring can be unavailable before app readiness but usable later. Added regression test for transient unavailable -> available load/save retry. + + +## Final Summary + + +Fixed a Linux AniList auth failure where an early safeStorage/keyring miss was cached for the whole process. Stored tokens now load and setup tokens can save after GNOME libsecret becomes available later in startup. 
+ diff --git a/backlog/tasks/task-338 - Fix-known-word-highlight-on-standalone-subtitle-particles.md b/backlog/tasks/task-338 - Fix-known-word-highlight-on-standalone-subtitle-particles.md new file mode 100644 index 00000000..e8432e9c --- /dev/null +++ b/backlog/tasks/task-338 - Fix-known-word-highlight-on-standalone-subtitle-particles.md @@ -0,0 +1,72 @@ +--- +id: TASK-338 +title: Fix known-word highlight on standalone subtitle particles +status: Done +assignee: + - codex +created_date: '2026-05-04 05:52' +updated_date: '2026-05-04 05:57' +labels: + - bug + - subtitle + - tokenizer +dependencies: [] +references: + - src/core/services/tokenizer/annotation-stage.ts + - src/core/services/tokenizer/subtitle-annotation-filter.ts + - src/renderer/subtitle-render.ts +priority: medium +--- + +## Description + + +Standalone grammar particles such as に should not render as known-word green when they appear in the known-word cache as readings for other words. Keep known-word coloring for lexical tokens, but prevent grammar-excluded subtitle tokens from getting known-green. + + +## Acceptance Criteria + +- [x] #1 Standalone grammar particles like に do not retain isKnown after subtitle annotation filtering. +- [x] #2 Lexical known-word tokens still render as known when not grammar-excluded. +- [x] #3 Focused regression test covers the particle false-positive path. + + +## Implementation Plan + + +1. Add a focused regression in `src/core/services/tokenizer/annotation-stage.test.ts` showing standalone particle `に` is grammar-excluded and does not retain `isKnown` even when `isKnownWord('に')` is true. +2. Run the focused tokenizer annotation test and confirm the new test fails for the current behavior. +3. Patch `src/core/services/tokenizer/annotation-stage.ts` so grammar-excluded tokens clear known status while still stripping N+1/frequency/JLPT/name metadata. +4. Run the focused test file, then inspect diff and update task acceptance criteria. 
+ + +## Implementation Notes + + +Implemented tokenizer annotation filtering so grammar-excluded subtitle tokens clear known-word status instead of retaining green known coloring. Added focused regression for known-word-cache particle false positive and updated existing expectations for unified annotation clearing. Verification: `bun test src/core/services/tokenizer/annotation-stage.test.ts --test-name-pattern "clears known status from standalone particles"` failed before the production patch; after patch, `bun test src/core/services/tokenizer/annotation-stage.test.ts`, `bun test src/core/services/tokenizer.test.ts`, combined tokenizer tests, `bun run typecheck`, `bun run changelog:lint`, and `bun run test:fast` passed. + +Full handoff gate follow-up: `bun run test:env` and `bun run build` passed. `bun run test:smoke:dist` failed outside this tokenizer change in `dist/core/services/overlay-manager.test.js` because current dirty overlay-window code calls `window.getTitle()` on a test mock that does not provide it. + + +## Final Summary + + +Summary: +- Cleared `isKnown` for grammar-excluded subtitle tokens in the tokenizer annotation stage, preventing standalone particles such as `に` from rendering as known just because a known-word deck contains a matching reading. +- Added a focused regression test for the known-word-cache false positive and updated tokenizer expectations so helper/grammar spans consistently clear all subtitle annotations. +- Added changelog fragment `changes/338-known-word-particle-highlights.md`. + +Verification: +- `bun test src/core/services/tokenizer/annotation-stage.test.ts --test-name-pattern "clears known status from standalone particles"` failed before the production patch. 
+- `bun test src/core/services/tokenizer/annotation-stage.test.ts` +- `bun test src/core/services/tokenizer.test.ts` +- `bun test src/core/services/tokenizer/annotation-stage.test.ts src/core/services/tokenizer.test.ts` +- `bun run typecheck` +- `bun run changelog:lint` +- `bun run test:fast` +- `bun run test:env` +- `bun run build` + +Blocked/External: +- `bun run test:smoke:dist` currently fails outside this tokenizer change in `dist/core/services/overlay-manager.test.js`: dirty overlay-window code calls `window.getTitle()` on a test mock without that method. + diff --git a/backlog/tasks/task-340 - Restore-default-replay-and-next-subtitle-overlay-keybindings.md b/backlog/tasks/task-340 - Restore-default-replay-and-next-subtitle-overlay-keybindings.md new file mode 100644 index 00000000..de6ab702 --- /dev/null +++ b/backlog/tasks/task-340 - Restore-default-replay-and-next-subtitle-overlay-keybindings.md @@ -0,0 +1,65 @@ +--- +id: TASK-340 +title: Restore default replay and next subtitle overlay keybindings +status: Done +assignee: + - Codex +created_date: '2026-05-04 06:25' +updated_date: '2026-05-04 06:49' +labels: + - bug + - keybindings + - overlay + - mpv +dependencies: [] +priority: high +--- + +## Description + + +Default overlay/mpv keybindings for replaying the current subtitle line and playing the next subtitle line are not firing. Shift+H and Shift+L subtitle jumps still work, but Ctrl+Shift+H should replay the current subtitle and pause at subtitle end, and Ctrl+Shift+L should play the next subtitle and pause at subtitle end. Keep the other built-in defaults working. + + +## Acceptance Criteria + +- [x] #1 Default keybindings include working replay-current-subtitle and play-next-subtitle bindings on Ctrl+Shift+H and Ctrl+Shift+L. +- [x] #2 Replay-current-subtitle dispatch reaches the existing runtime path that pauses at the subtitle end. +- [x] #3 Play-next-subtitle dispatch reaches the existing runtime path that pauses at the subtitle end. 
+- [x] #4 Existing default keybindings continue to compile/register without regressions. +- [x] #5 Focused regression tests cover the broken default bindings. + + +## Implementation Plan + + +1. Add focused regression coverage that the resolved defaults compile on Linux without dropping Ctrl+Shift+H/L, and that those keys map to replayCurrentSubtitle/playNextSubtitle session actions. +2. Move the default session-help shortcut off Ctrl/Cmd+Shift+H to a non-conflicting shortcut, then update generated/default config docs so shipped defaults match documentation. +3. Add/adjust coverage for default replay/next bindings and run targeted Bun tests plus plugin session-binding smoke. + +4. Follow-up after live test: fix the mpv plugin shifted-letter key-name conversion so `Ctrl+Shift+KeyL` registers using mpv's uppercase letter form and add Lua regression coverage for both `Ctrl+Shift+L` and `Shift+L`. + + +## Implementation Notes + + +Root cause: default `shortcuts.openSessionHelp = CommandOrControl+Shift+H` canonicalized to `ctrl+shift+KeyH` on Linux/Windows, conflicting with the built-in replay-current-subtitle keybinding. The session-binding compiler drops conflicted bindings, so replay did not register. Moved default session help to `CommandOrControl+Slash` and added regression coverage that defaults compile without a conflict and keep replay/next actions on `Ctrl+Shift+H/L`. + +Follow-up from live test: `Ctrl+Shift+H` works after resolving the help shortcut conflict, but `Ctrl+Shift+L` still behaves like native/other `Ctrl+L`. Investigating mpv/plugin key-name generation for shifted letter chords. + +Follow-up fix: mpv normalizes shifted letter chords to uppercase letter key names (for example `Ctrl+Shift+l` becomes `Ctrl+L`). The plugin previously emitted `Ctrl+Shift+l`, which let live `Ctrl+Shift+L` fall through as the `Ctrl+L` key path. `plugin/subminer/session_bindings.lua` now emits uppercase letters and omits the Shift modifier for shifted `Key[A-Z]` bindings. 
Lua regression coverage now checks `Ctrl+Shift+KeyL -> Ctrl+L`, `Shift+KeyL -> L`, and the play-next CLI dispatch. + +Second live follow-up: `Ctrl+Shift+L` routed to play-next but still behaved like `Shift+L` when playback was already paused because `MpvIpcClient.playNextSubtitle()` explicitly cleared `pendingPauseAtSubEnd` and only sent `sub-seek 1` in paused state. Changed play-next to always arm pause-at-sub-end, clear stale pause target, seek to next subtitle, and unpause when currently paused. Existing sub-end/time-pos handling then pauses at the next subtitle end. + + +## Final Summary + + +Changed the default session-help shortcut from `CommandOrControl+Shift+H` to `CommandOrControl+Slash` so `Ctrl+Shift+H` remains available for replay-current-subtitle and `Ctrl+Shift+L` remains available for play-next-subtitle. Updated config examples, docs-site shortcut/config/usage docs, and added changelog fragment `changes/340-default-subtitle-keybindings.md`. + +Fixed both follow-up issues from live testing. First, the mpv plugin key-name converter now uses mpv's uppercase key form for shifted letter bindings (`Ctrl+Shift+KeyL` registers as `Ctrl+L`, `Shift+KeyL` as `L`). Second, `MpvIpcClient.playNextSubtitle()` now starts playback even when mpv is paused, keeps the pause-at-sub-end path armed, and lets existing subtitle-end timing pause again at the next subtitle end. + +Regression coverage now includes compiled default bindings, Lua plugin shifted-letter registration/CLI dispatch, and paused-state play-next behavior. + +Verification passed: targeted Bun session/mpv/protocol tests, `bun run test:plugin:src`, `bun run changelog:lint`, `bun run build`, and `bun run test:smoke:dist`. Earlier full gate also passed before the follow-ups: `bun run typecheck`, `bun run test:fast`, `bun run test:env`, docs/config checks, and dist smoke. 
+ diff --git a/backlog/tasks/task-341 - Fix-frequency-highlight-for-honorific-prefix-noun-tokens.md b/backlog/tasks/task-341 - Fix-frequency-highlight-for-honorific-prefix-noun-tokens.md new file mode 100644 index 00000000..e6362ade --- /dev/null +++ b/backlog/tasks/task-341 - Fix-frequency-highlight-for-honorific-prefix-noun-tokens.md @@ -0,0 +1,66 @@ +--- +id: TASK-341 +title: Fix frequency highlight for honorific prefix noun tokens +status: Done +assignee: + - codex +created_date: '2026-05-05 02:08' +updated_date: '2026-05-05 02:10' +labels: + - bug + - tokenizer + - frequency +dependencies: [] +documentation: + - docs/architecture/2026-03-15-renderer-performance-design.md +priority: high +--- + +## Description + + +User reported subtitle token `ご機嫌` in `(フランク)ご機嫌が良くないようだな アンドリュー` shows Yomitan/JPDB rank 5484 in popup but is not highlighted as frequent. Frequency annotation currently excludes merged tokens containing default-excluded POS parts such as `接頭詞`; ordinal prefix-noun tokens already have an exception. Desired outcome: honorific prefix + noun lexical tokens like `ご機嫌` keep their valid frequency rank so renderer can apply frequent-token styling, while standalone prefixes and noisy merged grammar fragments remain excluded. + + +## Acceptance Criteria + +- [x] #1 `ご機嫌`-style honorific prefix + noun tokens retain a finite frequency rank after annotation/tokenization when frequency highlighting is enabled. +- [x] #2 Standalone prefix/noise tokens remain excluded from frequency annotation. +- [x] #3 Regression test covers the reported `ご機嫌` rank 5484 behavior. +- [x] #4 Relevant tokenizer/annotation tests pass. + + +## Implementation Plan + + +1. Add a failing regression around honorific prefix + noun token frequency retention, using `ご機嫌` with rank 5484 and POS `接頭詞|名詞` / `名詞接続|一般`. +2. Implement a narrow annotation-stage exception for lexical honorific prefix-noun tokens, adjacent to the existing ordinal prefix-noun allowance. +3. 
Verify standalone prefix/noise exclusion behavior remains covered. +4. Run targeted tokenizer/annotation tests and update acceptance criteria/final notes. + + +## Implementation Notes + + +TDD red verified: `bun test src/core/services/tokenizer.test.ts -t "honorific prefix-noun"` failed with `actual: undefined`, `expected: 5484` before implementation. + +Implemented a narrow honorific prefix-noun frequency allowance for merged `お`/`ご`/`御` + noun tokens with POS `接頭詞|名詞` and prefix POS2 `名詞接続`. Existing standalone prefix/noise exclusion tests still pass. + +Verification: `bun test src/core/services/tokenizer.test.ts src/core/services/tokenizer/annotation-stage.test.ts` passed (164 tests); `bun run typecheck` passed; `bunx prettier --check src/core/services/tokenizer/annotation-stage.ts src/core/services/tokenizer.test.ts` passed. Repo-wide `bun run format:check:src` still fails on pre-existing `src/core/services/stats-window.ts` formatting. + + +## Final Summary + + +Fixed frequency annotation for lexical honorific prefix-noun tokens such as `ご機嫌`. The annotation filter now allows merged `お`/`ご`/`御` prefix + noun tokens with MeCab POS `接頭詞|名詞` / `名詞接続|...` to retain a valid frequency rank, while standalone prefixes and existing noise filters remain excluded. + +Added a tokenizer regression for the reported `ご機嫌` case asserting rank `5484` is preserved after MeCab enrichment and annotation. + +Verification: +- `bun test src/core/services/tokenizer.test.ts -t "honorific prefix-noun"` failed before the fix with `undefined` vs `5484`, then passed after the fix. +- `bun test src/core/services/tokenizer.test.ts src/core/services/tokenizer/annotation-stage.test.ts` passed (164 tests). +- `bun run typecheck` passed. +- `bunx prettier --check src/core/services/tokenizer/annotation-stage.ts src/core/services/tokenizer.test.ts` passed. + +Note: repo-wide `bun run format:check:src` currently fails on unrelated existing formatting in `src/core/services/stats-window.ts`. 
+ diff --git a/backlog/tasks/task-343 - Fix-macOS-character-dictionary-selector-session-shortcut.md b/backlog/tasks/task-343 - Fix-macOS-character-dictionary-selector-session-shortcut.md new file mode 100644 index 00000000..31e32d86 --- /dev/null +++ b/backlog/tasks/task-343 - Fix-macOS-character-dictionary-selector-session-shortcut.md @@ -0,0 +1,44 @@ +--- +id: TASK-343 +title: Fix macOS character dictionary selector session shortcut +status: Done +assignee: [] +created_date: '2026-05-11 08:05' +updated_date: '2026-05-11 08:06' +labels: + - bug + - macos + - character-dictionary + - plugin +dependencies: [] +modified_files: + - plugin/subminer/session_bindings.lua + - scripts/test-plugin-session-bindings.lua +priority: medium +ordinal: 181500 +--- + +## Description + + +Opening the character dictionary AniList selector from mpv/session shortcuts should work on macOS. Current generated session bindings include the openCharacterDictionary session action, but the Lua plugin CLI dispatch table does not map that action to the app flag, so the shortcut cannot reach the runtime selector. + + +## Acceptance Criteria + +- [x] #1 The openCharacterDictionary session action invokes the app with --open-character-dictionary from the mpv plugin. +- [x] #2 Regression coverage proves the Lua session-binding CLI map forwards openCharacterDictionary correctly. +- [x] #3 Existing session-binding regression coverage still passes. + + +## Implementation Notes + + +TDD red: `lua scripts/test-plugin-session-bindings.lua` failed because openCharacterDictionary did not emit --open-character-dictionary. Green after adding the missing Lua CLI mapping. + + +## Final Summary + + +Fixed the mpv plugin session action mapping so the character dictionary selector shortcut dispatches `--open-character-dictionary` to the app. Added Lua regression coverage for the macOS-style Alt+Meta+A binding and verified adjacent TypeScript session binding tests. 
+ diff --git a/backlog/tasks/task-344 - Fix-macOS-overlay-tracker-hiding-while-mpv-remains-active.md b/backlog/tasks/task-344 - Fix-macOS-overlay-tracker-hiding-while-mpv-remains-active.md new file mode 100644 index 00000000..38cbc2e2 --- /dev/null +++ b/backlog/tasks/task-344 - Fix-macOS-overlay-tracker-hiding-while-mpv-remains-active.md @@ -0,0 +1,74 @@ +--- +id: TASK-344 +title: Fix macOS overlay tracker hiding while mpv remains active +status: Done +assignee: + - codex +created_date: '2026-05-11 08:27' +updated_date: '2026-05-11 08:41' +labels: + - bug + - macos + - overlay +dependencies: [] +references: + - src/main/runtime/overlay-visibility-runtime.ts + - src/window-trackers +modified_files: + - src/core/services/overlay-visibility.ts + - src/core/services/overlay-visibility.test.ts + - changes/344-macos-overlay-active-mpv.md +priority: high +ordinal: 182500 +--- + +## Description + + +macOS playback overlay should match Windows behavior: the tracker may only hide or alter overlay layering when mpv is no longer the active playback window. When mpv remains topmost or fullscreen, the visible overlay must stay present and interactive unless the user manually hides it or minimizes mpv. + + +## Acceptance Criteria + +- [x] #1 With mpv active on macOS, window-tracker updates do not hide the visible overlay or make it click-through. +- [x] #2 With mpv fullscreen on macOS, tracker geometry/layering refreshes preserve overlay interactivity. +- [x] #3 Overlay visibility still changes when mpv is no longer active, and manual hide/minimize behavior remains intact. +- [x] #4 A regression test covers the macOS active-mpv path that previously produced an overlay loading OSD and non-interactive overlay. + + +## Implementation Plan + + +1. 
Add a focused regression in `src/core/services/overlay-visibility.test.ts` for macOS with mpv tracked/focused and retained geometry: visibility refresh must not hide the overlay, must not emit loading OSD, and must leave the overlay interactive (`setIgnoreMouseEvents(false)`) unless forced passthrough is active. +2. Update `src/core/services/overlay-visibility.ts` so macOS tracker refreshes preserve the visible overlay while mpv is active/fullscreen, and only hide/re-layer for explicit manual hide, minimized/untracked target, or non-active mpv cases. +3. Run the focused overlay visibility tests, then a broader fast gate if the focused fix is green. + + +## Implementation Notes + + +Implemented in the shared overlay visibility service. macOS now keeps tracked/focused mpv overlays interactive instead of defaulting to mouse passthrough, and preserves an already visible active-mpv overlay during temporary tracker-not-ready refreshes without showing the loading OSD. Forced passthrough, modal hide, manual hide, Windows minimized handling, and initial macOS tracker-not-ready startup behavior remain covered by tests. + +Verification: `bun test src/core/services/overlay-visibility.test.ts` passed; affected overlay/mouse/runtime group passed; `bun run typecheck`, `bun run changelog:lint`, `bun run build`, `bun run test:env`, and `bun run test:smoke:dist` passed. `bun run test:fast` is blocked by existing cross-file test pollution: `src/core/services/subsync.test.ts` passes alone, but fails when run after `src/renderer/handlers/keyboard.test.ts` because `window.electronAPI` is undefined in a lingering keyboard handler; Bun then reports nested node:test errors for later files. `bun run format:check:src` is blocked by pre-existing formatting drift in `src/core/services/stats-window.ts`; touched files pass direct Prettier check. 
+ + +## Final Summary + + +Summary: +- Updated macOS overlay visibility logic so a tracked/focused mpv window keeps the visible overlay interactive instead of click-through. +- Preserved an already visible active-mpv overlay during temporary macOS tracker-not-ready refreshes, avoiding the loading OSD/hide path for that active playback case. +- Added regression coverage for active mpv tracker refreshes and transient tracker-not-ready refreshes, plus updated old macOS expectations to the new active-mpv contract. +- Added a changelog fragment for the user-visible overlay fix. + +Verification: +- Passed: `bun test src/core/services/overlay-visibility.test.ts` +- Passed: `bun test src/core/services/overlay-visibility.test.ts src/core/services/overlay-runtime-init.test.ts src/core/services/overlay-shortcut-handler.test.ts src/renderer/handlers/mouse.test.ts` +- Passed: `bun run typecheck` +- Passed: `bun run changelog:lint` +- Passed: `bun run build` +- Passed: `bun run test:env` +- Passed: `bun run test:smoke:dist` +- Blocked: `bun run test:fast` by existing keyboard/subsync cross-file global pollution; isolated `bun test src/core/services/subsync.test.ts` passes. +- Blocked: `bun run format:check:src` by pre-existing formatting drift in `src/core/services/stats-window.ts`; touched files pass direct Prettier check. 
+ diff --git a/backlog/tasks/task-345 - Address-PR-57-latest-CodeRabbit-review-comments.md b/backlog/tasks/task-345 - Address-PR-57-latest-CodeRabbit-review-comments.md new file mode 100644 index 00000000..8a479553 --- /dev/null +++ b/backlog/tasks/task-345 - Address-PR-57-latest-CodeRabbit-review-comments.md @@ -0,0 +1,70 @@ +--- +id: TASK-345 +title: Address PR 57 latest CodeRabbit review comments +status: Done +assignee: + - codex +created_date: '2026-05-12 06:35' +updated_date: '2026-05-12 06:38' +labels: + - pr-review + - coderabbit +dependencies: [] +references: + - 'https://github.com/ksyasuda/SubMiner/pull/57' +priority: medium +--- + +## Description + + +Assess the 2026-05-11 CodeRabbit review on PR #57 and address still-valid actionable comments with minimal changes. Current comments cover mpv subtitle playback unpause behavior, parser-selection empty reading handling, and overlay focus selector accessibility. + + +## Acceptance Criteria + +- [x] #1 Still-valid CodeRabbit comments from the latest PR #57 review are fixed or explicitly documented as skipped with rationale. +- [x] #2 Regression coverage is added for behavior-affecting fixes where practical before production code changes. +- [x] #3 Relevant targeted checks pass locally. + + +## Implementation Plan + + +1. Inspect current code and nearby tests for the three latest CodeRabbit comments on PR #57. +2. Add regression tests first for behavior-impacting findings: mpv playNextSubtitle unpauses when pause state is unknown, and parser selection preserves combined reading for empty segment readings. +3. Apply minimal production fixes plus the CSS :focus-visible selector change. +4. Run targeted test commands for touched areas and update task notes/final status. + + +## Implementation Notes + + +Added red tests for the two behavior comments. `bun test src/core/services/mpv.test.ts` fails because playNextSubtitle skips unpause when pause state is null. 
`bun test src/core/services/tokenizer/parser-selection-stage.test.ts` fails because empty grammar-ending reading clears preceding combined reading. + +Implemented all three latest CodeRabbit findings. Added regressions for mpv unknown pause state and parser-selection empty reading handling; changed overlay focus selectors to :focus-visible. Also fixed existing Prettier failure in src/core/services/stats-window.ts. Verification passed: targeted tests, typecheck, test:fast, test:env, format:check:src, build, test:smoke:dist, changelog:lint, changelog:pr-check. + + +## Final Summary + + +Addressed the latest CodeRabbit comments on PR #57. + +Changes: +- `playNextSubtitle` now sends `pause=false` unless mpv is explicitly known to be playing, covering startup/reconnect unknown pause state. +- Parser selection no longer slices combined readings for empty grammar-ending readings, preserving preceding token readings. +- Overlay focus suppression now targets `:focus-visible` selectors. +- Applied Prettier to `src/core/services/stats-window.ts` to clear the existing formatting gate failure. 
+ +Verification: +- `bun test src/core/services/mpv.test.ts` +- `bun test src/core/services/tokenizer/parser-selection-stage.test.ts` +- `bun run typecheck` +- `bun run test:fast` +- `bun run test:env` +- `bun run format:check:src` +- `bun run build` +- `bun run test:smoke:dist` +- `bun run changelog:lint` +- `bun run changelog:pr-check` + diff --git a/backlog/tasks/task-346 - Fix-stats-session-detail-when-recent-media-is-missing.md b/backlog/tasks/task-346 - Fix-stats-session-detail-when-recent-media-is-missing.md new file mode 100644 index 00000000..ce403790 --- /dev/null +++ b/backlog/tasks/task-346 - Fix-stats-session-detail-when-recent-media-is-missing.md @@ -0,0 +1,57 @@ +--- +id: TASK-346 +title: Fix stats session detail when recent media is missing +status: Done +assignee: + - Codex +created_date: '2026-05-12 06:41' +updated_date: '2026-05-12 06:44' +labels: + - bug + - stats +dependencies: [] +priority: high +--- + +## Description + + +Stats overview can show a completed session, but clicking it opens a detail view that says "Media not found". The details view should resolve the session/media consistently for recently completed local playback so users can inspect session cards, words, timeline, and media stats after a video finishes. + + +## Acceptance Criteria + +- [x] #1 A completed session listed on the stats overview opens a usable details view instead of "Media not found" when its media is still present in stats data. +- [x] #2 Regression coverage reproduces the overview-to-detail lookup mismatch and verifies the corrected behavior. +- [x] #3 Relevant stats/detail documentation is updated if behavior or APIs change. + + +## Implementation Plan + + +1. Add a focused regression test in `src/core/services/immersion-tracker/__tests__/query.test.ts` covering a video/session visible from session summaries before `imm_lifetime_media` exists. +2. 
Update `getMediaDetail` in `src/core/services/immersion-tracker/query-library.ts` so detail rows can resolve from `imm_videos` plus session metrics when lifetime summary is absent. +3. Run the focused query test lane and update task notes/acceptance criteria. + + +## Implementation Notes + + +Implemented root-cause fix in `getMediaDetail`: media detail now resolves from `imm_videos` plus session metrics when `imm_lifetime_media` is not populated yet, while still preferring lifetime summary totals when available. Added regression test for session-visible/media-detail-missing mismatch. No docs update required because the API shape is unchanged; added changelog fragment `changes/346-stats-session-detail.md`. Verification: `bun test src/core/services/immersion-tracker/__tests__/query.test.ts`, `bun run typecheck`, `bun run format:check:src`, `bun run test:fast`, `bun run changelog:lint`. + + +## Final Summary + + +Summary: +- Fixed stats media detail lookup so sessions visible in overview can open detail even before lifetime media summaries exist. +- Preserved lifetime-summary totals when available and added a regression test for the missing-lifetime case. +- Added `changes/346-stats-session-detail.md` for the user-visible fix. 
+ +Tests: +- `bun test src/core/services/immersion-tracker/__tests__/query.test.ts` +- `bun run typecheck` +- `bun run format:check:src` +- `bun run test:fast` +- `bun run changelog:lint` + diff --git a/backlog/tasks/task-347 - Address-PR-57-CodeRabbit-review-round-after-stats-session-fix.md b/backlog/tasks/task-347 - Address-PR-57-CodeRabbit-review-round-after-stats-session-fix.md new file mode 100644 index 00000000..f3e7d685 --- /dev/null +++ b/backlog/tasks/task-347 - Address-PR-57-CodeRabbit-review-round-after-stats-session-fix.md @@ -0,0 +1,61 @@ +--- +id: TASK-347 +title: Address PR 57 CodeRabbit review round after stats session fix +status: Done +assignee: + - codex +created_date: '2026-05-12 07:02' +updated_date: '2026-05-12 09:48' +labels: + - pr-review + - coderabbit + - ci +dependencies: [] +references: + - 'https://github.com/ksyasuda/SubMiner/pull/57' +modified_files: + - src/core/services/overlay-window-input.ts + - src/core/services/overlay-window.test.ts +priority: high +--- + +## Description + + +Assess and address the 2026-05-12 CodeRabbit review on PR #57 plus the current red GitHub Actions check. Latest comments cover stats session detail token aggregation, Linux fullscreen overlay refresh scheduling, Hyprland title-event polling, malformed Hyprland monitor JSON handling, and JLPT-lock test coverage for name matches. + + +## Acceptance Criteria + +- [x] #1 Still-valid latest CodeRabbit findings on PR #57 are fixed or documented as skipped with rationale. +- [x] #2 CI failure context is inspected and any repo-relevant failing tests or formatting issues are fixed. +- [x] #3 Regression coverage is added for behavior changes where practical before production edits. +- [x] #4 Relevant local verification passes. + + +## Implementation Plan + + +1. Inspect failing GitHub Actions log and current code around each latest CodeRabbit finding. +2. 
Add or update focused regression tests first for behavior changes: stats token aggregation, fullscreen refresh exit cancellation, Hyprland monitor parse failure, and title-only event filtering. +3. Apply minimal production fixes for still-valid findings, plus the subtitle-render duplicate test coverage item. +4. Run targeted tests first, then format/typecheck and broader relevant gates; update the task with results. + +Address latest CodeRabbit callback-scope finding: add regression coverage that macOS visible-overlay blur does not invoke onWindowsVisibleOverlayBlur, then split win32/darwin blur handling in src/core/services/overlay-window-input.ts. + + +## Implementation Notes + + +2026-05-12 09:47: Assessed latest PR #57 CodeRabbit comments via gh. Prior review findings in comments are marked addressed by CodeRabbit; latest still-actionable item was macOS visible-overlay blur invoking the Windows-only blur callback in overlay-window-input.ts. + +Added regression coverage showing macOS visible-overlay blur leaves onWindowsVisibleOverlayBlur inactive; test failed before production change and passed after splitting win32/darwin handling. + +Verification: bun test src/core/services/overlay-window.test.ts; bun run typecheck; git diff --check. PR checks: build-test-audit pass, GitGuardian pass, CodeRabbit pass/skipped, Claude skipped. + + +## Final Summary + + +Addressed the latest CodeRabbit callback-scope review on PR #57. Windows visible-overlay blur still invokes onWindowsVisibleOverlayBlur and returns without restacking; macOS visible-overlay blur now returns without restacking and without invoking the Windows-only callback. Added focused regression coverage and verified targeted tests, typecheck, diff whitespace, and current PR checks. 
+ diff --git a/backlog/tasks/task-348 - Fix-PR-57-coverage-CI-focus-chrome-failure.md b/backlog/tasks/task-348 - Fix-PR-57-coverage-CI-focus-chrome-failure.md new file mode 100644 index 00000000..3b2d770c --- /dev/null +++ b/backlog/tasks/task-348 - Fix-PR-57-coverage-CI-focus-chrome-failure.md @@ -0,0 +1,56 @@ +--- +id: TASK-348 +title: Fix PR 57 coverage CI focus chrome failure +status: Done +assignee: + - '@codex' +created_date: '2026-05-12 07:02' +updated_date: '2026-05-12 07:11' +labels: + - ci + - bug +dependencies: [] +references: + - 'https://github.com/ksyasuda/SubMiner/pull/57' + - 'https://github.com/ksyasuda/SubMiner/actions/runs/25718536412' +priority: high +--- + +## Description + + +Investigate and fix current GitHub Actions `build-test-audit` failure on PR #57 (`tokenizer-updates`). CI fails during `bun run test:coverage:src` in the maintained source lane: `renderer stylesheet hides focus chrome on top-level overlay focus targets`. + + +## Acceptance Criteria + +- [x] #1 Root cause of the focus chrome coverage failure is identified from CI/local test output. +- [x] #2 A focused fix is applied without broad unrelated changes. +- [x] #3 Relevant local coverage/test command passes. +- [x] #4 Remote PR check status is rechecked or next CI action is documented. + + +## Implementation Plan + + +1. Reproduce the CI failure locally with `bun test src/renderer/overlay-legacy-cleanup.test.ts`. +2. Update the stale legacy cleanup assertion to expect top-level `:focus-visible` suppression and reject broad `:focus` suppression. +3. Run the targeted test and `bun run test:coverage:src` to match CI's failing lane. +4. Recheck PR checks or document that CI needs a push/rerun. + + +## Implementation Notes + + +CI/local root cause: `src/renderer/style.css` was intentionally changed to `html/body/#overlay:focus-visible`, but `src/renderer/overlay-legacy-cleanup.test.ts` still required broad `:focus` selectors. The stale assertion fails in `test:coverage:src`. 
+ +Additional coverage-lane failure after first fix: `src/main/runtime/linux-mpv-fullscreen-overlay-refresh.test.ts` imported `updateLinuxMpvFullscreenOverlayRefreshBurst`, but `src/main/runtime/linux-mpv-fullscreen-overlay-refresh.ts` did not export/implement it. Added the helper to cancel existing bursts and schedule only while fullscreen is true. + +Verification passed: `bun test src/renderer/overlay-legacy-cleanup.test.ts`; `bun test src/main/runtime/linux-mpv-fullscreen-overlay-refresh.test.ts`; `bun run test:coverage:src`; `bun run format:check:src`. `gh pr checks 57` still reports the old failed `build-test-audit` run at run 25718536412; branch needs push/rerun for remote green. + + +## Final Summary + + +Fixed current PR #57 `build-test-audit` CI blockers. Updated the stale overlay legacy cleanup assertion to expect `:focus-visible` top-level focus suppression and guard against reintroducing broad `:focus` suppression. Added the missing `updateLinuxMpvFullscreenOverlayRefreshBurst` export used by the Linux fullscreen overlay refresh tests. Verification passed locally: focused overlay legacy cleanup test, focused Linux fullscreen refresh test, `bun run test:coverage:src`, and `bun run format:check:src`. Remote PR checks still show the old failed `build-test-audit` run until these local changes are pushed and CI reruns. 
+ diff --git a/backlog/tasks/task-349 - Fix-macOS-overlay-window-ordering-behind-foreground-apps.md b/backlog/tasks/task-349 - Fix-macOS-overlay-window-ordering-behind-foreground-apps.md new file mode 100644 index 00000000..b6ad6cb4 --- /dev/null +++ b/backlog/tasks/task-349 - Fix-macOS-overlay-window-ordering-behind-foreground-apps.md @@ -0,0 +1,74 @@ +--- +id: TASK-349 +title: Fix macOS overlay window ordering behind foreground apps +status: Done +assignee: [] +created_date: '2026-05-12 08:50' +updated_date: '2026-05-12 08:58' +labels: + - bug + - macos + - overlay +dependencies: [] +references: + - src/core/services/overlay-visibility.ts + - src/window-trackers + - TASK-344 +modified_files: + - src/core/services/overlay-visibility.ts + - src/core/services/overlay-visibility.test.ts + - src/core/services/overlay-window-input.ts + - src/core/services/overlay-window.test.ts + - changes/349-macos-overlay-z-order.md +priority: high +ordinal: 183500 +--- + +## Description + + +macOS overlay should stay visually above mpv, but remain grouped with mpv in normal desktop stacking. When another app/window is brought in front of mpv, that window must also appear in front of the overlay, matching Windows behavior. This follows the earlier active-mpv fix that stopped the overlay from hiding while mpv remained foremost. + + +## Acceptance Criteria + +- [x] #1 When mpv is the foreground playback window on macOS, the overlay remains visible above mpv. +- [x] #2 When another application or window is brought in front of mpv on macOS, that foreground window appears above both mpv and the overlay. +- [x] #3 Restoring mpv to the foreground restores the overlay above mpv without requiring a restart. +- [x] #4 Regression coverage documents the macOS stacking relationship and does not regress the prior active-mpv overlay preservation behavior. + + +## Implementation Plan + + +1. 
Add focused regression coverage for macOS mpv focus loss: the overlay must release its topmost level, remain visible/click-through, and stop enforcing layer order while mpv is behind another window. +2. Add focused blur-handler coverage so the macOS visible overlay does not restack itself when it loses focus. +3. Update overlay visibility and blur handling to use tracker focus as the macOS stacking boundary: focused mpv raises overlay; unfocused mpv releases topmost and skips restack. +4. Run focused overlay tests, formatting, typecheck, changelog lint, env/build/smoke checks; document any blocked broad gate separately. + + +## Implementation Notes + + +Implemented macOS stacking boundary using tracker focus. When tracked mpv is unfocused and the overlay itself is not focused, the visible overlay now releases Electron always-on-top, remains visible/click-through, and skips layer-order enforcement. Visible overlay blur restacking is also skipped on macOS, matching the Windows no-restack path for focus loss. `test:fast` remains blocked by existing cross-file pollution: `keyboard.test.ts` leaves `window.electronAPI` undefined for a later `subsync.test.ts`, causing Bun nested `node:test` errors in subsequent files. + + +## Final Summary + + +Summary: +- Updated macOS overlay visibility so tracked mpv focus controls stacking: focused mpv keeps the overlay raised; unfocused mpv releases topmost while keeping the overlay visible and click-through. +- Stopped macOS visible overlay blur handling from immediately restacking the overlay above unrelated foreground windows. +- Added regression tests for macOS mpv focus loss and macOS blur restacking behavior. +- Added a changelog fragment for the user-visible overlay z-order fix. 
+ +Verification: +- Passed: `bun test src/core/services/overlay-visibility.test.ts src/core/services/overlay-window.test.ts` +- Passed: `bunx prettier --check src/core/services/overlay-visibility.ts src/core/services/overlay-visibility.test.ts src/core/services/overlay-window-input.ts src/core/services/overlay-window.test.ts changes/349-macos-overlay-z-order.md` +- Passed: `bun run typecheck` +- Passed: `bun run changelog:lint` +- Passed: `bun run test:env` +- Passed: `bun run build` +- Passed: `bun run test:smoke:dist` +- Blocked: `bun run test:fast` by existing keyboard/subsync cross-file global pollution; focused and environment tests pass. + diff --git a/backlog/tasks/task-350 - Fix-known-highlighting-for-Yomitan-compound-tokens.md b/backlog/tasks/task-350 - Fix-known-highlighting-for-Yomitan-compound-tokens.md new file mode 100644 index 00000000..77ddb1d6 --- /dev/null +++ b/backlog/tasks/task-350 - Fix-known-highlighting-for-Yomitan-compound-tokens.md @@ -0,0 +1,62 @@ +--- +id: TASK-350 +title: Fix known highlighting for Yomitan compound tokens +status: Done +assignee: + - codex +created_date: '2026-05-12 09:08' +updated_date: '2026-05-12 09:29' +labels: + - bug + - tokenizer +dependencies: [] +modified_files: + - src/core/services/tokenizer/yomitan-parser-runtime.ts + - src/core/services/tokenizer/yomitan-parser-runtime.test.ts + - src/core/services/tokenizer.test.ts + - changes/350-known-yomitan-token-highlighting.md +priority: high +ordinal: 184500 +--- + +## Description + + +Subtitle known-word coloring should respect the lexical token selected by Yomitan. If Yomitan emits a compound or inflected expression as one token, SubMiner must not mark that displayed token known solely because MeCab/POS enrichment can decompose it into known component words. + + +## Acceptance Criteria + +- [x] #1 A Yomitan token such as `取り組んで` with headword `取り組む` remains not-known when only component words like `取る` or `組む` are known. 
+- [x] #2 Frequency/JLPT/POS enrichment still works for the selected Yomitan token without leaking component known-word status into `isKnown`. +- [x] #3 Regression coverage demonstrates the compound-token case and fails on current behavior before the fix. + + +## Implementation Plan + + +1. Add a regression in `src/core/services/tokenizer.test.ts` for a Yomitan-selected compound token: Yomitan emits `取り組んで` with headword `取り組む`; MeCab splits the same span into component tokens whose headwords include known component words such as `組む`; expected result is one displayed token with `isKnown === false` when only the components are known. +2. Verify the regression fails on current code. +3. Patch MeCab enrichment so it only contributes POS metadata used by annotation filters/exclusions. It must preserve the Yomitan token's `surface`, `headword`, `reading`, offsets, and existing lexical annotation state, especially `isKnown`. +4. Re-run the targeted tokenizer test, then a relevant fast test lane if practical. + +After inspecting code, MeCab enrichment currently only writes POS metadata. The observed component coloring can also come from SubMiner's custom Yomitan scanning path fragmenting a phrase differently than Yomitan's internal parser. Regression should exercise `requestYomitanScanTokens` fallback/parser behavior as seen by `tokenizeSubtitle`, and the fix should prefer Yomitan internal parse token identity while keeping MeCab limited to filtering/POS metadata. + + +## Implementation Notes + + +User clarified MeCab is intended only to help filter unwanted characters/particles/sound effects/etc., not to alter lexical tokenization or known-word decisions. + +Implementation settled on parse-first token identity: `requestYomitanScanTokens` now reads Yomitan internal parse tokens first. It still runs the scanner to keep scanner metadata when spans agree, but returns parse tokens when the scanner fragments the parse token. MeCab remains POS/filter enrichment only. 
+ + +## Final Summary + + +Fixed known-word highlighting for Yomitan compound tokens by preferring Yomitan internal parse token spans over fragmented scanner output. When scanner output agrees with parse spans, scanner metadata such as name-match and word classes is preserved; when it fragments a Yomitan token, the parse token identity wins so known component words do not color the larger unknown token green. + +Added regressions for `取り組んで` with known component words (`取る`, `組む`, `もらう`) and for parser-runtime token selection/metadata behavior. Added a changelog fragment. + +Validation run: `bun test src/core/services/tokenizer.test.ts src/core/services/tokenizer/yomitan-parser-runtime.test.ts src/core/services/tokenizer/parser-selection-stage.test.ts src/core/services/tokenizer/parser-enrichment-stage.test.ts`; `bun run typecheck`; `bun x prettier --check src/core/services/tokenizer.test.ts src/core/services/tokenizer/yomitan-parser-runtime.ts src/core/services/tokenizer/yomitan-parser-runtime.test.ts changes/350-known-yomitan-token-highlighting.md`; `bun run changelog:lint`; `git diff --check`. + diff --git a/changes/305-tokenizer-word-class-pos-filtering.md b/changes/305-tokenizer-word-class-pos-filtering.md new file mode 100644 index 00000000..cb3a3ab9 --- /dev/null +++ b/changes/305-tokenizer-word-class-pos-filtering.md @@ -0,0 +1,6 @@ +type: fixed +area: tokenizer + +- Use Yomitan `wordClasses` metadata for subtitle POS filtering. +- Backfill blank MeCab POS detail fields during parser enrichment. +- Keep subtitle annotation metadata stripped from token results. 
diff --git a/changes/306-hyprland-fullscreen-overlay.md b/changes/306-hyprland-fullscreen-overlay.md new file mode 100644 index 00000000..187d8603 --- /dev/null +++ b/changes/306-hyprland-fullscreen-overlay.md @@ -0,0 +1,4 @@ +type: fixed +area: overlay + +- Fixed Hyprland fullscreen transitions so mpv fullscreen changes refresh visible overlay geometry, reassert topmost stacking, and keep primary subtitle hover pause working after resize/toggle cycles. diff --git a/changes/307-kana-nplusone-targets.md b/changes/307-kana-nplusone-targets.md new file mode 100644 index 00000000..7ea569f9 --- /dev/null +++ b/changes/307-kana-nplusone-targets.md @@ -0,0 +1,4 @@ +type: fixed +area: tokenizer + +- Stopped kana-only subtitle tokens from being selected as N+1 targets. diff --git a/changes/308-jlpt-underlines.md b/changes/308-jlpt-underlines.md new file mode 100644 index 00000000..61d41b17 --- /dev/null +++ b/changes/308-jlpt-underlines.md @@ -0,0 +1,4 @@ +type: fixed +area: overlay + +- Overlay: Restored persistent JLPT subtitle underlines while keeping hover JLPT labels and annotation color priority intact. diff --git a/changes/309-multi-mine-modified-digits.md b/changes/309-multi-mine-modified-digits.md new file mode 100644 index 00000000..98ce24ca --- /dev/null +++ b/changes/309-multi-mine-modified-digits.md @@ -0,0 +1,4 @@ +type: fixed +area: shortcuts + +- Accept follow-up number-row digits for multi-line subtitle mining even when the original shortcut modifiers are still held. diff --git a/changes/311-auxiliary-inflection-annotation-filter.md b/changes/311-auxiliary-inflection-annotation-filter.md new file mode 100644 index 00000000..61982ac4 --- /dev/null +++ b/changes/311-auxiliary-inflection-annotation-filter.md @@ -0,0 +1,4 @@ +type: fixed +area: overlay + +- Suppressed subtitle annotation styling for standalone auxiliary inflection fragments such as `れる` and `れた` while keeping lexical `くれる` forms eligible for lookup metadata. 
diff --git a/changes/312-grammar-ending-annotation-filter.md b/changes/312-grammar-ending-annotation-filter.md new file mode 100644 index 00000000..eade8503 --- /dev/null +++ b/changes/312-grammar-ending-annotation-filter.md @@ -0,0 +1,4 @@ +type: fixed +area: overlay + +- Suppressed subtitle annotation styling for grammar-only endings such as `じゃないですか` and standalone polite copula tails like `です` / `ですよ`. diff --git a/changes/316-macos-playback-stats-daemon.md b/changes/316-macos-playback-stats-daemon.md new file mode 100644 index 00000000..841d5a92 --- /dev/null +++ b/changes/316-macos-playback-stats-daemon.md @@ -0,0 +1,4 @@ +type: fixed +area: stats + +- Kept regular app stats routing isolated from a separately running background stats daemon during playback startup. diff --git a/changes/318-jlpt-underline-selection.md b/changes/318-jlpt-underline-selection.md new file mode 100644 index 00000000..75ec64c1 --- /dev/null +++ b/changes/318-jlpt-underline-selection.md @@ -0,0 +1,4 @@ +type: fixed +area: overlay + +- Overlay: Kept JLPT subtitle underlines at their JLPT color after dictionary lookups, even when the token also has another annotation color. diff --git a/changes/319-interjection-annotation-filter.md b/changes/319-interjection-annotation-filter.md new file mode 100644 index 00000000..244665e4 --- /dev/null +++ b/changes/319-interjection-annotation-filter.md @@ -0,0 +1,4 @@ +type: fixed +area: tokenizer + +- Tokenizer: Suppress annotations for ハァ-style interjection subtitles. diff --git a/changes/320-current-subtitle-known-highlight.md b/changes/320-current-subtitle-known-highlight.md new file mode 100644 index 00000000..4c60f805 --- /dev/null +++ b/changes/320-current-subtitle-known-highlight.md @@ -0,0 +1,4 @@ +type: fixed +area: overlay + +- Overlay: Refresh the current subtitle after successful card mining so newly known words recolor immediately. 
diff --git a/changes/321-grammar-ending-pattern-filter.md b/changes/321-grammar-ending-pattern-filter.md new file mode 100644 index 00000000..d09a4057 --- /dev/null +++ b/changes/321-grammar-ending-pattern-filter.md @@ -0,0 +1,5 @@ +type: fixed +area: tokenizer + +- Tokenizer: Replaced hard-coded standalone grammar-ending permutations with shared pattern matching for polite copula, negative copula, and explanatory subtitle endings. +- Tokenizer: Kept grammar annotation exclusion logic in the shared subtitle filter and removed stale duplicate exclusion helpers from the annotation stage. diff --git a/changes/322-preserve-word-audio-manual-update.md b/changes/322-preserve-word-audio-manual-update.md new file mode 100644 index 00000000..42e0ce67 --- /dev/null +++ b/changes/322-preserve-word-audio-manual-update.md @@ -0,0 +1,4 @@ +type: fixed +area: anki + +- Anki: Manual clipboard subtitle updates now preserve existing word audio while replacing sentence audio and animated-image media. diff --git a/changes/323-macos-overlay-tracker-flaps.md b/changes/323-macos-overlay-tracker-flaps.md new file mode 100644 index 00000000..0f026c8a --- /dev/null +++ b/changes/323-macos-overlay-tracker-flaps.md @@ -0,0 +1,4 @@ +type: fixed +area: overlay + +- Kept the macOS visible overlay alive during transient mpv tracker flaps when the last tracked video geometry is still available. diff --git a/changes/324-mpv-playlist-overlay-reuse.md b/changes/324-mpv-playlist-overlay-reuse.md new file mode 100644 index 00000000..31e64981 --- /dev/null +++ b/changes/324-mpv-playlist-overlay-reuse.md @@ -0,0 +1,4 @@ +type: fixed +area: mpv + +- mpv: Playlist navigation now reuses the running SubMiner overlay without repeating the pause-until-ready warmup gate. 
diff --git a/changes/325-jlpt-combined-annotation-underlines.md b/changes/325-jlpt-combined-annotation-underlines.md new file mode 100644 index 00000000..def6e4be --- /dev/null +++ b/changes/325-jlpt-combined-annotation-underlines.md @@ -0,0 +1,4 @@ +type: fixed +area: overlay + +- Overlay: Kept JLPT subtitle underlines on their JLPT color when lookup selection overlaps known-word or frequency annotation colors. diff --git a/changes/326-anilist-time-position-post-watch.md b/changes/326-anilist-time-position-post-watch.md new file mode 100644 index 00000000..c504aadb --- /dev/null +++ b/changes/326-anilist-time-position-post-watch.md @@ -0,0 +1,4 @@ +type: fixed +area: anilist + +- AniList: Run post-watch progress checks on mpv time-position updates, read the fresh mpv position before threshold checks, wire manual mark-watched to force a progress sync, and fill missing `guessit` episode metadata from the filename parser. diff --git a/changes/327-stats-daemon-deferral.md b/changes/327-stats-daemon-deferral.md new file mode 100644 index 00000000..9a6dd528 --- /dev/null +++ b/changes/327-stats-daemon-deferral.md @@ -0,0 +1,4 @@ +type: fixed +area: stats + +- Restored stats startup deferral to a running background stats daemon so video launches no longer fail when the stats port is already in use. diff --git a/changes/328-subtitle-prefetch-cache-hit-resume.md b/changes/328-subtitle-prefetch-cache-hit-resume.md new file mode 100644 index 00000000..f720f112 --- /dev/null +++ b/changes/328-subtitle-prefetch-cache-hit-resume.md @@ -0,0 +1,4 @@ +type: fixed +area: overlay + +- Kept subtitle annotation prefetch running after immediate cache-hit renders so upcoming subtitle colors stay ready. 
diff --git a/changes/332-subtitle-frequency-ordinal-prefix.md b/changes/332-subtitle-frequency-ordinal-prefix.md new file mode 100644 index 00000000..7676cb4d --- /dev/null +++ b/changes/332-subtitle-frequency-ordinal-prefix.md @@ -0,0 +1,4 @@ +type: fixed +area: overlay + +- Overlay: Fixed frequency highlighting for ordinal prefix-noun tokens like `第二` so popup ranks such as JPDB 1820 are preserved in subtitle annotations. diff --git a/changes/333-aru-annotation-filter.md b/changes/333-aru-annotation-filter.md new file mode 100644 index 00000000..7f3425bc --- /dev/null +++ b/changes/333-aru-annotation-filter.md @@ -0,0 +1,4 @@ +type: fixed +area: tokenizer + +- Suppressed N+1, JLPT, frequency, and name styling for `ある` / `有る` existence verbs while still allowing known-word highlighting. diff --git a/changes/334-coderabbit-followups.md b/changes/334-coderabbit-followups.md new file mode 100644 index 00000000..05506611 --- /dev/null +++ b/changes/334-coderabbit-followups.md @@ -0,0 +1,4 @@ +type: fixed +area: anilist + +- AniList: Prevented duplicate post-watch writes during concurrent checks, preserved manual watched marks when post-watch sync fails, and kept known-word cache refresh notifications accurate after cache resets. diff --git a/changes/335-anilist-linux-token-setup.md b/changes/335-anilist-linux-token-setup.md new file mode 100644 index 00000000..201708cc --- /dev/null +++ b/changes/335-anilist-linux-token-setup.md @@ -0,0 +1,4 @@ +type: fixed +area: anilist + +- AniList: Kept config reload from opening the setup window during playback when token storage cannot be resolved, and stopped setup login from reporting success when encrypted token persistence fails. 
diff --git a/changes/336-hyprland-fullscreen-overlay.md b/changes/336-hyprland-fullscreen-overlay.md new file mode 100644 index 00000000..6bb3ed24 --- /dev/null +++ b/changes/336-hyprland-fullscreen-overlay.md @@ -0,0 +1,4 @@ +type: fixed +area: overlay + +- Overlay: Aligned the Hyprland fullscreen overlay with mpv when mpv reports client-requested fullscreen, force-applied exact Hyprland overlay window bounds after floating, disabled Hyprland floating-window decoration on exact overlay placement, compensated stats overlay placement for Electron/Wayland content insets, and made the stats overlay page/window opaque so mpv cannot show through transparent top insets. diff --git a/changes/336-overlay-focus-ring.md b/changes/336-overlay-focus-ring.md new file mode 100644 index 00000000..7596f35d --- /dev/null +++ b/changes/336-overlay-focus-ring.md @@ -0,0 +1,4 @@ +type: fixed +area: overlay + +- Hid the browser focus outline on the top-level overlay surface so focused overlays no longer show a yellow/orange viewport border. diff --git a/changes/337-anilist-safe-storage-retry.md b/changes/337-anilist-safe-storage-retry.md new file mode 100644 index 00000000..6330e689 --- /dev/null +++ b/changes/337-anilist-safe-storage-retry.md @@ -0,0 +1,4 @@ +type: fixed +area: anilist + +- AniList: Retried Linux safeStorage availability after transient keyring startup failures so stored tokens can load and setup tokens can save once GNOME libsecret becomes available. diff --git a/changes/338-known-word-particle-highlights.md b/changes/338-known-word-particle-highlights.md new file mode 100644 index 00000000..c759e8e7 --- /dev/null +++ b/changes/338-known-word-particle-highlights.md @@ -0,0 +1,4 @@ +type: fixed +area: tokenizer + +- Prevented standalone grammar and helper tokens such as `に` from being colored as known words when readings from known-word decks match them. 
diff --git a/changes/339-hyprland-overlay-pin.md b/changes/339-hyprland-overlay-pin.md new file mode 100644 index 00000000..29316947 --- /dev/null +++ b/changes/339-hyprland-overlay-pin.md @@ -0,0 +1,4 @@ +type: fixed +area: overlay + +- Overlay: Stopped Hyprland from pinning SubMiner overlay windows across workspaces while keeping floating placement for fullscreen alignment. diff --git a/changes/340-default-subtitle-keybindings.md b/changes/340-default-subtitle-keybindings.md new file mode 100644 index 00000000..c5d71b55 --- /dev/null +++ b/changes/340-default-subtitle-keybindings.md @@ -0,0 +1,4 @@ +type: fixed +area: overlay + +- Overlay: Fixed the default replay/next subtitle keybindings by moving the session-help shortcut to `Ctrl/Cmd+/`, leaving `Ctrl+Shift+H` and `Ctrl+Shift+L` free for subtitle playback controls. The mpv plugin now registers shifted letter chords with mpv's uppercase key form so `Ctrl+Shift+L` reaches the play-next-subtitle action instead of falling through as `Ctrl+L`, and play-next now starts playback from a paused state before pausing again at the subtitle end. diff --git a/changes/344-macos-overlay-active-mpv.md b/changes/344-macos-overlay-active-mpv.md new file mode 100644 index 00000000..ab763933 --- /dev/null +++ b/changes/344-macos-overlay-active-mpv.md @@ -0,0 +1,4 @@ +type: fixed +area: overlay + +- Overlay: Kept the macOS overlay visible and interactive while mpv remains the active tracked window, including transient tracker refreshes. diff --git a/changes/346-stats-session-detail.md b/changes/346-stats-session-detail.md new file mode 100644 index 00000000..4fc61465 --- /dev/null +++ b/changes/346-stats-session-detail.md @@ -0,0 +1,4 @@ +type: fixed +area: stats + +- Fixed recent session detail pages showing "Media not found" before lifetime media summaries are available. 
diff --git a/changes/349-macos-overlay-z-order.md b/changes/349-macos-overlay-z-order.md new file mode 100644 index 00000000..fa053783 --- /dev/null +++ b/changes/349-macos-overlay-z-order.md @@ -0,0 +1,4 @@ +type: fixed +area: overlay + +- Overlay: Kept the macOS overlay behind unrelated foreground windows while preserving its position above mpv. diff --git a/changes/350-known-yomitan-token-highlighting.md b/changes/350-known-yomitan-token-highlighting.md new file mode 100644 index 00000000..6ca12016 --- /dev/null +++ b/changes/350-known-yomitan-token-highlighting.md @@ -0,0 +1,4 @@ +type: fixed +area: tokenizer + +- Tokenizer: Preserve Yomitan compound tokens for known-word highlighting so known component words no longer color a larger unknown word green. diff --git a/config.example.jsonc b/config.example.jsonc index 1b3470ed..d734c08a 100644 --- a/config.example.jsonc +++ b/config.example.jsonc @@ -175,7 +175,7 @@ "openCharacterDictionary": "CommandOrControl+Alt+A", // Open character dictionary setting. "openRuntimeOptions": "CommandOrControl+Shift+O", // Open runtime options setting. "openJimaku": "Ctrl+Shift+J", // Open jimaku setting. - "openSessionHelp": "CommandOrControl+Shift+H", // Open session help setting. + "openSessionHelp": "CommandOrControl+Slash", // Open session help setting. "openControllerSelect": "Alt+C", // Open controller select setting. "openControllerDebug": "Alt+Shift+C", // Open controller debug setting. "toggleSubtitleSidebar": "Backslash" // Toggle subtitle sidebar setting. diff --git a/docs-site/anki-integration.md b/docs-site/anki-integration.md index 7a1c05f8..a024336d 100644 --- a/docs-site/anki-integration.md +++ b/docs-site/anki-integration.md @@ -213,7 +213,7 @@ Animated AVIF requires an AV1 encoder (`libaom-av1`, `libsvtav1`, or `librav1e`) } ``` -`overwriteAudio` applies to automatic card updates and duplicate-card enrichment. 
Manual clipboard subtitle updates (`Ctrl/Cmd+C`, then `Ctrl/Cmd+V`) always replace generated audio in both the expression audio field and sentence audio field. +`overwriteAudio` applies to automatic card updates and duplicate-card enrichment. Manual clipboard subtitle updates (`Ctrl/Cmd+C`, then `Ctrl/Cmd+V`) always replace generated sentence audio, while leaving the word audio field unchanged. ## AI Translation diff --git a/docs-site/configuration.md b/docs-site/configuration.md index eb8e2fbf..5654bfbe 100644 --- a/docs-site/configuration.md +++ b/docs-site/configuration.md @@ -537,7 +537,7 @@ See `config.example.jsonc` for detailed configuration options. "markAudioCard": "CommandOrControl+Shift+A", "openCharacterDictionary": "CommandOrControl+Alt+A", "openRuntimeOptions": "CommandOrControl+Shift+O", - "openSessionHelp": "CommandOrControl+Shift+H", + "openSessionHelp": "CommandOrControl+Slash", "openControllerSelect": "Alt+C", "openControllerDebug": "Alt+Shift+C", "openJimaku": "Ctrl+Shift+J", @@ -562,7 +562,7 @@ See `config.example.jsonc` for detailed configuration options. 
| `markAudioCard` | string \| `null` | Accelerator for marking last card as audio card (default: `"CommandOrControl+Shift+A"`) | | `openCharacterDictionary` | string \| `null` | Opens the character dictionary AniList selector (default: `"CommandOrControl+Alt+A"`) | | `openRuntimeOptions` | string \| `null` | Opens runtime options palette for live session-only toggles (default: `"CommandOrControl+Shift+O"`) | -| `openSessionHelp` | string \| `null` | Opens the in-overlay session help modal (default: `"CommandOrControl+Shift+H"`) | +| `openSessionHelp` | string \| `null` | Opens the in-overlay session help modal (default: `"CommandOrControl+Slash"`) | | `openControllerSelect` | string \| `null` | Opens the controller config/remap modal (default: `"Alt+C"`) | | `openControllerDebug` | string \| `null` | Opens the controller debug modal (default: `"Alt+Shift+C"`) | | `openJimaku` | string \| `null` | Opens the Jimaku search modal (default: `"Ctrl+Shift+J"`) | @@ -706,7 +706,7 @@ These shortcuts are only active when the overlay window is visible and automatic ### Session Help Modal -The session help modal opens from the overlay with `Ctrl/Cmd+Shift+H` by default. The mpv plugin also exposes it through the `Y-H` chord (falling back to `Y-K` if needed). It shows the current session keybindings and color legend. +The session help modal opens from the overlay with `Ctrl/Cmd+/` by default. The mpv plugin also exposes it through the `Y-H` chord (falling back to `Y-K` if needed). It shows the current session keybindings and color legend. You can filter the modal quickly with `/`: @@ -893,7 +893,7 @@ This example is intentionally compact. 
The option table below documents available options.
-Manual clipboard updates always replace generated audio in both the expression audio field and sentence audio field, even when `ankiConnect.behavior.overwriteAudio` is disabled. The manual flow assumes you are intentionally replacing the proxy-generated clip on the newest card. +Manual clipboard updates always replace generated sentence audio, even when `ankiConnect.behavior.overwriteAudio` is disabled. The word audio field is left unchanged because the word itself does not change in this flow. This is useful when auto-update is disabled or when you want explicit control over which subtitle line gets attached to the card. diff --git a/docs-site/public/config.example.jsonc b/docs-site/public/config.example.jsonc index 1b3470ed..d734c08a 100644 --- a/docs-site/public/config.example.jsonc +++ b/docs-site/public/config.example.jsonc @@ -175,7 +175,7 @@ "openCharacterDictionary": "CommandOrControl+Alt+A", // Open character dictionary setting. "openRuntimeOptions": "CommandOrControl+Shift+O", // Open runtime options setting. "openJimaku": "Ctrl+Shift+J", // Open jimaku setting. - "openSessionHelp": "CommandOrControl+Shift+H", // Open session help setting. + "openSessionHelp": "CommandOrControl+Slash", // Open session help setting. "openControllerSelect": "Alt+C", // Open controller select setting. "openControllerDebug": "Alt+Shift+C", // Open controller debug setting. "toggleSubtitleSidebar": "Backslash" // Toggle subtitle sidebar setting. 
diff --git a/docs-site/shortcuts.md b/docs-site/shortcuts.md index b37a0e8c..8b4e263d 100644 --- a/docs-site/shortcuts.md +++ b/docs-site/shortcuts.md @@ -69,7 +69,7 @@ Mouse-hover playback behavior is configured separately from shortcuts: `subtitle | `Ctrl/Cmd+Shift+V` | Cycle secondary subtitle mode (hidden → visible → hover) | `shortcuts.toggleSecondarySub` | | `Ctrl/Cmd+Alt+A` | Open character dictionary AniList selector | `shortcuts.openCharacterDictionary` | | `Ctrl/Cmd+Shift+O` | Open runtime options palette | `shortcuts.openRuntimeOptions` | -| `Ctrl/Cmd+Shift+H` | Open session help modal | `shortcuts.openSessionHelp` | +| `Ctrl/Cmd+/` | Open session help modal | `shortcuts.openSessionHelp` | | `Ctrl+Shift+J` | Open Jimaku subtitle search modal | `shortcuts.openJimaku` | | `Ctrl+Alt+C` | Open the manual YouTube subtitle picker | `keybindings` | | `Ctrl+Alt+S` | Open subtitle sync (subsync) modal | `shortcuts.triggerSubsync` | diff --git a/docs-site/troubleshooting.md b/docs-site/troubleshooting.md index a6f9f255..c4ac5059 100644 --- a/docs-site/troubleshooting.md +++ b/docs-site/troubleshooting.md @@ -324,6 +324,10 @@ Add a `pass` rule for each global shortcut you configure. The defaults are `Alt+ Without these rules, Hyprland intercepts the keypresses before they reach SubMiner, and the shortcuts silently do nothing. +**Overlay stays behind mpv after fullscreen** + +SubMiner watches mpv's `fullscreen` property and refreshes the overlay geometry when it changes. If the overlay still does not move or rise above fullscreen mpv, confirm that the mpv IPC socket is connected and that `hyprctl -j clients` and `hyprctl -j monitors` work from the same environment that launched SubMiner. + For more details, see the Hyprland docs on [global keybinds](https://wiki.hypr.land/Configuring/Binds/#global-keybinds) and [window rules](https://wiki.hypr.land/Configuring/Window-Rules/). 
### macOS diff --git a/docs-site/usage.md b/docs-site/usage.md index 8d69c74a..947ab1e8 100644 --- a/docs-site/usage.md +++ b/docs-site/usage.md @@ -334,7 +334,7 @@ Useful overlay-local default keybinding: `Ctrl+Alt+P` opens the playlist browser Press `V` to hide or restore the primary SubMiner subtitle bar. The mpv plugin also binds bare `v` to the same action, overriding mpv's native primary subtitle visibility toggle. -`Ctrl/Cmd+Shift+H` opens the session help modal with the current overlay and mpv keybindings. If you use the mpv plugin, the same help view is also available through the `y-h` chord. +`Ctrl/Cmd+/` opens the session help modal with the current overlay and mpv keybindings. If you use the mpv plugin, the same help view is also available through the `y-h` chord. Hovering over subtitle text pauses mpv by default; leaving resumes it. Yomitan popups also pause playback by default. Set `subtitleStyle.autoPauseVideoOnHover: false` or `subtitleStyle.autoPauseVideoOnYomitanPopup: false` to disable either behavior. 
diff --git a/package.json b/package.json index 19b24877..ed8decf8 100644 --- a/package.json +++ b/package.json @@ -45,10 +45,10 @@ "test:config:src": "bun test src/config/config.test.ts src/config/path-resolution.test.ts src/config/resolve/anki-connect.test.ts src/config/resolve/integrations.test.ts src/config/resolve/subtitle-style.test.ts src/config/resolve/jellyfin.test.ts src/config/definitions/domain-registry.test.ts src/generate-config-example.test.ts src/verify-config-example.test.ts", "test:config:dist": "bun test dist/config/config.test.js dist/config/path-resolution.test.js dist/config/resolve/anki-connect.test.js dist/config/resolve/integrations.test.js dist/config/resolve/subtitle-style.test.js dist/config/resolve/jellyfin.test.js dist/config/definitions/domain-registry.test.js dist/generate-config-example.test.js dist/verify-config-example.test.js", "test:config:smoke:dist": "bun test dist/config/path-resolution.test.js", - "test:plugin:src": "lua scripts/test-plugin-lua-compat.lua && lua scripts/test-plugin-start-gate.lua && lua scripts/test-plugin-binary-windows.lua", + "test:plugin:src": "lua scripts/test-plugin-lua-compat.lua && lua scripts/test-plugin-start-gate.lua && lua scripts/test-plugin-session-bindings.lua && lua scripts/test-plugin-binary-windows.lua", "test:launcher:smoke:src": "bun test launcher/smoke.e2e.test.ts", "test:launcher:src": "bun test launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/config/cli-parser-builder.test.ts launcher/config/args-normalizer.test.ts launcher/mpv.test.ts launcher/picker.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/smoke.e2e.test.ts && bun run test:plugin:src", - "test:core:src": "bun test src/cli/args.test.ts src/cli/help.test.ts src/shared/setup-state.test.ts src/core/services/cli-command.test.ts src/core/services/field-grouping-overlay.test.ts src/core/services/numeric-shortcut-session.test.ts 
src/core/services/secondary-subtitle.test.ts src/core/services/mpv-render-metrics.test.ts src/core/services/overlay-content-measurement.test.ts src/core/services/mpv-control.test.ts src/core/services/mpv.test.ts src/core/services/runtime-options-ipc.test.ts src/core/services/runtime-config.test.ts src/core/services/yomitan-extension-paths.test.ts src/core/services/config-hot-reload.test.ts src/core/services/discord-presence.test.ts src/core/services/tokenizer.test.ts src/core/services/tokenizer/annotation-stage.test.ts src/core/services/tokenizer/parser-selection-stage.test.ts src/core/services/tokenizer/parser-enrichment-stage.test.ts src/core/services/subsync.test.ts src/core/services/overlay-bridge.test.ts src/core/services/overlay-shortcut-handler.test.ts src/core/services/stats-window.test.ts src/core/services/__tests__/stats-server.test.ts src/main/runtime/stats-server-routing.test.ts src/core/services/mining.test.ts src/core/services/anki-jimaku.test.ts src/core/services/jimaku-download-path.test.ts src/core/services/jellyfin.test.ts src/core/services/jellyfin-remote.test.ts src/core/services/immersion-tracker-service.test.ts src/core/services/overlay-runtime-init.test.ts src/core/services/app-ready.test.ts src/core/services/startup-bootstrap.test.ts src/core/services/subtitle-processing-controller.test.ts src/core/services/anilist/anilist-update-queue.test.ts src/core/services/anilist/rate-limiter.test.ts src/core/services/jlpt-token-filter.test.ts src/core/services/subtitle-position.test.ts src/core/utils/shortcut-config.test.ts src/main/runtime/first-run-setup-plugin.test.ts src/main/runtime/first-run-setup-service.test.ts src/main/runtime/first-run-setup-window.test.ts src/main/runtime/tray-runtime.test.ts src/main/runtime/tray-main-actions.test.ts src/main/runtime/tray-main-deps.test.ts src/main/runtime/tray-runtime-handlers.test.ts src/main/runtime/cli-command-context-main-deps.test.ts src/main/runtime/app-ready-main-deps.test.ts 
src/renderer/error-recovery.test.ts src/renderer/subtitle-render.test.ts src/renderer/handlers/mouse.test.ts src/renderer/handlers/keyboard.test.ts src/renderer/modals/jimaku.test.ts src/subsync/utils.test.ts src/main/anilist-url-guard.test.ts src/window-trackers/hyprland-tracker.test.ts src/window-trackers/x11-tracker.test.ts src/window-trackers/windows-helper.test.ts src/window-trackers/windows-tracker.test.ts launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/config/cli-parser-builder.test.ts launcher/config/args-normalizer.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/setup-gate.test.ts stats/src/lib/api-client.test.ts stats/src/hooks/useExcludedWords.test.ts", + "test:core:src": "bun test src/cli/args.test.ts src/cli/help.test.ts src/shared/setup-state.test.ts src/core/services/cli-command.test.ts src/core/services/field-grouping-overlay.test.ts src/core/services/numeric-shortcut-session.test.ts src/core/services/secondary-subtitle.test.ts src/core/services/mpv-render-metrics.test.ts src/core/services/overlay-content-measurement.test.ts src/core/services/mpv-control.test.ts src/core/services/mpv.test.ts src/core/services/runtime-options-ipc.test.ts src/core/services/runtime-config.test.ts src/core/services/yomitan-extension-paths.test.ts src/core/services/config-hot-reload.test.ts src/core/services/discord-presence.test.ts src/core/services/tokenizer.test.ts src/core/services/tokenizer/annotation-stage.test.ts src/core/services/tokenizer/parser-selection-stage.test.ts src/core/services/tokenizer/parser-enrichment-stage.test.ts src/core/services/subsync.test.ts src/core/services/overlay-bridge.test.ts src/core/services/overlay-shortcut-handler.test.ts src/core/services/stats-window.test.ts src/core/services/__tests__/stats-server.test.ts src/main/runtime/stats-server-routing.test.ts src/core/services/mining.test.ts src/core/services/anki-jimaku.test.ts 
src/core/services/jimaku-download-path.test.ts src/core/services/jellyfin.test.ts src/core/services/jellyfin-remote.test.ts src/core/services/immersion-tracker-service.test.ts src/core/services/overlay-runtime-init.test.ts src/core/services/app-ready.test.ts src/core/services/startup-bootstrap.test.ts src/core/services/subtitle-processing-controller.test.ts src/core/services/anilist/anilist-update-queue.test.ts src/core/services/anilist/rate-limiter.test.ts src/core/services/jlpt-token-filter.test.ts src/core/services/subtitle-position.test.ts src/core/utils/shortcut-config.test.ts src/main/runtime/first-run-setup-plugin.test.ts src/main/runtime/first-run-setup-service.test.ts src/main/runtime/first-run-setup-window.test.ts src/main/runtime/tray-runtime.test.ts src/main/runtime/tray-main-actions.test.ts src/main/runtime/tray-main-deps.test.ts src/main/runtime/tray-runtime-handlers.test.ts src/main/runtime/cli-command-context-main-deps.test.ts src/main/runtime/app-ready-main-deps.test.ts src/renderer/error-recovery.test.ts src/renderer/subtitle-render.test.ts src/renderer/handlers/mouse.test.ts src/renderer/handlers/keyboard.test.ts src/renderer/modals/jimaku.test.ts src/subsync/utils.test.ts src/main/anilist-url-guard.test.ts src/window-trackers/hyprland-tracker.test.ts src/window-trackers/x11-tracker.test.ts src/window-trackers/windows-helper.test.ts src/window-trackers/windows-tracker.test.ts launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/config/cli-parser-builder.test.ts launcher/config/args-normalizer.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/setup-gate.test.ts stats/src/lib/api-client.test.ts stats/src/hooks/useExcludedWords.test.ts stats/src/styles/globals.test.ts", "test:core:dist": "bun test dist/cli/args.test.js dist/cli/help.test.js dist/core/services/cli-command.test.js dist/core/services/ipc.test.js dist/core/services/anki-jimaku-ipc.test.js 
dist/core/services/field-grouping-overlay.test.js dist/core/services/numeric-shortcut-session.test.js dist/core/services/secondary-subtitle.test.js dist/core/services/mpv-render-metrics.test.js dist/core/services/overlay-content-measurement.test.js dist/core/services/mpv-control.test.js dist/core/services/mpv.test.js dist/core/services/runtime-options-ipc.test.js dist/core/services/runtime-config.test.js dist/core/services/yomitan-extension-paths.test.js dist/core/services/config-hot-reload.test.js dist/core/services/discord-presence.test.js dist/core/services/tokenizer.test.js dist/core/services/tokenizer/annotation-stage.test.js dist/core/services/tokenizer/parser-selection-stage.test.js dist/core/services/tokenizer/parser-enrichment-stage.test.js dist/core/services/subsync.test.js dist/core/services/overlay-bridge.test.js dist/core/services/overlay-manager.test.js dist/core/services/overlay-shortcut-handler.test.js dist/core/services/mining.test.js dist/core/services/anki-jimaku.test.js dist/core/services/jimaku-download-path.test.js dist/core/services/jellyfin.test.js dist/core/services/jellyfin-remote.test.js dist/core/services/immersion-tracker-service.test.js dist/core/services/overlay-runtime-init.test.js dist/core/services/app-ready.test.js dist/core/services/startup-bootstrap.test.js dist/core/services/subtitle-processing-controller.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/anilist/anilist-update-queue.test.js dist/core/services/anilist/rate-limiter.test.js dist/core/services/jlpt-token-filter.test.js dist/core/services/subtitle-position.test.js dist/renderer/error-recovery.test.js dist/renderer/subtitle-render.test.js dist/renderer/handlers/mouse.test.js dist/renderer/handlers/keyboard.test.js dist/renderer/modals/jimaku.test.js dist/subsync/utils.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/hyprland-tracker.test.js dist/window-trackers/x11-tracker.test.js 
dist/window-trackers/windows-helper.test.js dist/window-trackers/windows-tracker.test.js", "test:core:smoke:dist": "bun test dist/cli/help.test.js dist/core/services/runtime-config.test.js dist/core/services/ipc.test.js dist/core/services/overlay-manager.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/startup-bootstrap.test.js dist/renderer/error-recovery.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/x11-tracker.test.js", "test:smoke:dist": "bun run test:config:smoke:dist && bun run test:core:smoke:dist", diff --git a/plugin/subminer/lifecycle.lua b/plugin/subminer/lifecycle.lua index 2a8899a5..fcfedb93 100644 --- a/plugin/subminer/lifecycle.lua +++ b/plugin/subminer/lifecycle.lua @@ -83,11 +83,17 @@ function M.create(ctx) return end - aniskip.clear_aniskip_state() - process.disarm_auto_play_ready_gate() - local has_matching_socket = rearm_managed_subtitle_defaults() - local should_auto_start = resolve_auto_start_enabled() + local has_matching_socket = process.has_matching_mpv_ipc_socket(opts.socket_path) + local preserve_active_auto_start_gate = ( + state.overlay_running and state.auto_play_ready_gate_armed and should_auto_start and has_matching_socket + ) + aniskip.clear_aniskip_state() + if not preserve_active_auto_start_gate then + process.disarm_auto_play_ready_gate() + end + has_matching_socket = rearm_managed_subtitle_defaults() + if should_auto_start then if not has_matching_socket then subminer_log( diff --git a/plugin/subminer/process.lua b/plugin/subminer/process.lua index 79c6225c..ab73799f 100644 --- a/plugin/subminer/process.lua +++ b/plugin/subminer/process.lua @@ -299,14 +299,7 @@ function M.create(ctx) if overrides.auto_start_trigger == true then subminer_log("debug", "process", "Auto-start ignored because overlay is already running") local socket_path = overrides.socket_path or opts.socket_path - local should_pause_until_ready = ( - resolve_visible_overlay_startup() - and 
resolve_pause_until_ready() - and has_matching_mpv_ipc_socket(socket_path) - ) - if should_pause_until_ready then - arm_auto_play_ready_gate() - else + if not state.auto_play_ready_gate_armed then disarm_auto_play_ready_gate() end local visibility_action = resolve_visible_overlay_startup() diff --git a/plugin/subminer/session_bindings.lua b/plugin/subminer/session_bindings.lua index 718a1041..fee7ec14 100644 --- a/plugin/subminer/session_bindings.lua +++ b/plugin/subminer/session_bindings.lua @@ -96,16 +96,30 @@ function M.create(ctx) return nil end + local shifted_letter = key.code:match("^Key([A-Z])$") + local has_shift = false + for _, modifier in ipairs(key.modifiers) do + if modifier == "shift" then + has_shift = true + break + end + end + local key_name = key_code_to_mpv_name(key.code) + if shifted_letter and has_shift then + key_name = shifted_letter + end if not key_name then return nil end local parts = {} for _, modifier in ipairs(key.modifiers) do - local mapped = MODIFIER_MAP[modifier] - if mapped then - parts[#parts + 1] = mapped + if not (modifier == "shift" and shifted_letter) then + local mapped = MODIFIER_MAP[modifier] + if mapped then + parts[#parts + 1] = mapped + end end end parts[#parts + 1] = key_name @@ -145,6 +159,8 @@ function M.create(ctx) return { "--open-youtube-picker" } elseif action_id == "openSessionHelp" then return { "--open-session-help" } + elseif action_id == "openCharacterDictionary" then + return { "--open-character-dictionary" } elseif action_id == "openControllerSelect" then return { "--open-controller-select" } elseif action_id == "openControllerDebug" then @@ -225,16 +241,39 @@ function M.create(ctx) end end - local function start_numeric_selection(action_id, timeout_ms) + local function build_modifier_prefixes(modifiers) + local prefixes = { "" } + if type(modifiers) ~= "table" then + return prefixes + end + + for _, modifier in ipairs(modifiers) do + local mapped = MODIFIER_MAP[modifier] + if mapped then + local 
existing_count = #prefixes + for index = 1, existing_count do + prefixes[#prefixes + 1] = prefixes[index] .. mapped .. "+" + end + end + end + return prefixes + end + + local function start_numeric_selection(action_id, timeout_ms, starter_modifiers) clear_numeric_selection(false) + local modifier_prefixes = build_modifier_prefixes(starter_modifiers) for digit = 1, 9 do local digit_string = tostring(digit) - local name = "subminer-session-digit-" .. digit_string - state.session_numeric_binding_names[#state.session_numeric_binding_names + 1] = name - mp.add_forced_key_binding(digit_string, name, function() - clear_numeric_selection(false) - invoke_cli_action(action_id, { count = digit }) - end) + for _, prefix in ipairs(modifier_prefixes) do + local key_name = prefix .. digit_string + local modifier_name = prefix:gsub("[^%w]", "-") + local name = "subminer-session-digit-" .. modifier_name .. digit_string + state.session_numeric_binding_names[#state.session_numeric_binding_names + 1] = name + mp.add_forced_key_binding(key_name, name, function() + clear_numeric_selection(false) + invoke_cli_action(action_id, { count = digit }) + end) + end end state.session_numeric_binding_names[#state.session_numeric_binding_names + 1] = @@ -272,7 +311,7 @@ function M.create(ctx) end if binding.actionId == "copySubtitleMultiple" or binding.actionId == "mineSentenceMultiple" then - start_numeric_selection(binding.actionId, numeric_selection_timeout_ms) + start_numeric_selection(binding.actionId, numeric_selection_timeout_ms, binding.key.modifiers) return end diff --git a/scripts/test-plugin-session-bindings.lua b/scripts/test-plugin-session-bindings.lua new file mode 100644 index 00000000..40552dd5 --- /dev/null +++ b/scripts/test-plugin-session-bindings.lua @@ -0,0 +1,200 @@ +package.path = "plugin/subminer/?.lua;" .. 
package.path + +local session_bindings = require("session_bindings") + +local function assert_true(condition, message) + if condition then + return + end + error(message) +end + +local artifact_path = ".tmp/test-plugin-session-bindings.json" +local is_windows = package.config:sub(1, 1) == "\\" +local mkdir_cmd = is_windows and "mkdir .tmp >NUL 2>NUL" or "mkdir -p .tmp" +os.execute(mkdir_cmd) +local handle = assert(io.open(artifact_path, "w")) +handle:write("__SESSION_BINDINGS__") +handle:close() + +local recorded = { + bindings = {}, + removed = {}, + async_calls = {}, + osd = {}, +} + +local mp = {} + +function mp.add_forced_key_binding(keys, name, fn) + recorded.bindings[#recorded.bindings + 1] = { + keys = keys, + name = name, + fn = fn, + } +end + +function mp.remove_key_binding(name) + recorded.removed[#recorded.removed + 1] = name +end + +function mp.add_timeout(seconds, callback) + return { + seconds = seconds, + callback = callback, + killed = false, + kill = function(self) + self.killed = true + end, + } +end + +function mp.osd_message(message) + recorded.osd[#recorded.osd + 1] = message +end + +local ctx = { + mp = mp, + utils = { + parse_json = function(raw) + if raw ~= "__SESSION_BINDINGS__" then + return nil, "unexpected artifact" + end + return { + numericSelectionTimeoutMs = 3000, + bindings = { + { + key = { + code = "KeyS", + modifiers = { "ctrl", "shift" }, + }, + actionType = "session-action", + actionId = "mineSentenceMultiple", + }, + { + key = { + code = "KeyL", + modifiers = { "ctrl", "shift" }, + }, + actionType = "session-action", + actionId = "playNextSubtitle", + }, + { + key = { + code = "KeyA", + modifiers = { "alt", "meta" }, + }, + actionType = "session-action", + actionId = "openCharacterDictionary", + }, + { + key = { + code = "KeyL", + modifiers = { "shift" }, + }, + actionType = "mpv-command", + command = { "sub-seek", 1 }, + }, + }, + }, nil + end, + }, + state = { + binary_path = "/tmp/subminer", + session_binding_names = {}, + 
session_numeric_binding_names = {}, + session_numeric_selection = nil, + }, + process = { + check_binary_available = function() + return true + end, + run_binary_command_async = function(args) + recorded.async_calls[#recorded.async_calls + 1] = args + end, + }, + environment = { + resolve_session_bindings_artifact_path = function() + return artifact_path + end, + }, + log = { + subminer_log = function() end, + show_osd = function(message) + recorded.osd[#recorded.osd + 1] = message + end, + }, +} + +local bindings = session_bindings.create(ctx) +assert_true(bindings.register_bindings(), "session bindings should register") + +local starter = nil +for _, binding in ipairs(recorded.bindings) do + if binding.keys == "Ctrl+S" then + starter = binding + break + end +end +assert_true(starter ~= nil, "multi-mine starter binding should be registered") + +local play_next = nil +for _, binding in ipairs(recorded.bindings) do + if binding.keys == "Ctrl+L" then + play_next = binding + break + end +end +assert_true(play_next ~= nil, "play-next subtitle binding should use mpv shifted-letter form") + +local subtitle_jump = nil +for _, binding in ipairs(recorded.bindings) do + if binding.keys == "L" then + subtitle_jump = binding + break + end +end +assert_true(subtitle_jump ~= nil, "shifted subtitle jump binding should use mpv uppercase letter form") + +play_next.fn() +local play_next_call = recorded.async_calls[#recorded.async_calls] +assert_true(play_next_call ~= nil, "play-next binding should invoke CLI action") +assert_true(play_next_call[2] == "--play-next-subtitle", "play-next binding should pass CLI flag") + +local character_dictionary = nil +for _, binding in ipairs(recorded.bindings) do + if binding.keys == "Alt+Meta+a" then + character_dictionary = binding + break + end +end +assert_true(character_dictionary ~= nil, "character dictionary binding should be registered") + +character_dictionary.fn() +local character_dictionary_call = 
recorded.async_calls[#recorded.async_calls] +assert_true(character_dictionary_call ~= nil, "character dictionary binding should invoke CLI action") +assert_true( + character_dictionary_call[2] == "--open-character-dictionary", + "character dictionary binding should pass CLI flag" +) + +starter.fn() + +local modified_digit = nil +for _, binding in ipairs(recorded.bindings) do + if binding.keys == "Ctrl+Shift+3" then + modified_digit = binding + break + end +end +assert_true(modified_digit ~= nil, "numeric selection should bind Ctrl+Shift+3") + +modified_digit.fn() + +local call = recorded.async_calls[#recorded.async_calls] +assert_true(call ~= nil, "modified digit should invoke CLI action") +assert_true(call[1] == "/tmp/subminer", "CLI action should use configured binary") +assert_true(call[2] == "--mine-sentence-count", "CLI action should mine sentence count") +assert_true(call[3] == "3", "CLI action should pass selected count") + +print("plugin session binding regression tests: OK") diff --git a/scripts/test-plugin-start-gate.lua b/scripts/test-plugin-start-gate.lua index 9cf0c2ac..0656e777 100644 --- a/scripts/test-plugin-start-gate.lua +++ b/scripts/test-plugin-start-gate.lua @@ -559,6 +559,49 @@ do ) end +do + local recorded, err = run_plugin_scenario({ + process_list = "", + option_overrides = { + binary_path = binary_path, + auto_start = "yes", + auto_start_visible_overlay = "yes", + auto_start_pause_until_ready = "yes", + socket_path = "/tmp/subminer-socket", + }, + input_ipc_server = "/tmp/subminer-socket", + media_title = "Random Movie", + files = { + [binary_path] = true, + }, + }) + assert_true(recorded ~= nil, "plugin failed to load for pre-ready duplicate auto-start scenario: " .. 
tostring(err)) + fire_event(recorded, "file-loaded") + fire_event(recorded, "file-loaded") + assert_true(recorded.script_messages["subminer-autoplay-ready"] ~= nil, "subminer-autoplay-ready script message not registered") + assert_true( + count_start_calls(recorded.async_calls) == 1, + "pre-ready duplicate auto-start should not issue duplicate --start commands" + ) + assert_true( + count_property_set(recorded.property_sets, "pause", true) == 1, + "pre-ready duplicate auto-start should not repeat the pause gate" + ) + assert_true( + count_property_set(recorded.property_sets, "pause", false) == 0, + "pre-ready duplicate auto-start should not resume playback before tokenization is ready" + ) + assert_true( + count_osd_message(recorded.osd, "SubMiner: Loading subtitle tokenization...") == 1, + "pre-ready duplicate auto-start should not repeat the loading OSD" + ) + recorded.script_messages["subminer-autoplay-ready"]() + assert_true( + count_property_set(recorded.property_sets, "pause", false) == 1, + "autoplay-ready should resume the original pre-ready gate" + ) +end + do local recorded, err = run_plugin_scenario({ process_list = "", @@ -906,23 +949,23 @@ do ) assert_true( count_control_calls(recorded.async_calls, "--show-visible-overlay") == 4, - "duplicate pause-until-ready auto-start should re-assert visible overlay on both start and ready events" + "duplicate pause-until-ready auto-start should re-assert visible overlay on initial start, ready, and later file load" ) assert_true( - count_osd_message(recorded.osd, "SubMiner: Loading subtitle tokenization...") == 2, - "duplicate pause-until-ready auto-start should arm tokenization loading gate for each file" + count_osd_message(recorded.osd, "SubMiner: Loading subtitle tokenization...") == 1, + "duplicate pause-until-ready auto-start should not repeat tokenization loading gate after overlay is running" ) assert_true( - count_osd_message(recorded.osd, "SubMiner: Subtitle tokenization ready") == 2, - "duplicate 
pause-until-ready auto-start should release tokenization gate for each file" + count_osd_message(recorded.osd, "SubMiner: Subtitle tokenization ready") == 1, + "duplicate pause-until-ready auto-start should not wait for a second readiness signal after overlay is running" ) assert_true( - count_property_set(recorded.property_sets, "pause", true) == 2, - "duplicate pause-until-ready auto-start should force pause for each file" + count_property_set(recorded.property_sets, "pause", true) == 1, + "duplicate pause-until-ready auto-start should not force pause after overlay is running" ) assert_true( - count_property_set(recorded.property_sets, "pause", false) == 2, - "duplicate pause-until-ready auto-start should resume playback for each file" + count_property_set(recorded.property_sets, "pause", false) == 1, + "duplicate pause-until-ready auto-start should not resume a gate that was never rearmed" ) end diff --git a/src/anki-integration.test.ts b/src/anki-integration.test.ts index a3fbf85b..a6531348 100644 --- a/src/anki-integration.test.ts +++ b/src/anki-integration.test.ts @@ -177,6 +177,44 @@ test('AnkiIntegration.refreshKnownWordCache skips work when highlight mode is di } }); +test('AnkiIntegration notifies when mined note info updates known words', () => { + const ctx = createIntegrationTestContext({ + stateDirPrefix: 'subminer-anki-integration-known-update-', + }); + let notifications = 0; + + try { + const integrationState = ctx.integration as unknown as { + config: AnkiConnectConfig; + appendKnownWordsFromNoteInfo: (noteInfo: { + noteId: number; + fields: Record; + }) => void; + }; + integrationState.config.deck = 'Mining'; + integrationState.config.knownWords = { + ...integrationState.config.knownWords, + decks: { + Mining: ['Word'], + }, + }; + ctx.integration.setKnownWordCacheUpdatedCallback(() => { + notifications += 1; + }); + integrationState.appendKnownWordsFromNoteInfo({ + noteId: 42, + fields: { + Word: { value: '食べる' }, + }, + }); + + 
assert.equal(ctx.integration.isKnownWord('食べる'), true); + assert.equal(notifications, 1); + } finally { + cleanupIntegrationTestContext(ctx); + } +}); + test('AnkiIntegration.refreshKnownWordCache deduplicates concurrent refreshes', async () => { let releaseFindNotes: (() => void) | undefined; const findNotesPromise = new Promise((resolve) => { diff --git a/src/anki-integration.ts b/src/anki-integration.ts index 01b2ab17..f477788b 100644 --- a/src/anki-integration.ts +++ b/src/anki-integration.ts @@ -148,6 +148,7 @@ export class AnkiIntegration { private runtime: AnkiIntegrationRuntime; private aiConfig: AiConfig; private recordCardsMinedCallback: ((count: number, noteIds?: number[]) => void) | null = null; + private knownWordCacheUpdatedCallback: (() => void) | null = null; private noteIdRedirects = new Map(); private trackedDuplicateNoteIds = new Map(); @@ -552,10 +553,25 @@ export class AnkiIntegration { return; } - this.knownWordCache.appendFromNoteInfo({ + const changed = this.knownWordCache.appendFromNoteInfo({ noteId: noteInfo.noteId, fields: noteInfo.fields, }); + if (changed) { + this.notifyKnownWordCacheUpdated(); + } + } + + private notifyKnownWordCacheUpdated(): void { + if (!this.knownWordCacheUpdatedCallback) { + return; + } + + try { + this.knownWordCacheUpdatedCallback(); + } catch (error) { + log.warn('Known-word cache update callback failed:', (error as Error).message); + } } private getLapisConfig(): { @@ -1267,6 +1283,10 @@ export class AnkiIntegration { this.recordCardsMinedCallback = callback; } + setKnownWordCacheUpdatedCallback(callback: (() => void) | null): void { + this.knownWordCacheUpdatedCallback = callback; + } + resolveCurrentNoteId(noteId: number): number { let resolved = noteId; const seen = new Set(); diff --git a/src/anki-integration/card-creation-manual-update.test.ts b/src/anki-integration/card-creation-manual-update.test.ts index fe1bea29..e33de200 100644 --- a/src/anki-integration/card-creation-manual-update.test.ts +++ 
b/src/anki-integration/card-creation-manual-update.test.ts @@ -126,7 +126,7 @@ function createManualUpdateService(overrides: Partial = {}): { }; } -test('manual clipboard subtitle update replaces expression and sentence audio even when overwriteAudio is disabled', async () => { +test('manual clipboard subtitle update replaces sentence audio without touching expression audio', async () => { const { service, updatedFields, mergeCalls, storedMedia } = createManualUpdateService(); await service.updateLastAddedFromClipboard('字幕'); @@ -134,10 +134,44 @@ test('manual clipboard subtitle update replaces expression and sentence audio ev assert.equal(updatedFields.length, 1); assert.equal(storedMedia.length, 1); const audioValue = `[sound:${storedMedia[0]}]`; - assert.equal(updatedFields[0]?.ExpressionAudio, audioValue); assert.equal(updatedFields[0]?.SentenceAudio, audioValue); + assert.equal('ExpressionAudio' in updatedFields[0]!, false); assert.deepEqual( mergeCalls.map((call) => call.overwrite), - [true, true], + [true], ); }); + +test('manual clipboard subtitle update skips audio when sentence audio field is missing', async () => { + const { service, updatedFields, mergeCalls, storedMedia } = createManualUpdateService({ + client: { + addNote: async () => 0, + addTags: async () => undefined, + notesInfo: async () => [ + { + noteId: 42, + fields: { + Expression: { value: '単語' }, + Sentence: { value: '' }, + ExpressionAudio: { value: '[sound:auto-expression.mp3]' }, + }, + }, + ], + updateNoteFields: async (_noteId, fields) => { + updatedFields.push(fields); + }, + storeMediaFile: async (filename) => { + storedMedia.push(filename); + }, + findNotes: async () => [42], + retrieveMediaFile: async () => '', + }, + }); + + await service.updateLastAddedFromClipboard('字幕'); + + assert.equal(storedMedia.length, 1); + assert.equal(updatedFields.length, 1); + assert.deepEqual(updatedFields[0], { Sentence: '字幕' }); + assert.equal(mergeCalls.length, 0); +}); diff --git 
a/src/anki-integration/card-creation.ts b/src/anki-integration/card-creation.ts index 2d2fc536..a5349fd8 100644 --- a/src/anki-integration/card-creation.ts +++ b/src/anki-integration/card-creation.ts @@ -218,11 +218,7 @@ export class CardCreationService { fields, this.deps.getConfig(), ); - const sentenceAudioField = this.getResolvedSentenceAudioFieldName(noteInfo); - const expressionAudioField = this.deps.resolveConfiguredFieldName( - noteInfo, - this.deps.getConfig().fields?.audio || 'ExpressionAudio', - ); + const sentenceAudioField = this.getResolvedSentenceOnlyAudioFieldName(noteInfo); const sentenceField = this.deps.getEffectiveSentenceCardConfig().sentenceField; const sentence = blocks.join(' '); @@ -252,22 +248,15 @@ export class CardCreationService { if (audioBuffer) { await this.deps.client.storeMediaFile(audioFilename, audioBuffer); - if (sentenceAudioField || expressionAudioField) { + if (sentenceAudioField) { const audioValue = `[sound:${audioFilename}]`; - const audioFields = new Set( - [sentenceAudioField, expressionAudioField].filter( - (fieldName): fieldName is string => Boolean(fieldName), - ), + const existingAudio = noteInfo.fields[sentenceAudioField]?.value || ''; + // Manual clipboard updates intentionally replace old captured sentence audio. + updatedFields[sentenceAudioField] = this.deps.mergeFieldValue( + existingAudio, + audioValue, + true, ); - for (const audioField of audioFields) { - const existingAudio = noteInfo.fields[audioField]?.value || ''; - // Manual clipboard updates intentionally replace old captured audio. 
- updatedFields[audioField] = this.deps.mergeFieldValue( - existingAudio, - audioValue, - true, - ); - } } miscInfoFilename = audioFilename; updatePerformed = true; @@ -732,6 +721,13 @@ export class CardCreationService { ); } + private getResolvedSentenceOnlyAudioFieldName(noteInfo: CardCreationNoteInfo): string | null { + return this.deps.resolveNoteFieldName( + noteInfo, + this.deps.getEffectiveSentenceCardConfig().audioField || 'SentenceAudio', + ); + } + private createPendingNoteInfo(fields: Record): CardCreationNoteInfo { return { noteId: -1, diff --git a/src/anki-integration/known-word-cache.test.ts b/src/anki-integration/known-word-cache.test.ts index 4db0db9c..6ce4ba23 100644 --- a/src/anki-integration/known-word-cache.test.ts +++ b/src/anki-integration/known-word-cache.test.ts @@ -520,6 +520,51 @@ test('KnownWordCacheManager uses the current deck fields for immediate append', } }); +test('KnownWordCacheManager reports immediate append cache clears as mutations', () => { + const config: AnkiConnectConfig = { + fields: { + word: 'Expression', + }, + knownWords: { + highlightEnabled: true, + refreshMinutes: 60, + }, + }; + const { manager, statePath, cleanup } = createKnownWordCacheHarness(config); + + try { + fs.writeFileSync( + statePath, + JSON.stringify({ + version: 2, + refreshedAtMs: Date.now(), + scope: '{"refreshMinutes":60,"scope":"is:note","fieldsWord":"Expression"}', + words: ['猫'], + notes: { + '1': ['猫'], + }, + }), + 'utf-8', + ); + manager.startLifecycle(); + assert.equal(manager.isKnownWord('猫'), true); + + config.fields = { word: 'Word' }; + const changed = manager.appendFromNoteInfo({ + noteId: 2, + fields: { + Word: { value: '' }, + }, + }); + + assert.equal(changed, true); + assert.equal(manager.isKnownWord('猫'), false); + } finally { + manager.stopLifecycle(); + cleanup(); + } +}); + test('KnownWordCacheManager skips immediate append when addMinedWordsImmediately is disabled', () => { const config: AnkiConnectConfig = { knownWords: { diff 
--git a/src/anki-integration/known-word-cache.ts b/src/anki-integration/known-word-cache.ts index a4de17cc..5bc72fd6 100644 --- a/src/anki-integration/known-word-cache.ts +++ b/src/anki-integration/known-word-cache.ts @@ -165,13 +165,15 @@ export class KnownWordCacheManager { } } - appendFromNoteInfo(noteInfo: KnownWordCacheNoteInfo): void { + appendFromNoteInfo(noteInfo: KnownWordCacheNoteInfo): boolean { if (!this.isKnownWordCacheEnabled() || !this.shouldAddMinedWordsImmediately()) { - return; + return false; } + let didMutateCache = false; const currentStateKey = this.getKnownWordCacheStateKey(); if (this.knownWordsStateKey && this.knownWordsStateKey !== currentStateKey) { + didMutateCache = this.knownWords.size > 0 || this.noteWordsById.size > 0; this.clearKnownWordCacheState(); } if (!this.knownWordsStateKey) { @@ -180,13 +182,13 @@ export class KnownWordCacheManager { const preferredFields = this.getImmediateAppendFields(); if (!preferredFields) { - return; + return didMutateCache; } const nextWords = this.extractNormalizedKnownWordsFromNoteInfo(noteInfo, preferredFields); const changed = this.replaceNoteSnapshot(noteInfo.noteId, nextWords); if (!changed) { - return; + return didMutateCache; } if (this.knownWordsLastRefreshedAtMs <= 0) { @@ -199,6 +201,7 @@ export class KnownWordCacheManager { `wordCount=${nextWords.length}`, `scope=${getKnownWordCacheScopeForConfig(this.deps.getConfig())}`, ); + return true; } clearKnownWordCacheState(): void { diff --git a/src/config/definitions/defaults-core.ts b/src/config/definitions/defaults-core.ts index b590cbd7..bcff4c9d 100644 --- a/src/config/definitions/defaults-core.ts +++ b/src/config/definitions/defaults-core.ts @@ -89,7 +89,7 @@ export const CORE_DEFAULT_CONFIG: Pick< openCharacterDictionary: 'CommandOrControl+Alt+A', openRuntimeOptions: 'CommandOrControl+Shift+O', openJimaku: 'Ctrl+Shift+J', - openSessionHelp: 'CommandOrControl+Shift+H', + openSessionHelp: 'CommandOrControl+Slash', openControllerSelect: 
'Alt+C', openControllerDebug: 'Alt+Shift+C', toggleSubtitleSidebar: 'Backslash', diff --git a/src/config/definitions/domain-registry.test.ts b/src/config/definitions/domain-registry.test.ts index 8c85b57e..8a0f296f 100644 --- a/src/config/definitions/domain-registry.test.ts +++ b/src/config/definitions/domain-registry.test.ts @@ -92,3 +92,11 @@ test('default keybindings include fullscreen on F', () => { ); assert.deepEqual(keybindingMap.get('KeyF'), ['cycle', 'fullscreen']); }); + +test('default keybindings include replay and next subtitle controls', () => { + const keybindingMap = new Map( + DEFAULT_KEYBINDINGS.map((binding) => [binding.key, binding.command]), + ); + assert.deepEqual(keybindingMap.get('Ctrl+Shift+KeyH'), ['__replay-subtitle']); + assert.deepEqual(keybindingMap.get('Ctrl+Shift+KeyL'), ['__play-next-subtitle']); +}); diff --git a/src/core/services/anilist/anilist-token-store.test.ts b/src/core/services/anilist/anilist-token-store.test.ts index a31d9d19..f36a96af 100644 --- a/src/core/services/anilist/anilist-token-store.test.ts +++ b/src/core/services/anilist/anilist-token-store.test.ts @@ -38,6 +38,24 @@ function createPassthroughStorage(): SafeStorageLike { }; } +function createTransientUnavailableStorage(): SafeStorageLike & { + setAvailable: (next: boolean) => void; +} { + let available = false; + return { + isEncryptionAvailable: () => available, + encryptString: (value: string) => Buffer.from(`enc:${value}`, 'utf-8'), + decryptString: (value: Buffer) => { + const raw = value.toString('utf-8'); + return raw.startsWith('enc:') ? raw.slice(4) : raw; + }, + getSelectedStorageBackend: () => (available ? 
'gnome_libsecret' : 'unknown'), + setAvailable(next: boolean) { + available = next; + }, + } as SafeStorageLike & { setAvailable: (next: boolean) => void }; +} + test('anilist token store saves and loads encrypted token', () => { const filePath = createTempTokenFile(); const store = createAnilistTokenStore(filePath, createLogger(), createStorage(true)); @@ -61,6 +79,27 @@ test('anilist token store refuses to persist token when encryption unavailable', assert.equal(store.loadToken(), null); }); +test('anilist token store retries safeStorage after transient encryption unavailability', () => { + const filePath = createTempTokenFile(); + fs.writeFileSync( + filePath, + JSON.stringify({ + encryptedToken: Buffer.from('enc:stored-token', 'utf-8').toString('base64'), + updatedAt: Date.now(), + }), + 'utf-8', + ); + const storage = createTransientUnavailableStorage(); + const store = createAnilistTokenStore(filePath, createLogger(), storage); + + assert.equal(store.loadToken(), null); + storage.setAvailable(true); + + assert.equal(store.loadToken(), 'stored-token'); + assert.equal(store.saveToken('new-token'), true); + assert.equal(store.loadToken(), 'new-token'); +}); + test('anilist token store migrates legacy plaintext to encrypted', () => { const filePath = createTempTokenFile(); fs.writeFileSync( diff --git a/src/core/services/anilist/anilist-token-store.ts b/src/core/services/anilist/anilist-token-store.ts index b34ce03b..60cd71f2 100644 --- a/src/core/services/anilist/anilist-token-store.ts +++ b/src/core/services/anilist/anilist-token-store.ts @@ -69,7 +69,6 @@ export function createAnilistTokenStore( `AniList token encryption unavailable: safeStorage.isEncryptionAvailable() is false. 
` + `Context: ${getSafeStorageDebugContext()}`, ); - safeStorageUsable = false; return false; } const probe = storage.encryptString('__subminer_anilist_probe__'); @@ -77,7 +76,6 @@ export function createAnilistTokenStore( notifyUser( 'AniList token encryption probe failed: safeStorage.encryptString() returned plaintext bytes.', ); - safeStorageUsable = false; return false; } const roundTrip = storage.decryptString(probe); @@ -85,7 +83,6 @@ export function createAnilistTokenStore( notifyUser( 'AniList token encryption probe failed: encrypt/decrypt round trip returned unexpected content.', ); - safeStorageUsable = false; return false; } safeStorageUsable = true; @@ -96,7 +93,6 @@ export function createAnilistTokenStore( `AniList token encryption unavailable: safeStorage probe threw an error. ` + `Context: ${getSafeStorageDebugContext()}`, ); - safeStorageUsable = false; return false; } }; diff --git a/src/core/services/anilist/anilist-updater.test.ts b/src/core/services/anilist/anilist-updater.test.ts index 7d741a6b..db4109bf 100644 --- a/src/core/services/anilist/anilist-updater.test.ts +++ b/src/core/services/anilist/anilist-updater.test.ts @@ -22,6 +22,44 @@ test('guessAnilistMediaInfo uses guessit output when available', async () => { }); }); +test('guessAnilistMediaInfo fills missing guessit episode from filename parser', async () => { + const result = await guessAnilistMediaInfo('/tmp/Guessit Title S01E09.mkv', null, { + runGuessit: async () => JSON.stringify({ title: 'Guessit Title' }), + }); + assert.deepEqual(result, { + title: 'Guessit Title', + season: 1, + episode: 9, + source: 'guessit', + }); +}); + +test('guessAnilistMediaInfo ignores low-confidence parser details when guessit omits them', async () => { + const result = await guessAnilistMediaInfo('/tmp/Season 2/Guessit Title.mkv', null, { + runGuessit: async () => JSON.stringify({ title: 'Guessit Title' }), + }); + assert.deepEqual(result, { + title: 'Guessit Title', + season: null, + episode: null, + 
source: 'guessit', + }); +}); + +test('guessAnilistMediaInfo parses Little Witch Academia release filename', async () => { + const filename = + '/tmp/Little Witch Academia (2017) - S01E02 - 002 - Papiliodia [Bluray-1080p][10bit][h265][AC3 2.0][JA].mkv'; + const result = await guessAnilistMediaInfo(filename, null, { + runGuessit: async () => JSON.stringify({ title: 'Little Witch Academia' }), + }); + assert.deepEqual(result, { + title: 'Little Witch Academia', + season: 1, + episode: 2, + source: 'guessit', + }); +}); + test('guessAnilistMediaInfo falls back to parser when guessit fails', async () => { const result = await guessAnilistMediaInfo('/tmp/My Anime S01E03.mkv', null, { runGuessit: async () => { @@ -54,7 +92,7 @@ test('guessAnilistMediaInfo uses basename for guessit input', async () => { ]); assert.deepEqual(result, { title: 'Rascal Does Not Dream of Bunny Girl Senpai', - season: null, + season: 1, episode: 1, source: 'guessit', }); diff --git a/src/core/services/anilist/anilist-updater.ts b/src/core/services/anilist/anilist-updater.ts index 2a2238c4..462e9379 100644 --- a/src/core/services/anilist/anilist-updater.ts +++ b/src/core/services/anilist/anilist-updater.ts @@ -236,12 +236,14 @@ export async function guessAnilistMediaInfo( const season = firstPositiveInteger(parsed.season); const year = firstYear(parsed.year); if (title) { + const fallback = parseMediaInfo(target); + const canUseFallbackDetails = fallback.confidence !== 'low'; return { title: buildGuessitTitle(title, alternativeTitle), ...(alternativeTitle ? { alternativeTitle } : {}), ...(year ? { year } : {}), - season, - episode, + season: season ?? (canUseFallbackDetails ? fallback.season : null), + episode: episode ?? (canUseFallbackDetails ? 
fallback.episode : null), source: 'guessit', }; } diff --git a/src/core/services/hyprland-window-placement.test.ts b/src/core/services/hyprland-window-placement.test.ts new file mode 100644 index 00000000..7904c52b --- /dev/null +++ b/src/core/services/hyprland-window-placement.test.ts @@ -0,0 +1,200 @@ +import assert from 'node:assert/strict'; +import test from 'node:test'; +import { + buildHyprlandPlacementDispatches, + ensureHyprlandWindowFloatingByTitle, + findHyprlandWindowForPlacement, + shouldAttemptHyprlandWindowPlacement, +} from './hyprland-window-placement'; + +test('shouldAttemptHyprlandWindowPlacement only enables on Hyprland Linux sessions', () => { + assert.equal( + shouldAttemptHyprlandWindowPlacement('linux', { + HYPRLAND_INSTANCE_SIGNATURE: 'abc', + }), + true, + ); + assert.equal( + shouldAttemptHyprlandWindowPlacement('linux', { + WAYLAND_DISPLAY: 'wayland-1', + }), + false, + ); + assert.equal( + shouldAttemptHyprlandWindowPlacement('darwin', { + HYPRLAND_INSTANCE_SIGNATURE: 'abc', + }), + false, + ); +}); + +test('findHyprlandWindowForPlacement matches current process by title', () => { + const client = findHyprlandWindowForPlacement( + [ + { + address: '0xother', + pid: 123, + title: 'SubMiner Stats', + mapped: true, + }, + { + address: '0xmatch', + pid: 456, + title: 'SubMiner Stats', + mapped: true, + }, + ], + { + pid: 456, + title: 'SubMiner Stats', + }, + ); + + assert.equal(client?.address, '0xmatch'); +}); + +test('buildHyprlandPlacementDispatches floats tiled overlay windows without pinning them', () => { + assert.deepEqual( + buildHyprlandPlacementDispatches({ + address: '0xabc', + floating: false, + pinned: false, + }), + [['dispatch', 'setfloating', 'address:0xabc']], + ); +}); + +test('buildHyprlandPlacementDispatches force-aligns floating overlay windows to target bounds', () => { + assert.deepEqual( + buildHyprlandPlacementDispatches( + { + address: '0xabc', + floating: true, + pinned: false, + }, + { + x: 0, + y: 0, + width: 
1920, + height: 1080, + }, + ), + [ + ['dispatch', 'movewindowpixel', 'exact 0 0,address:0xabc'], + ['dispatch', 'resizewindowpixel', 'exact 1920 1080,address:0xabc'], + ['dispatch', 'setprop', 'address:0xabc rounding 0'], + ['dispatch', 'setprop', 'address:0xabc border_size 0'], + ['dispatch', 'setprop', 'address:0xabc no_shadow 1'], + ['dispatch', 'setprop', 'address:0xabc no_blur 1'], + ['dispatch', 'setprop', 'address:0xabc decorate 0'], + ], + ); +}); + +test('buildHyprlandPlacementDispatches does not pin already floating overlay windows', () => { + assert.deepEqual( + buildHyprlandPlacementDispatches({ + address: '0xabc', + floating: true, + pinned: false, + }), + [], + ); +}); + +test('buildHyprlandPlacementDispatches unpins previously pinned overlay windows', () => { + assert.deepEqual( + buildHyprlandPlacementDispatches({ + address: '0xabc', + floating: true, + pinned: true, + }), + [['dispatch', 'pin', 'address:0xabc']], + ); +}); + +test('ensureHyprlandWindowFloatingByTitle dispatches float-only placement for matching tiled window', () => { + const calls: unknown[][] = []; + const placed = ensureHyprlandWindowFloatingByTitle({ + title: 'SubMiner Stats', + platform: 'linux', + env: { + HYPRLAND_INSTANCE_SIGNATURE: 'abc', + }, + pid: 456, + execFileSync: ((command: string, args: string[], options: unknown) => { + calls.push([command, args, options]); + if (args.join(' ') === '-j clients') { + return JSON.stringify([ + { + address: '0xmatch', + pid: 456, + title: 'SubMiner Stats', + mapped: true, + floating: false, + pinned: false, + }, + ]); + } + return ''; + }) as never, + }); + + assert.equal(placed, true); + assert.deepEqual( + calls.map(([, args]) => args), + [ + ['-j', 'clients'], + ['dispatch', 'setfloating', 'address:0xmatch'], + ], + ); +}); + +test('ensureHyprlandWindowFloatingByTitle dispatches exact Hyprland geometry when bounds are provided', () => { + const calls: unknown[][] = []; + const placed = ensureHyprlandWindowFloatingByTitle({ + 
title: 'SubMiner Stats', + platform: 'linux', + env: { + HYPRLAND_INSTANCE_SIGNATURE: 'abc', + }, + pid: 456, + bounds: { + x: 0, + y: 0, + width: 1920, + height: 1080, + }, + execFileSync: ((command: string, args: string[], options: unknown) => { + calls.push([command, args, options]); + if (args.join(' ') === '-j clients') { + return JSON.stringify([ + { + address: '0xmatch', + pid: 456, + title: 'SubMiner Stats', + mapped: true, + floating: true, + pinned: false, + }, + ]); + } + return ''; + }) as never, + }); + + assert.equal(placed, true); + assert.deepEqual( + calls.map(([, args]) => args), + [ + ['-j', 'clients'], + ['dispatch', 'movewindowpixel', 'exact 0 0,address:0xmatch'], + ['dispatch', 'resizewindowpixel', 'exact 1920 1080,address:0xmatch'], + ['dispatch', 'setprop', 'address:0xmatch rounding 0'], + ['dispatch', 'setprop', 'address:0xmatch border_size 0'], + ['dispatch', 'setprop', 'address:0xmatch no_shadow 1'], + ['dispatch', 'setprop', 'address:0xmatch no_blur 1'], + ['dispatch', 'setprop', 'address:0xmatch decorate 0'], + ], + ); +}); diff --git a/src/core/services/hyprland-window-placement.ts b/src/core/services/hyprland-window-placement.ts new file mode 100644 index 00000000..5782933c --- /dev/null +++ b/src/core/services/hyprland-window-placement.ts @@ -0,0 +1,156 @@ +import { execFileSync } from 'node:child_process'; + +export interface HyprlandPlacementClient { + address?: string; + floating?: boolean; + hidden?: boolean; + initialTitle?: string; + mapped?: boolean; + pid?: number; + pinned?: boolean; + title?: string; +} + +export interface HyprlandPlacementBounds { + x: number; + y: number; + width: number; + height: number; +} + +type ExecFileSync = typeof execFileSync; + +export function shouldAttemptHyprlandWindowPlacement( + platform: NodeJS.Platform = process.platform, + env: NodeJS.ProcessEnv = process.env, +): boolean { + return platform === 'linux' && Boolean(env.HYPRLAND_INSTANCE_SIGNATURE); +} + +function 
parseHyprlandClients(output: string): HyprlandPlacementClient[] { + const payloadStart = output.indexOf('['); + if (payloadStart < 0) { + return []; + } + + const parsed = JSON.parse(output.slice(payloadStart)) as unknown; + return Array.isArray(parsed) ? (parsed as HyprlandPlacementClient[]) : []; +} + +export function findHyprlandWindowForPlacement( + clients: HyprlandPlacementClient[], + options: { + pid: number; + title: string; + }, +): HyprlandPlacementClient | null { + const title = options.title.trim(); + if (!title) { + return null; + } + + return ( + clients.find( + (client) => + client.pid === options.pid && + client.address && + client.mapped !== false && + client.hidden !== true && + (client.title === title || client.initialTitle === title), + ) ?? null + ); +} + +export function buildHyprlandPlacementDispatches( + client: HyprlandPlacementClient, + bounds?: HyprlandPlacementBounds | null, +): string[][] { + if (!client.address) { + return []; + } + + const windowAddress = `address:${client.address}`; + const dispatches: string[][] = []; + if (client.floating !== true) { + dispatches.push(['dispatch', 'setfloating', windowAddress]); + } + if (client.pinned === true) { + dispatches.push(['dispatch', 'pin', windowAddress]); + } + const roundedBounds = roundPlacementBounds(bounds); + if (roundedBounds) { + dispatches.push([ + 'dispatch', + 'movewindowpixel', + `exact ${roundedBounds.x} ${roundedBounds.y},${windowAddress}`, + ]); + dispatches.push([ + 'dispatch', + 'resizewindowpixel', + `exact ${roundedBounds.width} ${roundedBounds.height},${windowAddress}`, + ]); + dispatches.push(['dispatch', 'setprop', `${windowAddress} rounding 0`]); + dispatches.push(['dispatch', 'setprop', `${windowAddress} border_size 0`]); + dispatches.push(['dispatch', 'setprop', `${windowAddress} no_shadow 1`]); + dispatches.push(['dispatch', 'setprop', `${windowAddress} no_blur 1`]); + dispatches.push(['dispatch', 'setprop', `${windowAddress} decorate 0`]); + } + return 
dispatches; +} + +function roundPlacementBounds( + bounds?: HyprlandPlacementBounds | null, +): HyprlandPlacementBounds | null { + if (!bounds) { + return null; + } + const rounded = { + x: Math.round(bounds.x), + y: Math.round(bounds.y), + width: Math.round(bounds.width), + height: Math.round(bounds.height), + }; + return Number.isFinite(rounded.x) && + Number.isFinite(rounded.y) && + Number.isFinite(rounded.width) && + Number.isFinite(rounded.height) && + rounded.width > 0 && + rounded.height > 0 + ? rounded + : null; +} + +export function ensureHyprlandWindowFloatingByTitle(options: { + title: string; + bounds?: HyprlandPlacementBounds | null; + platform?: NodeJS.Platform; + env?: NodeJS.ProcessEnv; + pid?: number; + execFileSync?: ExecFileSync; +}): boolean { + if (!shouldAttemptHyprlandWindowPlacement(options.platform, options.env)) { + return false; + } + + const run = options.execFileSync ?? execFileSync; + try { + const clients = parseHyprlandClients( + String(run('hyprctl', ['-j', 'clients'], { encoding: 'utf-8' })), + ); + const client = findHyprlandWindowForPlacement(clients, { + pid: options.pid ?? 
process.pid, + title: options.title, + }); + if (!client) { + return false; + } + + const dispatches = buildHyprlandPlacementDispatches(client, options.bounds); + for (const args of dispatches) { + run('hyprctl', args, { stdio: 'ignore' }); + } + return dispatches.length > 0; + } catch { + return false; + } +} diff --git a/src/core/services/immersion-tracker/__tests__/query.test.ts b/src/core/services/immersion-tracker/__tests__/query.test.ts index bf21759e..c45828ce 100644 --- a/src/core/services/immersion-tracker/__tests__/query.test.ts +++ b/src/core/services/immersion-tracker/__tests__/query.test.ts @@ -3050,6 +3050,59 @@ test('anime and media detail prefer lifetime totals over partial retained sessio } }); +test('media detail resolves retained sessions before lifetime summary exists', () => { + const dbPath = makeDbPath(); + const db = new Database(dbPath); + + try { + ensureSchema(db); + + const videoId = getOrCreateVideoRecord(db, 'local:/tmp/recent-session.mkv', { + canonicalTitle: 'Recent Session Episode', + sourcePath: '/tmp/recent-session.mkv', + sourceUrl: null, + sourceType: SOURCE_TYPE_LOCAL, + }); + const startedAtMs = 1_700_000_000_000; + const { sessionId } = startSessionRecord(db, videoId, startedAtMs); + db.prepare( + ` + UPDATE imm_sessions + SET ended_at_ms = ?, status = 2, active_watched_ms = ?, lines_seen = ?, tokens_seen = ?, cards_mined = ? + WHERE session_id = ? 
+ `, + ).run(startedAtMs + 600_000, 600_000, 100, 990, 1, sessionId); + insertFilteredWordOccurrence(db, { + sessionId, + videoId, + occurrenceCount: 4, + startedAtMs, + }); + + assert.equal(getSessionSummaries(db, 1)[0]?.videoId, videoId); + assert.equal( + ( + db + .prepare('SELECT COUNT(*) AS total FROM imm_lifetime_media WHERE video_id = ?') + .get(videoId) as { total: number } + ).total, + 0, + ); + + const detail = getMediaDetail(db, videoId); + assert.ok(detail); + assert.equal(detail.canonicalTitle, 'Recent Session Episode'); + assert.equal(detail.totalSessions, 1); + assert.equal(detail.totalActiveMs, 600_000); + assert.equal(detail.totalLinesSeen, 100); + assert.equal(detail.totalTokensSeen, 4); + assert.equal(detail.totalCards, 1); + } finally { + db.close(); + cleanupDbPath(dbPath); + } +}); + test('media library and detail queries read lifetime totals', () => { const dbPath = makeDbPath(); const db = new Database(dbPath); diff --git a/src/core/services/immersion-tracker/query-library.ts b/src/core/services/immersion-tracker/query-library.ts index 1202c5a5..69240462 100644 --- a/src/core/services/immersion-tracker/query-library.ts +++ b/src/core/services/immersion-tracker/query-library.ts @@ -243,6 +243,7 @@ export function getMediaLibrary(db: DatabaseSync): MediaLibraryRow[] { } export function getMediaDetail(db: DatabaseSync, videoId: number): MediaDetailRow | null { + const wordsExpr = sessionDisplayWordsExpr('s', 'swc', 'COALESCE(asm.tokensSeen, s.tokens_seen)'); return db .prepare( ` @@ -251,11 +252,26 @@ export function getMediaDetail(db: DatabaseSync, videoId: number): MediaDetailRo v.video_id AS videoId, v.canonical_title AS canonicalTitle, v.anime_id AS animeId, - COALESCE(lm.total_sessions, 0) AS totalSessions, - COALESCE(lm.total_active_ms, 0) AS totalActiveMs, - COALESCE(lm.total_cards, 0) AS totalCards, - COALESCE(lm.total_tokens_seen, 0) AS totalTokensSeen, - COALESCE(lm.total_lines_seen, 0) AS totalLinesSeen, + CASE + WHEN lm.video_id IS 
NOT NULL THEN COALESCE(lm.total_sessions, 0) + ELSE COUNT(DISTINCT s.session_id) + END AS totalSessions, + CASE + WHEN lm.video_id IS NOT NULL THEN COALESCE(lm.total_active_ms, 0) + ELSE COALESCE(SUM(COALESCE(asm.activeWatchedMs, s.active_watched_ms, 0)), 0) + END AS totalActiveMs, + CASE + WHEN lm.video_id IS NOT NULL THEN COALESCE(lm.total_cards, 0) + ELSE COALESCE(SUM(COALESCE(asm.cardsMined, s.cards_mined, 0)), 0) + END AS totalCards, + CASE + WHEN lm.video_id IS NOT NULL THEN COALESCE(lm.total_tokens_seen, 0) + ELSE COALESCE(SUM(${wordsExpr}), 0) + END AS totalTokensSeen, + CASE + WHEN lm.video_id IS NOT NULL THEN COALESCE(lm.total_lines_seen, 0) + ELSE COALESCE(SUM(COALESCE(asm.linesSeen, s.lines_seen, 0)), 0) + END AS totalLinesSeen, COALESCE(SUM(COALESCE(asm.lookupCount, s.lookup_count, 0)), 0) AS totalLookupCount, COALESCE(SUM(COALESCE(asm.lookupHits, s.lookup_hits, 0)), 0) AS totalLookupHits, COALESCE(SUM(COALESCE(asm.yomitanLookupCount, s.yomitan_lookup_count, 0)), 0) AS totalYomitanLookupCount, @@ -271,11 +287,13 @@ export function getMediaDetail(db: DatabaseSync, videoId: number): MediaDetailRo yv.uploader_url AS uploaderUrl, yv.description AS description FROM imm_videos v - JOIN imm_lifetime_media lm ON lm.video_id = v.video_id + LEFT JOIN imm_lifetime_media lm ON lm.video_id = v.video_id LEFT JOIN imm_youtube_videos yv ON yv.video_id = v.video_id LEFT JOIN imm_sessions s ON s.video_id = v.video_id LEFT JOIN active_session_metrics asm ON asm.sessionId = s.session_id + LEFT JOIN session_word_counts swc ON swc.sessionId = s.session_id WHERE v.video_id = ? 
+ AND (lm.video_id IS NOT NULL OR s.session_id IS NOT NULL) GROUP BY v.video_id `, ) diff --git a/src/core/services/ipc.test.ts b/src/core/services/ipc.test.ts index 3d9be47d..46e2ec80 100644 --- a/src/core/services/ipc.test.ts +++ b/src/core/services/ipc.test.ts @@ -302,6 +302,86 @@ test('createIpcDepsRuntime wires AniList handlers', async () => { assert.equal(deps.getPlaybackPaused(), true); }); +test('registerIpcHandlers runs AniList update after manual mark watched succeeds', async () => { + const { registrar, handlers } = createFakeIpcRegistrar(); + const calls: string[] = []; + registerIpcHandlers( + createRegisterIpcDeps({ + immersionTracker: createFakeImmersionTracker({ + markActiveVideoWatched: async () => { + calls.push('mark'); + return true; + }, + }), + runAnilistPostWatchUpdateOnManualMark: async () => { + calls.push('anilist'); + }, + }), + registrar, + ); + + const result = await handlers.handle.get(IPC_CHANNELS.command.markActiveVideoWatched)?.({}); + + assert.equal(result, true); + assert.deepEqual(calls, ['mark', 'anilist']); +}); + +test('registerIpcHandlers isolates AniList update failures after manual mark watched succeeds', async () => { + const { registrar, handlers } = createFakeIpcRegistrar(); + const calls: string[] = []; + const originalWarn = console.warn; + console.warn = () => undefined; + + try { + registerIpcHandlers( + createRegisterIpcDeps({ + immersionTracker: createFakeImmersionTracker({ + markActiveVideoWatched: async () => { + calls.push('mark'); + return true; + }, + }), + runAnilistPostWatchUpdateOnManualMark: async () => { + calls.push('anilist'); + throw new Error('post-watch failed'); + }, + }), + registrar, + ); + + const result = await handlers.handle.get(IPC_CHANNELS.command.markActiveVideoWatched)?.({}); + + assert.equal(result, true); + assert.deepEqual(calls, ['mark', 'anilist']); + } finally { + console.warn = originalWarn; + } +}); + +test('registerIpcHandlers skips AniList update when manual mark watched has no 
active session', async () => { + const { registrar, handlers } = createFakeIpcRegistrar(); + const calls: string[] = []; + registerIpcHandlers( + createRegisterIpcDeps({ + immersionTracker: createFakeImmersionTracker({ + markActiveVideoWatched: async () => { + calls.push('mark'); + return false; + }, + }), + runAnilistPostWatchUpdateOnManualMark: async () => { + calls.push('anilist'); + }, + }), + registrar, + ); + + const result = await handlers.handle.get(IPC_CHANNELS.command.markActiveVideoWatched)?.({}); + + assert.equal(result, false); + assert.deepEqual(calls, ['mark']); +}); + test('registerIpcHandlers exposes playlist browser snapshot and mutations', async () => { const { registrar, handlers } = createFakeIpcRegistrar(); const calls: Array<[string, unknown[]]> = []; diff --git a/src/core/services/ipc.ts b/src/core/services/ipc.ts index 690bda20..89c04da1 100644 --- a/src/core/services/ipc.ts +++ b/src/core/services/ipc.ts @@ -90,6 +90,7 @@ export interface IpcServiceDeps { openAnilistSetup: () => void; getAnilistQueueStatus: () => unknown; retryAnilistQueueNow: () => Promise<{ ok: boolean; message: string }>; + runAnilistPostWatchUpdateOnManualMark?: () => Promise; getCharacterDictionarySelection?: () => Promise; setCharacterDictionarySelection?: (mediaId: number) => Promise; appendClipboardVideoToQueue: () => { ok: boolean; message: string }; @@ -213,6 +214,7 @@ export interface IpcDepsRuntimeOptions { openAnilistSetup: () => void; getAnilistQueueStatus: () => unknown; retryAnilistQueueNow: () => Promise<{ ok: boolean; message: string }>; + runAnilistPostWatchUpdateOnManualMark?: () => Promise; getCharacterDictionarySelection?: () => Promise; setCharacterDictionarySelection?: (mediaId: number) => Promise; appendClipboardVideoToQueue: () => { ok: boolean; message: string }; @@ -288,6 +290,7 @@ export function createIpcDepsRuntime(options: IpcDepsRuntimeOptions): IpcService openAnilistSetup: options.openAnilistSetup, getAnilistQueueStatus: 
options.getAnilistQueueStatus, retryAnilistQueueNow: options.retryAnilistQueueNow, + runAnilistPostWatchUpdateOnManualMark: options.runAnilistPostWatchUpdateOnManualMark, getCharacterDictionarySelection: options.getCharacterDictionarySelection ?? (async () => ({ @@ -385,7 +388,18 @@ export function registerIpcHandlers(deps: IpcServiceDeps, ipc: IpcMainRegistrar }); ipc.handle(IPC_CHANNELS.command.markActiveVideoWatched, async () => { - return (await deps.immersionTracker?.markActiveVideoWatched()) ?? false; + const marked = (await deps.immersionTracker?.markActiveVideoWatched()) ?? false; + if (marked) { + try { + await deps.runAnilistPostWatchUpdateOnManualMark?.(); + } catch (error) { + console.warn( + 'Failed to run AniList post-watch update after manual watched mark:', + (error as Error).message, + ); + } + } + return marked; }); ipc.on(IPC_CHANNELS.command.quitApp, () => { diff --git a/src/core/services/mpv-properties.ts b/src/core/services/mpv-properties.ts index e8b81cef..8b5272f3 100644 --- a/src/core/services/mpv-properties.ts +++ b/src/core/services/mpv-properties.ts @@ -59,6 +59,7 @@ const MPV_SUBTITLE_PROPERTY_OBSERVATIONS: string[] = [ 'sub-ass-override', 'sub-use-margins', 'pause', + 'fullscreen', 'duration', 'media-title', 'secondary-sub-visibility', diff --git a/src/core/services/mpv-protocol.test.ts b/src/core/services/mpv-protocol.test.ts index 5c3756b7..b75485a3 100644 --- a/src/core/services/mpv-protocol.test.ts +++ b/src/core/services/mpv-protocol.test.ts @@ -93,6 +93,7 @@ function createDeps(overrides: Partial = {}): { emitTimePosChange: () => {}, emitDurationChange: () => {}, emitPauseChange: () => {}, + emitFullscreenChange: (payload) => state.events.push(payload), autoLoadSecondarySubTrack: () => {}, setCurrentVideoPath: () => {}, emitSecondarySubtitleVisibility: (payload) => state.events.push(payload), @@ -160,6 +161,17 @@ test('dispatchMpvProtocolMessage enforces sub-visibility hidden when overlay sup ]); }); 
+test('dispatchMpvProtocolMessage emits fullscreen changes', async () => { + const { deps, state } = createDeps(); + + await dispatchMpvProtocolMessage( + { event: 'property-change', name: 'fullscreen', data: true }, + deps, + ); + + assert.deepEqual(state.events, [{ fullscreen: true }]); +}); + test('dispatchMpvProtocolMessage skips sub-visibility suppression when overlay is hidden', async () => { const { deps, state } = createDeps({ isVisibleOverlayVisible: () => false, @@ -269,6 +281,25 @@ test('dispatchMpvProtocolMessage pauses on sub-end when pendingPauseAtSubEnd is }); }); +test('dispatchMpvProtocolMessage updates current time before emitting time-pos change', async () => { + const calls: string[] = []; + let currentTimePos = 0; + const { deps } = createDeps({ + setCurrentTimePos: (time) => { + currentTimePos = time; + calls.push(`set:${time}`); + }, + getCurrentTimePos: () => currentTimePos, + emitTimePosChange: ({ time }) => { + calls.push(`emit:${time}:current=${currentTimePos}`); + }, + }); + + await dispatchMpvProtocolMessage({ event: 'property-change', name: 'time-pos', data: 90 }, deps); + + assert.deepEqual(calls, ['set:90', 'emit:90:current=90']); +}); + test('splitMpvMessagesFromBuffer parses complete lines and preserves partial buffer', () => { const parsed = splitMpvMessagesFromBuffer( '{"event":"shutdown"}\n{"event":"property-change","name":"media-title","data":"x"}\n{"partial"', diff --git a/src/core/services/mpv-protocol.ts b/src/core/services/mpv-protocol.ts index b79dbbd9..3751a17e 100644 --- a/src/core/services/mpv-protocol.ts +++ b/src/core/services/mpv-protocol.ts @@ -65,6 +65,7 @@ export interface MpvProtocolHandleMessageDeps { emitTimePosChange: (payload: { time: number }) => void; emitDurationChange: (payload: { duration: number }) => void; emitPauseChange: (payload: { paused: boolean }) => void; + emitFullscreenChange: (payload: { fullscreen: boolean }) => void; emitSubtitleMetricsChange: (payload: Partial) => void; 
setCurrentSecondarySubText: (text: string) => void; resolvePendingRequest: (requestId: number, message: MpvMessage) => boolean; @@ -275,8 +276,9 @@ export async function dispatchMpvProtocolMessage( deps.setCurrentAudioTrackId(typeof msg.data === 'number' ? (msg.data as number) : null); deps.syncCurrentAudioStreamIndex(); } else if (msg.name === 'time-pos') { - deps.emitTimePosChange({ time: (msg.data as number) || 0 }); - deps.setCurrentTimePos((msg.data as number) || 0); + const timePos = (msg.data as number) || 0; + deps.setCurrentTimePos(timePos); + deps.emitTimePosChange({ time: timePos }); if ( deps.getPauseAtTime() !== null && deps.getCurrentTimePos() >= (deps.getPauseAtTime() as number) @@ -291,6 +293,8 @@ export async function dispatchMpvProtocolMessage( } } else if (msg.name === 'pause') { deps.emitPauseChange({ paused: asBoolean(msg.data, false) }); + } else if (msg.name === 'fullscreen') { + deps.emitFullscreenChange({ fullscreen: asBoolean(msg.data, false) }); } else if (msg.name === 'media-title') { deps.emitMediaTitleChange({ title: typeof msg.data === 'string' ? 
msg.data.trim() : null, diff --git a/src/core/services/mpv.test.ts b/src/core/services/mpv.test.ts index e9b5b2a5..8b949a14 100644 --- a/src/core/services/mpv.test.ts +++ b/src/core/services/mpv.test.ts @@ -57,6 +57,22 @@ test('MpvIpcClient handles sub-text property change and broadcasts tokenized sub assert.equal(events[0]!.isOverlayVisible, false); }); +test('MpvIpcClient emits fullscreen property changes', async () => { + const events: Array<{ fullscreen: boolean }> = []; + const client = new MpvIpcClient('/tmp/mpv.sock', makeDeps()); + client.on('fullscreen-change', (payload) => { + events.push(payload); + }); + + await invokeHandleMessage(client, { + event: 'property-change', + name: 'fullscreen', + data: true, + }); + + assert.deepEqual(events, [{ fullscreen: true }]); +}); + test('MpvIpcClient clears cached media title when media path changes', async () => { const client = new MpvIpcClient('/tmp/mpv.sock', makeDeps()); @@ -473,7 +489,7 @@ test('MpvIpcClient updates current audio stream index from track list', async () assert.equal(client.currentAudioStreamIndex, 11); }); -test('MpvIpcClient playNextSubtitle preserves a manual paused state', async () => { +test('MpvIpcClient playNextSubtitle starts playback from paused state and auto-pauses at end', async () => { const commands: unknown[] = []; const client = new MpvIpcClient('/tmp/mpv.sock', makeDeps()); (client as any).send = (payload: unknown) => { @@ -491,9 +507,29 @@ test('MpvIpcClient playNextSubtitle preserves a manual paused state', async () = client.playNextSubtitle(); - assert.equal((client as any).pendingPauseAtSubEnd, false); + assert.equal((client as any).pendingPauseAtSubEnd, true); assert.equal((client as any).pauseAtTime, null); - assert.deepEqual(commands, [{ command: ['sub-seek', 1] }]); + assert.deepEqual(commands, [ + { command: ['sub-seek', 1] }, + { command: ['set_property', 'pause', false] }, + ]); +}); + +test('MpvIpcClient playNextSubtitle starts playback when pause state is unknown', 
() => { + const commands: unknown[] = []; + const client = new MpvIpcClient('/tmp/mpv.sock', makeDeps()); + (client as any).send = (payload: unknown) => { + commands.push(payload); + return true; + }; + + client.playNextSubtitle(); + + assert.equal((client as any).pendingPauseAtSubEnd, true); + assert.deepEqual(commands, [ + { command: ['sub-seek', 1] }, + { command: ['set_property', 'pause', false] }, + ]); }); test('MpvIpcClient playNextSubtitle still auto-pauses at end while already playing', async () => { diff --git a/src/core/services/mpv.ts b/src/core/services/mpv.ts index 7e5df317..9525c43a 100644 --- a/src/core/services/mpv.ts +++ b/src/core/services/mpv.ts @@ -119,6 +119,7 @@ export interface MpvIpcClientEventMap { 'time-pos-change': { time: number }; 'duration-change': { duration: number }; 'pause-change': { paused: boolean }; + 'fullscreen-change': { fullscreen: boolean }; 'secondary-subtitle-change': { text: string }; 'subtitle-track-change': { sid: number | null }; 'subtitle-track-list-change': { trackList: unknown[] | null }; @@ -330,6 +331,9 @@ export class MpvIpcClient implements MpvClient { this.playbackPaused = payload.paused; this.emit('pause-change', payload); }, + emitFullscreenChange: (payload) => { + this.emit('fullscreen-change', payload); + }, emitSecondarySubtitleChange: (payload) => { this.emit('secondary-subtitle-change', payload); }, @@ -518,14 +522,12 @@ export class MpvIpcClient implements MpvClient { } playNextSubtitle(): void { - if (this.playbackPaused === true) { - this.pendingPauseAtSubEnd = false; - this.pauseAtTime = null; - this.send({ command: ['sub-seek', 1] }); - return; - } this.pendingPauseAtSubEnd = true; + this.pauseAtTime = null; this.send({ command: ['sub-seek', 1] }); + if (this.playbackPaused !== false) { + this.send({ command: ['set_property', 'pause', false] }); + } } restorePreviousSecondarySubVisibility(): void { diff --git a/src/core/services/overlay-manager.test.ts b/src/core/services/overlay-manager.test.ts 
index 6f1ca42e..d557b633 100644 --- a/src/core/services/overlay-manager.test.ts +++ b/src/core/services/overlay-manager.test.ts @@ -77,6 +77,7 @@ test('overlay manager applies bounds for main and modal windows', () => { const visibleCalls: Electron.Rectangle[] = []; const visibleWindow = { isDestroyed: () => false, + getTitle: () => 'SubMiner Overlay', setBounds: (bounds: Electron.Rectangle) => { visibleCalls.push(bounds); }, @@ -84,6 +85,7 @@ test('overlay manager applies bounds for main and modal windows', () => { const modalCalls: Electron.Rectangle[] = []; const modalWindow = { isDestroyed: () => false, + getTitle: () => 'SubMiner Overlay Modal', setBounds: (bounds: Electron.Rectangle) => { modalCalls.push(bounds); }, diff --git a/src/core/services/overlay-visibility.test.ts b/src/core/services/overlay-visibility.test.ts index c265a07b..8c3a1507 100644 --- a/src/core/services/overlay-visibility.test.ts +++ b/src/core/services/overlay-visibility.test.ts @@ -883,7 +883,7 @@ test('visible overlay stays hidden while a modal window is active', () => { assert.ok(!calls.includes('update-bounds')); }); -test('macOS tracked visible overlay stays click-through without passively stealing focus', () => { +test('macOS tracked visible overlay stays interactive without passively stealing focus', () => { const { window, calls } = createMainWindowRecorder(); const tracker: WindowTrackerStub = { isTracking: () => true, @@ -915,11 +915,158 @@ test('macOS tracked visible overlay stays click-through without passively steali isWindowsPlatform: false, } as never); - assert.ok(calls.includes('mouse-ignore:true:forward')); + assert.ok(calls.includes('mouse-ignore:false:plain')); assert.ok(calls.includes('show')); assert.ok(!calls.includes('focus')); }); +test('macOS keeps active mpv overlay visible and interactive during tracker refresh', () => { + const { window, calls } = createMainWindowRecorder(); + const osdMessages: string[] = []; + const tracker: WindowTrackerStub = { + 
isTracking: () => true, + getGeometry: () => ({ x: 0, y: 0, width: 1280, height: 720 }), + isTargetWindowFocused: () => true, + }; + + updateVisibleOverlayVisibility({ + visibleOverlayVisible: true, + mainWindow: window as never, + windowTracker: tracker as never, + trackerNotReadyWarningShown: false, + setTrackerNotReadyWarningShown: () => { + calls.push('tracker-warning'); + }, + updateVisibleOverlayBounds: () => { + calls.push('update-bounds'); + }, + ensureOverlayWindowLevel: () => { + calls.push('ensure-level'); + }, + syncPrimaryOverlayWindowLayer: () => { + calls.push('sync-layer'); + }, + enforceOverlayLayerOrder: () => { + calls.push('enforce-order'); + }, + syncOverlayShortcuts: () => { + calls.push('sync-shortcuts'); + }, + isMacOSPlatform: true, + isWindowsPlatform: false, + showOverlayLoadingOsd: (message: string) => { + osdMessages.push(message); + }, + } as never); + + assert.ok(calls.includes('update-bounds')); + assert.ok(calls.includes('sync-layer')); + assert.ok(calls.includes('mouse-ignore:false:plain')); + assert.ok(calls.includes('ensure-level')); + assert.ok(calls.includes('enforce-order')); + assert.ok(calls.includes('sync-shortcuts')); + assert.ok(!calls.includes('hide')); + assert.deepEqual(osdMessages, []); +}); + +test('macOS tracked overlay releases topmost level when mpv loses foreground', () => { + const { window, calls } = createMainWindowRecorder(); + const tracker: WindowTrackerStub = { + isTracking: () => true, + getGeometry: () => ({ x: 0, y: 0, width: 1280, height: 720 }), + isTargetWindowFocused: () => false, + }; + + updateVisibleOverlayVisibility({ + visibleOverlayVisible: true, + mainWindow: window as never, + windowTracker: tracker as never, + trackerNotReadyWarningShown: false, + setTrackerNotReadyWarningShown: () => {}, + updateVisibleOverlayBounds: () => { + calls.push('update-bounds'); + }, + ensureOverlayWindowLevel: () => { + calls.push('ensure-level'); + }, + syncPrimaryOverlayWindowLayer: () => { + 
calls.push('sync-layer'); + }, + enforceOverlayLayerOrder: () => { + calls.push('enforce-order'); + }, + syncOverlayShortcuts: () => { + calls.push('sync-shortcuts'); + }, + isMacOSPlatform: true, + isWindowsPlatform: false, + } as never); + + assert.ok(calls.includes('update-bounds')); + assert.ok(calls.includes('sync-layer')); + assert.ok(calls.includes('mouse-ignore:true:forward')); + assert.ok(calls.includes('always-on-top:false')); + assert.ok(calls.includes('show')); + assert.ok(calls.includes('sync-shortcuts')); + assert.ok(!calls.includes('ensure-level')); + assert.ok(!calls.includes('enforce-order')); + assert.ok(!calls.includes('focus')); + assert.ok(!calls.includes('hide')); +}); + +test('macOS preserves an already visible active mpv overlay while tracker is temporarily not ready', () => { + const { window, calls } = createMainWindowRecorder(); + const osdMessages: string[] = []; + let trackerWarning = false; + const tracker: WindowTrackerStub = { + isTracking: () => false, + getGeometry: () => null, + isTargetWindowFocused: () => true, + }; + + window.show(); + calls.length = 0; + + updateVisibleOverlayVisibility({ + visibleOverlayVisible: true, + mainWindow: window as never, + windowTracker: tracker as never, + trackerNotReadyWarningShown: trackerWarning, + setTrackerNotReadyWarningShown: (shown: boolean) => { + trackerWarning = shown; + calls.push(`tracker-warning:${shown}`); + }, + updateVisibleOverlayBounds: () => { + calls.push('update-bounds'); + }, + ensureOverlayWindowLevel: () => { + calls.push('ensure-level'); + }, + syncPrimaryOverlayWindowLayer: () => { + calls.push('sync-layer'); + }, + enforceOverlayLayerOrder: () => { + calls.push('enforce-order'); + }, + syncOverlayShortcuts: () => { + calls.push('sync-shortcuts'); + }, + isMacOSPlatform: true, + isWindowsPlatform: false, + showOverlayLoadingOsd: (message: string) => { + osdMessages.push(message); + }, + } as never); + + assert.equal(trackerWarning, false); + 
assert.ok(calls.includes('sync-layer')); + assert.ok(calls.includes('mouse-ignore:false:plain')); + assert.ok(calls.includes('ensure-level')); + assert.ok(calls.includes('sync-shortcuts')); + assert.ok(!calls.includes('hide')); + assert.deepEqual(osdMessages, []); +}); + test('forced mouse passthrough keeps macOS tracked overlay passive while visible', () => { const { window, calls } = createMainWindowRecorder(); const tracker: WindowTrackerStub = { @@ -1192,6 +1339,65 @@ test('macOS keeps visible overlay hidden while tracker is not initialized yet', assert.ok(!calls.includes('update-bounds')); }); +test('macOS preserves visible overlay during transient tracker loss with retained geometry', () => { + const { window, calls } = createMainWindowRecorder(); + const osdMessages: string[] = []; + let trackerWarning = false; + let tracking = true; + const tracker: WindowTrackerStub = { + isTracking: () => tracking, + getGeometry: () => ({ x: 0, y: 0, width: 1280, height: 720 }), + isTargetWindowFocused: () => true, + }; + + const run = () => + updateVisibleOverlayVisibility({ + visibleOverlayVisible: true, + mainWindow: window as never, + windowTracker: tracker as never, + trackerNotReadyWarningShown: trackerWarning, + setTrackerNotReadyWarningShown: (shown: boolean) => { + trackerWarning = shown; + }, + updateVisibleOverlayBounds: () => { + calls.push('update-bounds'); + }, + ensureOverlayWindowLevel: () => { + calls.push('ensure-level'); + }, + syncPrimaryOverlayWindowLayer: () => { + calls.push('sync-layer'); + }, + enforceOverlayLayerOrder: () => { + calls.push('enforce-order'); + }, + syncOverlayShortcuts: () => { + calls.push('sync-shortcuts'); + }, + isMacOSPlatform: true, + showOverlayLoadingOsd: (message: string) => { + osdMessages.push(message); + }, + } as never); + + run(); + calls.length = 0; + tracking = false; + + run(); + + assert.equal(trackerWarning, false); + assert.deepEqual(osdMessages, []); + assert.ok(calls.includes('update-bounds')); + 
assert.ok(calls.includes('sync-layer')); + assert.ok(calls.includes('mouse-ignore:false:plain')); + assert.ok(calls.includes('ensure-level')); + assert.ok(calls.includes('enforce-order')); + assert.ok(calls.includes('sync-shortcuts')); + assert.ok(!calls.includes('hide')); + assert.ok(!calls.includes('show')); +}); + test('macOS suppresses immediate repeat loading OSD after tracker recovery until cooldown expires', () => { const { window } = createMainWindowRecorder(); const osdMessages: string[] = []; diff --git a/src/core/services/overlay-visibility.ts b/src/core/services/overlay-visibility.ts index 768dcc61..2a0db059 100644 --- a/src/core/services/overlay-visibility.ts +++ b/src/core/services/overlay-visibility.ts @@ -89,13 +89,22 @@ export function updateVisibleOverlayVisibility(args: { return; } - const showPassiveVisibleOverlay = (): void => { + const showPassiveVisibleOverlay = (): boolean => { const forceMousePassthrough = args.forceMousePassthrough === true; const wasVisible = mainWindow.isVisible(); - const shouldDefaultToPassthrough = - args.isMacOSPlatform || args.isWindowsPlatform || forceMousePassthrough; const isVisibleOverlayFocused = typeof mainWindow.isFocused === 'function' && mainWindow.isFocused(); + const isTrackedMacOSTargetFocused = + !args.isMacOSPlatform || !args.windowTracker + ? true + : (args.windowTracker.isTargetWindowFocused?.() ?? true); + const shouldReleaseMacOSOverlayLevel = + args.isMacOSPlatform && + !!args.windowTracker && + !isVisibleOverlayFocused && + !isTrackedMacOSTargetFocused; + const shouldDefaultToPassthrough = + args.isWindowsPlatform || forceMousePassthrough || shouldReleaseMacOSOverlayLevel; const windowsForegroundProcessName = args.lastKnownWindowsForegroundProcessName?.trim().toLowerCase() ?? null; const windowsOverlayProcessName = args.windowsOverlayProcessName?.trim().toLowerCase() ?? 
null; @@ -138,7 +147,7 @@ export function updateVisibleOverlayVisibility(args: { // On Windows, z-order is enforced by the OS via the owner window mechanism // (SetWindowLongPtr GWLP_HWNDPARENT). The overlay is always above mpv // without any manual z-order management. - } else if (!forceMousePassthrough) { + } else if (!forceMousePassthrough && !shouldReleaseMacOSOverlayLevel) { args.ensureOverlayWindowLevel(mainWindow); } else { mainWindow.setAlwaysOnTop(false); @@ -187,6 +196,8 @@ export function updateVisibleOverlayVisibility(args: { if (!args.isWindowsPlatform && !args.isMacOSPlatform && !forceMousePassthrough) { mainWindow.focus(); } + + return !shouldReleaseMacOSOverlayLevel; }; const maybeShowOverlayLoadingOsd = (): void => { @@ -230,8 +241,8 @@ export function updateVisibleOverlayVisibility(args: { args.updateVisibleOverlayBounds(geometry); } args.syncPrimaryOverlayWindowLayer('visible'); - showPassiveVisibleOverlay(); - if (!args.forceMousePassthrough && !args.isWindowsPlatform) { + const shouldEnforceLayerOrder = showPassiveVisibleOverlay(); + if (shouldEnforceLayerOrder && !args.forceMousePassthrough && !args.isWindowsPlatform) { args.enforceOverlayLayerOrder(); } args.syncOverlayShortcuts(); @@ -260,11 +271,19 @@ export function updateVisibleOverlayVisibility(args: { return; } + const hasRetainedTrackedGeometry = args.windowTracker.getGeometry() !== null; + const hasActiveMacOSTargetSignal = + args.isMacOSPlatform && (args.windowTracker.isTargetWindowFocused?.() ?? 
false); + const shouldPreserveTransientTrackedOverlay = + (args.isMacOSPlatform && + (hasRetainedTrackedGeometry || (mainWindow.isVisible() && hasActiveMacOSTargetSignal))) || + (args.isWindowsPlatform && + typeof args.windowTracker.isTargetWindowMinimized === 'function' && + !args.windowTracker.isTargetWindowMinimized()); + if ( - args.isWindowsPlatform && - typeof args.windowTracker.isTargetWindowMinimized === 'function' && - !args.windowTracker.isTargetWindowMinimized() && - (mainWindow.isVisible() || args.windowTracker.getGeometry() !== null) + shouldPreserveTransientTrackedOverlay && + (mainWindow.isVisible() || hasRetainedTrackedGeometry) ) { args.setTrackerNotReadyWarningShown(false); const geometry = args.windowTracker.getGeometry(); @@ -272,7 +291,10 @@ export function updateVisibleOverlayVisibility(args: { args.updateVisibleOverlayBounds(geometry); } args.syncPrimaryOverlayWindowLayer('visible'); - showPassiveVisibleOverlay(); + const shouldEnforceLayerOrder = showPassiveVisibleOverlay(); + if (shouldEnforceLayerOrder && !args.forceMousePassthrough && !args.isWindowsPlatform) { + args.enforceOverlayLayerOrder(); + } args.syncOverlayShortcuts(); return; } diff --git a/src/core/services/overlay-window-config.test.ts b/src/core/services/overlay-window-config.test.ts index fda73465..33234bf9 100644 --- a/src/core/services/overlay-window-config.test.ts +++ b/src/core/services/overlay-window-config.test.ts @@ -8,6 +8,7 @@ test('overlay window config explicitly disables renderer sandbox for preload com yomitanSession: null, }); + assert.equal(options.title, 'SubMiner Overlay'); assert.equal(options.backgroundColor, '#00000000'); assert.equal(options.webPreferences?.sandbox, false); assert.equal(options.webPreferences?.backgroundThrottling, false); diff --git a/src/core/services/overlay-window-input.ts b/src/core/services/overlay-window-input.ts index 44f0ab59..54e0c1b7 100644 --- a/src/core/services/overlay-window-input.ts +++ 
b/src/core/services/overlay-window-input.ts @@ -69,10 +69,14 @@ export function handleOverlayWindowBlurred(options: { onWindowsVisibleOverlayBlur?: () => void; platform?: NodeJS.Platform; }): boolean { - if ((options.platform ?? process.platform) === 'win32' && options.kind === 'visible') { + const platform = options.platform ?? process.platform; + if (platform === 'win32' && options.kind === 'visible') { options.onWindowsVisibleOverlayBlur?.(); return false; } + if (platform === 'darwin' && options.kind === 'visible') { + return false; + } if (options.kind === 'visible' && !options.isOverlayVisible(options.kind)) { return false; diff --git a/src/core/services/overlay-window-options.ts b/src/core/services/overlay-window-options.ts index 146373a3..bd69a529 100644 --- a/src/core/services/overlay-window-options.ts +++ b/src/core/services/overlay-window-options.ts @@ -2,6 +2,11 @@ import type { BrowserWindowConstructorOptions, Session } from 'electron'; import * as path from 'path'; import type { OverlayWindowKind } from './overlay-window-input'; +export const OVERLAY_WINDOW_TITLES: Record = { + visible: 'SubMiner Overlay', + modal: 'SubMiner Overlay Modal', +}; + export function buildOverlayWindowOptions( kind: OverlayWindowKind, options: { @@ -14,6 +19,7 @@ export function buildOverlayWindowOptions( return { show: false, + title: OVERLAY_WINDOW_TITLES[kind], width: 800, height: 600, x: 0, diff --git a/src/core/services/overlay-window.test.ts b/src/core/services/overlay-window.test.ts index 4695a52d..b69e4346 100644 --- a/src/core/services/overlay-window.test.ts +++ b/src/core/services/overlay-window.test.ts @@ -146,6 +146,49 @@ test('handleOverlayWindowBlurred notifies Windows visible overlay blur callback assert.deepEqual(calls, ['windows-visible-blur']); }); +test('handleOverlayWindowBlurred skips macOS visible overlay restacking after focus loss', () => { + const calls: string[] = []; + + const handled = handleOverlayWindowBlurred({ + kind: 'visible', + 
windowVisible: true, + isOverlayVisible: () => true, + ensureOverlayWindowLevel: () => { + calls.push('ensure-level'); + }, + moveWindowTop: () => { + calls.push('move-top'); + }, + platform: 'darwin', + }); + + assert.equal(handled, false); + assert.deepEqual(calls, []); +}); + +test('handleOverlayWindowBlurred leaves Windows callback inactive on macOS visible overlay blur', () => { + const calls: string[] = []; + + const handled = handleOverlayWindowBlurred({ + kind: 'visible', + windowVisible: true, + isOverlayVisible: () => true, + ensureOverlayWindowLevel: () => { + calls.push('ensure-level'); + }, + moveWindowTop: () => { + calls.push('move-top'); + }, + onWindowsVisibleOverlayBlur: () => { + calls.push('windows-visible-blur'); + }, + platform: 'darwin', + }); + + assert.equal(handled, false); + assert.deepEqual(calls, []); +}); + test('handleOverlayWindowBlurred preserves active visible/modal window stacking', () => { const calls: string[] = []; diff --git a/src/core/services/overlay-window.ts b/src/core/services/overlay-window.ts index 29406ef8..bb30f98f 100644 --- a/src/core/services/overlay-window.ts +++ b/src/core/services/overlay-window.ts @@ -1,4 +1,5 @@ -import { BrowserWindow, screen, type Session } from 'electron'; +import electron from 'electron'; +import type { BrowserWindow, Session } from 'electron'; import * as path from 'path'; import { WindowGeometry } from '../../types'; import { createLogger } from '../../logger'; @@ -8,12 +9,14 @@ import { handleOverlayWindowBlurred, type OverlayWindowKind, } from './overlay-window-input'; -import { buildOverlayWindowOptions } from './overlay-window-options'; +import { ensureHyprlandWindowFloatingByTitle } from './hyprland-window-placement'; +import { buildOverlayWindowOptions, OVERLAY_WINDOW_TITLES } from './overlay-window-options'; import { normalizeOverlayWindowBoundsForPlatform } from './overlay-window-bounds'; import { OVERLAY_WINDOW_CONTENT_READY_FLAG } from './overlay-window-flags'; export { 
OVERLAY_WINDOW_CONTENT_READY_FLAG } from './overlay-window-flags'; const logger = createLogger('main:overlay-window'); +const { BrowserWindow: ElectronBrowserWindow, screen } = electron; const overlayWindowLayerByInstance = new WeakMap(); const overlayWindowContentReady = new WeakSet(); @@ -50,7 +53,9 @@ export function updateOverlayWindowBounds( window: BrowserWindow | null, ): void { if (!geometry || !window || window.isDestroyed()) return; - window.setBounds(normalizeOverlayWindowBoundsForPlatform(geometry, process.platform, screen)); + const bounds = normalizeOverlayWindowBoundsForPlatform(geometry, process.platform, screen); + window.setBounds(bounds); + ensureHyprlandWindowFloatingByTitle({ title: window.getTitle(), bounds }); } export function ensureOverlayWindowLevel(window: BrowserWindow): void { @@ -67,6 +72,9 @@ export function ensureOverlayWindowLevel(window: BrowserWindow): void { return; } window.setAlwaysOnTop(true); + window.setVisibleOnAllWorkspaces(true, { visibleOnFullScreen: true }); + ensureHyprlandWindowFloatingByTitle({ title: window.getTitle() }); + window.moveTop(); } export function enforceOverlayLayerOrder(options: { @@ -97,7 +105,7 @@ export function createOverlayWindow( yomitanSession?: Session | null; }, ): BrowserWindow { - const window = new BrowserWindow(buildOverlayWindowOptions(kind, options)); + const window = new ElectronBrowserWindow(buildOverlayWindowOptions(kind, options)); (window as BrowserWindow & { [OVERLAY_WINDOW_CONTENT_READY_FLAG]?: boolean })[ OVERLAY_WINDOW_CONTENT_READY_FLAG ] = false; @@ -112,9 +120,15 @@ export function createOverlayWindow( }); window.webContents.on('did-finish-load', () => { + window.setTitle(OVERLAY_WINDOW_TITLES[kind]); options.onRuntimeOptionsChanged(); }); + window.webContents.on('page-title-updated', (event) => { + event.preventDefault(); + window.setTitle(OVERLAY_WINDOW_TITLES[kind]); + }); + window.once('ready-to-show', () => { overlayWindowContentReady.add(window); (window as 
BrowserWindow & { [OVERLAY_WINDOW_CONTENT_READY_FLAG]?: boolean })[ diff --git a/src/core/services/session-bindings.test.ts b/src/core/services/session-bindings.test.ts index 5cf95501..932e4843 100644 --- a/src/core/services/session-bindings.test.ts +++ b/src/core/services/session-bindings.test.ts @@ -2,7 +2,8 @@ import assert from 'node:assert/strict'; import test from 'node:test'; import type { Keybinding } from '../../types'; import type { ConfiguredShortcuts } from '../utils/shortcut-config'; -import { SPECIAL_COMMANDS } from '../../config/definitions'; +import { DEFAULT_CONFIG, DEFAULT_KEYBINDINGS, SPECIAL_COMMANDS } from '../../config/definitions'; +import { resolveConfiguredShortcuts } from '../utils/shortcut-config'; import { compileSessionBindings } from './session-bindings'; function createShortcuts(overrides: Partial = {}): ConfiguredShortcuts { @@ -179,6 +180,35 @@ test('compileSessionBindings drops conflicting bindings that canonicalize to the ]); }); +test('compileSessionBindings keeps default replay and next subtitle session actions on Linux', () => { + const result = compileSessionBindings({ + shortcuts: resolveConfiguredShortcuts(DEFAULT_CONFIG, DEFAULT_CONFIG), + keybindings: DEFAULT_KEYBINDINGS, + statsToggleKey: DEFAULT_CONFIG.stats.toggleKey, + platform: 'linux', + rawConfig: DEFAULT_CONFIG, + }); + + assert.deepEqual( + result.warnings.filter((warning) => warning.kind === 'conflict'), + [], + ); + const bySignature = new Map( + result.bindings.map((binding) => [ + `${binding.key.modifiers.join('+')}+${binding.key.code}`, + binding, + ]), + ); + + const replay = bySignature.get('ctrl+shift+KeyH'); + assert.equal(replay?.actionType, 'session-action'); + assert.equal(replay?.actionId, 'replayCurrentSubtitle'); + + const next = bySignature.get('ctrl+shift+KeyL'); + assert.equal(next?.actionType, 'session-action'); + assert.equal(next?.actionId, 'playNextSubtitle'); +}); + test('compileSessionBindings omits disabled bindings', () => { const result 
= compileSessionBindings({ shortcuts: createShortcuts({ diff --git a/src/core/services/stats-window-runtime.ts b/src/core/services/stats-window-runtime.ts index 55bbdec9..2ffbce95 100644 --- a/src/core/services/stats-window-runtime.ts +++ b/src/core/services/stats-window-runtime.ts @@ -3,10 +3,13 @@ import type { WindowGeometry } from '../../types'; const DEFAULT_STATS_WINDOW_WIDTH = 900; const DEFAULT_STATS_WINDOW_HEIGHT = 700; +export const STATS_WINDOW_TITLE = 'SubMiner Stats'; type StatsWindowLevelController = Pick & Partial>; +type StatsWindowBoundsController = Pick; + function isBareToggleKeyInput(input: Electron.Input, toggleKey: string): boolean { return ( input.type === 'keyDown' && @@ -30,12 +33,13 @@ export function buildStatsWindowOptions(options: { bounds?: WindowGeometry | null; }): BrowserWindowConstructorOptions { return { + title: STATS_WINDOW_TITLE, x: options.bounds?.x, y: options.bounds?.y, width: options.bounds?.width ?? DEFAULT_STATS_WINDOW_WIDTH, height: options.bounds?.height ?? 
DEFAULT_STATS_WINDOW_HEIGHT, frame: false, - transparent: true, + transparent: false, alwaysOnTop: true, resizable: false, skipTaskbar: true, @@ -43,7 +47,7 @@ export function buildStatsWindowOptions(options: { focusable: true, acceptFirstMouse: true, fullscreenable: false, - backgroundColor: '#1e1e2e', + backgroundColor: '#24273a', show: false, webPreferences: { nodeIntegration: false, @@ -54,6 +58,30 @@ export function buildStatsWindowOptions(options: { }; } +export function resolveStatsWindowOuterBoundsForContent( + window: StatsWindowBoundsController, + target: WindowGeometry, +): WindowGeometry { + const outer = window.getBounds(); + const content = window.getContentBounds(); + const leftInset = content.x - outer.x; + const topInset = content.y - outer.y; + const rightInset = outer.x + outer.width - (content.x + content.width); + const bottomInset = outer.y + outer.height - (content.y + content.height); + const insets = [leftInset, topInset, rightInset, bottomInset]; + + if (insets.some((inset) => !Number.isFinite(inset) || inset < 0)) { + return target; + } + + return { + x: target.x - leftInset, + y: target.y - topInset, + width: target.width + leftInset + rightInset, + height: target.height + topInset + bottomInset, + }; +} + export function promoteStatsWindowLevel( window: StatsWindowLevelController, platform: NodeJS.Platform = process.platform, diff --git a/src/core/services/stats-window.test.ts b/src/core/services/stats-window.test.ts index 8bc631c5..cc599afa 100644 --- a/src/core/services/stats-window.test.ts +++ b/src/core/services/stats-window.test.ts @@ -4,6 +4,7 @@ import { buildStatsWindowLoadFileOptions, buildStatsWindowOptions, promoteStatsWindowLevel, + resolveStatsWindowOuterBoundsForContent, shouldHideStatsWindowForInput, } from './stats-window-runtime'; @@ -18,12 +19,14 @@ test('buildStatsWindowOptions uses tracked overlay bounds and preload-friendly w }, }); + assert.equal(options.title, 'SubMiner Stats'); assert.equal(options.x, 120); 
assert.equal(options.y, 80); assert.equal(options.width, 1440); assert.equal(options.height, 900); assert.equal(options.frame, false); - assert.equal(options.transparent, true); + assert.equal(options.transparent, false); + assert.equal(options.backgroundColor, '#24273a'); assert.equal(options.resizable, false); assert.equal(options.webPreferences?.preload, '/tmp/preload-stats.js'); assert.equal(options.webPreferences?.contextIsolation, true); @@ -151,6 +154,33 @@ test('buildStatsWindowLoadFileOptions includes provided stats API base URL', () }); }); +test('resolveStatsWindowOuterBoundsForContent compensates for Wayland content insets', () => { + assert.deepEqual( + resolveStatsWindowOuterBoundsForContent( + { + getBounds: () => ({ x: 0, y: 0, width: 3440, height: 1440 }), + getContentBounds: () => ({ x: 0, y: 14, width: 3440, height: 1426 }), + }, + { x: 0, y: 0, width: 3440, height: 1440 }, + ), + { x: 0, y: -14, width: 3440, height: 1454 }, + ); +}); + +test('resolveStatsWindowOuterBoundsForContent ignores invalid inset geometry', () => { + const target = { x: 0, y: 0, width: 3440, height: 1440 }; + assert.deepEqual( + resolveStatsWindowOuterBoundsForContent( + { + getBounds: () => ({ x: 0, y: 0, width: 3440, height: 1440 }), + getContentBounds: () => ({ x: -1, y: 0, width: 3440, height: 1440 }), + }, + target, + ), + target, + ); +}); + test('promoteStatsWindowLevel raises stats above overlay level on macOS', () => { const calls: string[] = []; promoteStatsWindowLevel( diff --git a/src/core/services/stats-window.ts b/src/core/services/stats-window.ts index f35b11b9..83aade76 100644 --- a/src/core/services/stats-window.ts +++ b/src/core/services/stats-window.ts @@ -6,8 +6,11 @@ import { buildStatsWindowLoadFileOptions, buildStatsWindowOptions, promoteStatsWindowLevel, + resolveStatsWindowOuterBoundsForContent, shouldHideStatsWindowForInput, + STATS_WINDOW_TITLE, } from './stats-window-runtime.js'; +import { ensureHyprlandWindowFloatingByTitle } from 
'./hyprland-window-placement.js'; let statsWindow: BrowserWindow | null = null; let toggleRegistered = false; @@ -27,20 +30,32 @@ export interface StatsWindowOptions { onVisibilityChanged?: (visible: boolean) => void; } -function syncStatsWindowBounds(window: BrowserWindow, bounds: WindowGeometry | null): void { - if (!bounds || window.isDestroyed()) return; +function syncStatsWindowBounds( + window: BrowserWindow, + bounds: WindowGeometry | null, +): WindowGeometry | null { + if (!bounds || window.isDestroyed()) return null; + const outerBounds = resolveStatsWindowOuterBoundsForContent(window, bounds); window.setBounds({ - x: bounds.x, - y: bounds.y, - width: bounds.width, - height: bounds.height, + x: outerBounds.x, + y: outerBounds.y, + width: outerBounds.width, + height: outerBounds.height, }); + return outerBounds; } function showStatsWindow(window: BrowserWindow, options: StatsWindowOptions): void { - syncStatsWindowBounds(window, options.resolveBounds()); + const bounds = options.resolveBounds(); + let placementBounds = syncStatsWindowBounds(window, bounds); promoteStatsWindowLevel(window); window.show(); + placementBounds = syncStatsWindowBounds(window, bounds) ?? placementBounds; + if ( + !ensureHyprlandWindowFloatingByTitle({ title: STATS_WINDOW_TITLE, bounds: placementBounds }) + ) { + placementBounds = syncStatsWindowBounds(window, bounds) ?? 
placementBounds; + } window.focus(); options.onVisibilityChanged?.(true); promoteStatsWindowLevel(window); @@ -59,6 +74,12 @@ export function toggleStatsOverlay(options: StatsWindowOptions): void { }), ); + statsWindow.setTitle(STATS_WINDOW_TITLE); + statsWindow.webContents.on('page-title-updated', (event) => { + event.preventDefault(); + statsWindow?.setTitle(STATS_WINDOW_TITLE); + }); + const indexPath = path.join(options.staticDir, 'index.html'); statsWindow.loadFile(indexPath, buildStatsWindowLoadFileOptions(options.getApiBaseUrl?.())); @@ -74,7 +95,6 @@ export function toggleStatsOverlay(options: StatsWindowOptions): void { options.onVisibilityChanged?.(false); } }); - statsWindow.once('ready-to-show', () => { if (!statsWindow) return; showStatsWindow(statsWindow, options); diff --git a/src/core/services/tokenizer.test.ts b/src/core/services/tokenizer.test.ts index 8a42f858..15f46871 100644 --- a/src/core/services/tokenizer.test.ts +++ b/src/core/services/tokenizer.test.ts @@ -25,6 +25,7 @@ interface YomitanTokenInput { reading?: string; headword?: string; isNameMatch?: boolean; + wordClasses?: string[]; } function makeDepsFromYomitanTokens( @@ -55,6 +56,7 @@ function makeDepsFromYomitanTokens( startPos, endPos, isNameMatch: token.isNameMatch ?? 
false, + wordClasses: token.wordClasses, }; }); }, @@ -77,7 +79,7 @@ function createDeferred() { }; } -test('tokenizeSubtitle assigns JLPT level to parsed Yomitan tokens', async () => { +test('tokenizeSubtitle splits same-line grammar endings before applying annotations', async () => { const result = await tokenizeSubtitle( '猫です', makeDeps({ @@ -86,35 +88,51 @@ test('tokenizeSubtitle assigns JLPT level to parsed Yomitan tokens', async () => ({ isDestroyed: () => false, webContents: { - executeJavaScript: async () => [ - { - source: 'scanning-parser', - index: 0, - content: [ - [ - { - text: '猫', - reading: 'ねこ', - headwords: [[{ term: '猫' }]], - }, - { - text: 'です', - reading: 'です', - headwords: [[{ term: 'です' }]], - }, + executeJavaScript: async (script: string) => { + if (script.includes('getTermFrequencies')) { + return []; + } + + return [ + { + source: 'scanning-parser', + index: 0, + content: [ + [ + { + text: '猫', + reading: 'ねこ', + headwords: [[{ term: '猫' }]], + }, + { + text: 'です', + reading: 'です', + headwords: [[{ term: 'です' }]], + }, + ], ], - ], - }, - ], + }, + ]; + }, }, }) as unknown as Electron.BrowserWindow, tokenizeWithMecab: async () => null, - getJlptLevel: (text) => (text === '猫' ? 'N5' : null), + getFrequencyDictionaryEnabled: () => true, + getFrequencyRank: (text) => (text === '猫' ? 40 : text === 'です' ? 50 : null), + getJlptLevel: (text) => (text === '猫' || text === 'です' ? 
'N5' : null), + isKnownWord: (text) => text === 'です', }), ); - assert.equal(result.tokens?.length, 1); + assert.equal(result.tokens?.length, 2); + assert.equal(result.tokens?.[0]?.surface, '猫'); assert.equal(result.tokens?.[0]?.jlptLevel, 'N5'); + assert.equal(result.tokens?.[0]?.frequencyRank, 40); + assert.equal(result.tokens?.[1]?.surface, 'です'); + assert.equal(result.tokens?.[1]?.isKnown, false); + assert.equal(result.tokens?.[1]?.isNPlusOneTarget, false); + assert.equal(result.tokens?.[1]?.frequencyRank, undefined); + assert.equal(result.tokens?.[1]?.jlptLevel, undefined); }); test('tokenizeSubtitle preserves Yomitan name-match metadata on tokens', async () => { @@ -202,7 +220,7 @@ test('tokenizeSubtitle applies frequency dictionary ranks', async () => { assert.equal(result.tokens?.length, 2); assert.equal(result.tokens?.[0]?.frequencyRank, 23); - assert.equal(result.tokens?.[1]?.frequencyRank, 1200); + assert.equal(result.tokens?.[1]?.frequencyRank, undefined); }); test('tokenizeSubtitle uses left-to-right yomitan scanning to keep full katakana name tokens', async () => { @@ -1552,7 +1570,7 @@ test('tokenizeSubtitle assigns JLPT level to Yomitan tokens', async () => { assert.equal(result.tokens?.[0]?.jlptLevel, 'N4'); }); -test('tokenizeSubtitle can assign JLPT level to Yomitan particle token', async () => { +test('tokenizeSubtitle clears JLPT level from standalone Yomitan particle token', async () => { const result = await tokenizeSubtitle( 'は', makeDepsFromYomitanTokens([{ surface: 'は', reading: 'は', headword: 'は' }], { @@ -1561,7 +1579,7 @@ test('tokenizeSubtitle can assign JLPT level to Yomitan particle token', async ( ); assert.equal(result.tokens?.length, 1); - assert.equal(result.tokens?.[0]?.jlptLevel, 'N5'); + assert.equal(result.tokens?.[0]?.jlptLevel, undefined); }); test('tokenizeSubtitle returns null tokens for empty normalized text', async () => { @@ -2304,6 +2322,29 @@ test('tokenizeSubtitle selects one N+1 target token', async () => { 
assert.equal(targets[0]?.surface, '犬'); }); +test('tokenizeSubtitle does not select kana-only N+1 target tokens', async () => { + const result = await tokenizeSubtitle( + '私のばあい', + makeDepsFromYomitanTokens( + [ + { surface: '私', reading: 'わたし', headword: '私' }, + { surface: 'の', reading: 'の', headword: 'の' }, + { surface: 'ばあい', reading: 'ばあい', headword: '場合' }, + ], + { + getMinSentenceWordsForNPlusOne: () => 2, + isKnownWord: (text) => text === '私', + }, + ), + ); + + assert.equal(result.tokens?.length, 3); + assert.equal( + result.tokens?.some((token) => token.isNPlusOneTarget), + false, + ); +}); + test('tokenizeSubtitle does not mark target when sentence has multiple candidates', async () => { const result = await tokenizeSubtitle( '猫犬', @@ -2358,7 +2399,7 @@ test('tokenizeSubtitle applies N+1 target marking to Yomitan results', async () getYomitanParserWindow: () => parserWindow, tokenizeWithMecab: async () => null, isKnownWord: (text) => text === 'です', - getMinSentenceWordsForNPlusOne: () => 2, + getMinSentenceWordsForNPlusOne: () => 1, }), ); @@ -2805,6 +2846,141 @@ test('tokenizeSubtitle checks known words by surface when configured', async () assert.equal(result.tokens?.[0]?.isKnown, true); }); +test('tokenizeSubtitle preserves Yomitan compound token when MeCab components are known', async () => { + const text = '取り組んでもらいます'; + const result = await tokenizeSubtitle( + text, + makeDeps({ + getYomitanExt: () => ({ id: 'dummy-ext' }) as any, + getYomitanParserWindow: () => + ({ + isDestroyed: () => false, + webContents: { + executeJavaScript: async (script: string) => { + if (script.includes('getTermFrequencies')) { + return []; + } + + if (script.includes('parseText')) { + return [ + { + source: 'scanning-parser', + index: 0, + content: [ + [ + { + text: '取り組んで', + reading: 'とりくんで', + headwords: [[{ term: '取り組む' }]], + }, + ], + [ + { + text: 'もらいます', + reading: 'もらいます', + headwords: [[{ term: 'もらう' }]], + }, + ], + ], + }, + ]; + } + + return [ + { + 
surface: '取り', + reading: 'とり', + headword: '取る', + startPos: 0, + endPos: 2, + }, + { + surface: '組んで', + reading: 'くんで', + headword: '組む', + startPos: 2, + endPos: 5, + }, + { + surface: 'もらいます', + reading: 'もらいます', + headword: 'もらう', + startPos: 5, + endPos: 10, + }, + ]; + }, + }, + }) as unknown as Electron.BrowserWindow, + isKnownWord: (word) => word === '取る' || word === '組む' || word === 'もらう', + tokenizeWithMecab: async () => [ + { + headword: '取り組む', + surface: '取り組ん', + reading: 'トリクン', + startPos: 0, + endPos: 4, + partOfSpeech: PartOfSpeech.verb, + pos1: '動詞', + pos2: '自立', + pos3: '*', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + { + headword: 'で', + surface: 'で', + reading: 'デ', + startPos: 4, + endPos: 5, + partOfSpeech: PartOfSpeech.particle, + pos1: '助詞', + pos2: '接続助詞', + pos3: '*', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + { + headword: 'もらう', + surface: 'もらい', + reading: 'モライ', + startPos: 5, + endPos: 8, + partOfSpeech: PartOfSpeech.verb, + pos1: '動詞', + pos2: '非自立', + pos3: '*', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + { + headword: 'ます', + surface: 'ます', + reading: 'マス', + startPos: 8, + endPos: 10, + partOfSpeech: PartOfSpeech.bound_auxiliary, + pos1: '助動詞', + pos2: '*', + pos3: '*', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + ], + }), + ); + + assert.equal(result.text, text); + assert.equal(result.tokens?.[0]?.surface, '取り組んで'); + assert.equal(result.tokens?.[0]?.headword, '取り組む'); + assert.equal(result.tokens?.[0]?.isKnown, false); + assert.equal(result.tokens?.[0]?.pos1, '動詞|助詞'); +}); + test('tokenizeSubtitle uses frequency surface match mode when configured', async () => { const result = await tokenizeSubtitle( '鍛えた', @@ -3034,6 +3210,85 @@ test('tokenizeSubtitle skips all enrichment stages when disabled', async () => { assert.equal(frequencyCalls, 0); }); +test('tokenizeSubtitle uses Yomitan word classes to classify standalone 
particles', async () => { + let mecabCalls = 0; + const result = await tokenizeSubtitle( + 'は', + makeDepsFromYomitanTokens( + [{ surface: 'は', reading: 'は', headword: 'は', wordClasses: ['prt'] }], + { + getFrequencyDictionaryEnabled: () => true, + getFrequencyRank: (text) => (text === 'は' ? 10 : null), + getJlptLevel: (text) => (text === 'は' ? 'N5' : null), + tokenizeWithMecab: async () => { + mecabCalls += 1; + return null; + }, + }, + ), + ); + + assert.equal(mecabCalls, 1); + assert.equal(result.tokens?.length, 1); + assert.equal(result.tokens?.[0]?.partOfSpeech, PartOfSpeech.particle); + assert.equal(result.tokens?.[0]?.pos1, '助詞'); + assert.equal(result.tokens?.[0]?.isNPlusOneTarget, false); + assert.equal(result.tokens?.[0]?.frequencyRank, undefined); + assert.equal(result.tokens?.[0]?.jlptLevel, undefined); +}); + +test('tokenizeSubtitle uses Yomitan word classes to classify auxiliary subclasses', async () => { + const result = await tokenizeSubtitle( + 'です', + makeDepsFromYomitanTokens( + [{ surface: 'です', reading: 'です', headword: 'です', wordClasses: ['aux-v'] }], + { + getFrequencyDictionaryEnabled: () => true, + getFrequencyRank: () => 10, + getJlptLevel: () => 'N5', + tokenizeWithMecab: async () => null, + }, + ), + ); + + assert.equal(result.tokens?.length, 1); + assert.equal(result.tokens?.[0]?.partOfSpeech, PartOfSpeech.bound_auxiliary); + assert.equal(result.tokens?.[0]?.pos1, '助動詞'); + assert.equal(result.tokens?.[0]?.frequencyRank, undefined); + assert.equal(result.tokens?.[0]?.jlptLevel, undefined); +}); + +test('tokenizeSubtitle fills detailed MeCab POS when Yomitan word class supplies coarse POS', async () => { + const result = await tokenizeSubtitle( + 'は', + makeDepsFromYomitanTokens( + [{ surface: 'は', reading: 'は', headword: 'は', wordClasses: ['prt'] }], + { + tokenizeWithMecab: async () => [ + { + headword: 'は', + surface: 'は', + reading: 'ハ', + startPos: 0, + endPos: 1, + partOfSpeech: PartOfSpeech.particle, + pos1: '助詞', + pos2: '係助詞', + 
pos3: '*', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + ], + }, + ), + ); + + assert.equal(result.tokens?.[0]?.partOfSpeech, PartOfSpeech.particle); + assert.equal(result.tokens?.[0]?.pos1, '助詞'); + assert.equal(result.tokens?.[0]?.pos2, '係助詞'); +}); + test('tokenizeSubtitle keeps frequency enrichment while n+1 is disabled', async () => { let knownCalls = 0; let mecabCalls = 0; @@ -3110,6 +3365,60 @@ test('tokenizeSubtitle excludes default non-independent pos2 from N+1 and freque assert.equal(result.tokens?.[0]?.isNPlusOneTarget, false); }); +test('tokenizeSubtitle clears known-word highlight for exact non-independent kanji noun tokens', async () => { + const result = await tokenizeSubtitle( + 'その点', + makeDepsFromYomitanTokens( + [ + { surface: 'その', reading: 'その', headword: 'その' }, + { surface: '点', reading: 'てん', headword: '点' }, + ], + { + isKnownWord: (text) => text === '点' || text === 'てん', + getFrequencyDictionaryEnabled: () => true, + getFrequencyRank: (text) => (text === '点' ? 1384 : null), + getJlptLevel: (text) => (text === '点' ? 
'N3' : null), + tokenizeWithMecab: async () => [ + { + headword: 'その', + surface: 'その', + reading: 'ソノ', + startPos: 0, + endPos: 2, + partOfSpeech: PartOfSpeech.other, + pos1: '連体詞', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + { + headword: '点', + surface: '点', + reading: 'テン', + startPos: 2, + endPos: 3, + partOfSpeech: PartOfSpeech.noun, + pos1: '名詞', + pos2: '非自立', + pos3: '一般', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + ], + }, + ), + ); + + assert.equal(result.tokens?.length, 2); + assert.equal(result.tokens?.[0]?.isKnown, false); + assert.equal(result.tokens?.[1]?.surface, '点'); + assert.equal(result.tokens?.[1]?.isKnown, false); + assert.equal(result.tokens?.[1]?.isNPlusOneTarget, false); + assert.equal(result.tokens?.[1]?.frequencyRank, undefined); + assert.equal(result.tokens?.[1]?.jlptLevel, undefined); +}); + test('tokenizeSubtitle keeps mecab-tagged interjections tokenized while clearing annotation metadata', async () => { const result = await tokenizeSubtitle( 'ぐはっ', @@ -3161,7 +3470,7 @@ test('tokenizeSubtitle keeps mecab-tagged interjections tokenized while clearing ); }); -test('tokenizeSubtitle keeps excluded interjections hoverable while clearing only their annotation metadata', async () => { +test('tokenizeSubtitle keeps excluded interjections hoverable while clearing annotation metadata', async () => { const result = await tokenizeSubtitle( 'ぐはっ 猫', makeDeps({ @@ -3235,7 +3544,7 @@ test('tokenizeSubtitle keeps excluded interjections hoverable while clearing onl ); }); -test('tokenizeSubtitle keeps explanatory ending variants hoverable while clearing only their annotation metadata', async () => { +test('tokenizeSubtitle keeps explanatory ending variants hoverable while clearing annotation metadata', async () => { const result = await tokenizeSubtitle( '猫んです', makeDepsFromYomitanTokens( @@ -3306,7 +3615,7 @@ test('tokenizeSubtitle keeps explanatory ending variants hoverable while clearin ); 
}); -test('tokenizeSubtitle keeps standalone grammar-only tokens hoverable while clearing only their annotation metadata', async () => { +test('tokenizeSubtitle keeps standalone grammar-only tokens hoverable while clearing annotation metadata', async () => { const result = await tokenizeSubtitle( '私はこの猫です', makeDeps({ @@ -3425,7 +3734,7 @@ test('tokenizeSubtitle keeps standalone grammar-only tokens hoverable while clea ); }); -test('tokenizeSubtitle keeps trailing quote-particle merged tokens hoverable while clearing only their annotation metadata', async () => { +test('tokenizeSubtitle keeps trailing quote-particle merged tokens hoverable while clearing annotation metadata', async () => { const result = await tokenizeSubtitle( 'どうしてもって', makeDepsFromYomitanTokens( @@ -3574,7 +3883,7 @@ test('tokenizeSubtitle excludes single-kana merged tokens from frequency highlig assert.equal(result.tokens?.[0]?.frequencyRank, undefined); }); -test('tokenizeSubtitle excludes merged function/content token from frequency highlighting but keeps N+1', async () => { +test('tokenizeSubtitle excludes merged kana-only function/content token from frequency and N+1', async () => { const result = await tokenizeSubtitle( 'になれば', makeDepsFromYomitanTokens([{ surface: 'になれば', reading: 'になれば', headword: 'なる' }], { @@ -3628,7 +3937,7 @@ test('tokenizeSubtitle excludes merged function/content token from frequency hig assert.equal(result.tokens?.length, 1); assert.equal(result.tokens?.[0]?.pos1, '助詞|動詞'); assert.equal(result.tokens?.[0]?.frequencyRank, undefined); - assert.equal(result.tokens?.[0]?.isNPlusOneTarget, true); + assert.equal(result.tokens?.[0]?.isNPlusOneTarget, false); }); test('tokenizeSubtitle clears all annotations for kana-only demonstrative helper merges', async () => { @@ -3827,7 +4136,7 @@ test('tokenizeSubtitle clears all annotations for explanatory pondering endings' surface: 'どうかしちゃった', headword: 'どうかしちゃう', isKnown: false, - isNPlusOneTarget: true, + isNPlusOneTarget: 
false, frequencyRank: 3200, jlptLevel: 'N3', }, @@ -3903,6 +4212,159 @@ test('tokenizeSubtitle keeps frequency for content-led merged token with trailin assert.equal(result.tokens?.[0]?.frequencyRank, 5468); }); +test('tokenizeSubtitle keeps frequency for ordinal prefix-noun tokens', async () => { + const result = await tokenizeSubtitle( + '第二走者', + makeDepsFromYomitanTokens( + [ + { surface: '第二', reading: 'だいに', headword: '第二' }, + { surface: '走者', reading: 'そうしゃ', headword: '走者' }, + ], + { + getFrequencyDictionaryEnabled: () => true, + getFrequencyRank: (text) => (text === '第二' ? 1820 : text === '走者' ? 41555 : null), + tokenizeWithMecab: async () => [ + { + headword: '第', + surface: '第', + reading: 'ダイ', + startPos: 0, + endPos: 1, + partOfSpeech: PartOfSpeech.other, + pos1: '接頭詞', + pos2: '数接続', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + { + headword: '二', + surface: '二', + reading: 'ニ', + startPos: 1, + endPos: 2, + partOfSpeech: PartOfSpeech.noun, + pos1: '名詞', + pos2: '数', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + { + headword: '走者', + surface: '走者', + reading: 'ソウシャ', + startPos: 2, + endPos: 4, + partOfSpeech: PartOfSpeech.noun, + pos1: '名詞', + pos2: '一般', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + ], + getMinSentenceWordsForNPlusOne: () => 1, + }, + ), + ); + + assert.equal(result.tokens?.[0]?.surface, '第二'); + assert.equal(result.tokens?.[0]?.pos1, '接頭詞|名詞'); + assert.equal(result.tokens?.[0]?.pos2, '数接続|数'); + assert.equal(result.tokens?.[0]?.frequencyRank, 1820); +}); + +test('tokenizeSubtitle keeps frequency for honorific prefix-noun tokens', async () => { + const result = await tokenizeSubtitle( + 'ご機嫌が良くない', + makeDepsFromYomitanTokens( + [ + { surface: 'ご機嫌', reading: 'ごきげん', headword: 'ご機嫌' }, + { surface: 'が', reading: 'が', headword: 'が' }, + { surface: '良くない', reading: 'よくない', headword: '良い' }, + ], + { + getFrequencyDictionaryEnabled: () => true, + 
getFrequencyRank: (text) => (text === 'ご機嫌' ? 5484 : null), + tokenizeWithMecab: async () => [ + { + headword: 'ご', + surface: 'ご', + reading: 'ゴ', + startPos: 0, + endPos: 1, + partOfSpeech: PartOfSpeech.other, + pos1: '接頭詞', + pos2: '名詞接続', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + { + headword: '機嫌', + surface: '機嫌', + reading: 'キゲン', + startPos: 1, + endPos: 3, + partOfSpeech: PartOfSpeech.noun, + pos1: '名詞', + pos2: '一般', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + { + headword: 'が', + surface: 'が', + reading: 'ガ', + startPos: 3, + endPos: 4, + partOfSpeech: PartOfSpeech.particle, + pos1: '助詞', + pos2: '格助詞', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + { + headword: '良い', + surface: '良く', + reading: 'ヨク', + startPos: 4, + endPos: 6, + partOfSpeech: PartOfSpeech.i_adjective, + pos1: '形容詞', + pos2: '自立', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + { + headword: 'ない', + surface: 'ない', + reading: 'ナイ', + startPos: 6, + endPos: 8, + partOfSpeech: PartOfSpeech.bound_auxiliary, + pos1: '助動詞', + pos2: '*', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + ], + getMinSentenceWordsForNPlusOne: () => 1, + }, + ), + ); + + assert.equal(result.tokens?.[0]?.surface, 'ご機嫌'); + assert.equal(result.tokens?.[0]?.pos1, '接頭詞|名詞'); + assert.equal(result.tokens?.[0]?.pos2, '名詞接続|一般'); + assert.equal(result.tokens?.[0]?.frequencyRank, 5484); +}); + test('tokenizeSubtitle clears all annotations for explanatory contrast endings', async () => { const result = await tokenizeSubtitle( '最近辛いものが続いとるんですけど', @@ -4069,6 +4531,211 @@ test('tokenizeSubtitle clears all annotations for explanatory contrast endings', ); }); +test('tokenizeSubtitle clears annotations for ja-nai explanatory endings and aru verbs', async () => { + const result = await tokenizeSubtitle( + 'みたいなのあるじゃないですか', + makeDepsFromYomitanTokens( + [ + { surface: 'みたいな', reading: 'みたいな', headword: 
'みたい' }, + { surface: 'の', reading: 'の', headword: 'の' }, + { surface: 'ある', reading: 'ある', headword: 'ある' }, + { surface: 'じゃないですか', reading: 'じゃないですか', headword: 'じゃない' }, + ], + { + getFrequencyDictionaryEnabled: () => true, + getFrequencyRank: (text) => + text === 'みたい' ? 320 : text === 'ある' ? 240 : text === 'じゃない' ? 80 : null, + getJlptLevel: (text) => + text === 'みたい' ? 'N4' : text === 'ある' ? 'N5' : text === 'じゃない' ? 'N5' : null, + isKnownWord: (text) => text === 'みたい' || text === 'の' || text === 'ある', + getMinSentenceWordsForNPlusOne: () => 1, + tokenizeWithMecab: async () => [ + { + headword: 'みたい', + surface: 'みたい', + reading: 'ミタイ', + startPos: 0, + endPos: 3, + partOfSpeech: PartOfSpeech.noun, + pos1: '名詞', + pos2: '非自立', + pos3: '形容動詞語幹', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + { + headword: 'だ', + surface: 'な', + reading: 'ナ', + startPos: 3, + endPos: 4, + partOfSpeech: PartOfSpeech.bound_auxiliary, + pos1: '助動詞', + pos2: '*', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + { + headword: 'の', + surface: 'の', + reading: 'ノ', + startPos: 4, + endPos: 5, + partOfSpeech: PartOfSpeech.noun, + pos1: '名詞', + pos2: '非自立', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + { + headword: 'ある', + surface: 'ある', + reading: 'アル', + startPos: 5, + endPos: 7, + partOfSpeech: PartOfSpeech.verb, + pos1: '動詞', + pos2: '自立', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + { + headword: 'じゃない', + surface: 'じゃない', + reading: 'ジャナイ', + startPos: 7, + endPos: 11, + partOfSpeech: PartOfSpeech.i_adjective, + pos1: '接続詞|形容詞', + pos2: '*|自立', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + { + headword: 'です', + surface: 'です', + reading: 'デス', + startPos: 11, + endPos: 13, + partOfSpeech: PartOfSpeech.bound_auxiliary, + pos1: '助動詞', + pos2: '*', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + { + headword: 'か', + surface: 'か', + reading: 
'カ', + startPos: 13, + endPos: 14, + partOfSpeech: PartOfSpeech.particle, + pos1: '助詞', + pos2: '副助詞/並立助詞/終助詞', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + ], + }, + ), + ); + + const tokenSummary = result.tokens?.map((token) => ({ + surface: token.surface, + headword: token.headword, + isKnown: token.isKnown, + isNPlusOneTarget: token.isNPlusOneTarget, + frequencyRank: token.frequencyRank, + jlptLevel: token.jlptLevel, + })); + + assert.deepEqual( + tokenSummary?.find((token) => token.surface === 'じゃないですか'), + { + surface: 'じゃないですか', + headword: 'じゃない', + isKnown: false, + isNPlusOneTarget: false, + frequencyRank: undefined, + jlptLevel: undefined, + }, + ); + assert.deepEqual( + tokenSummary?.find((token) => token.surface === 'ある'), + { + surface: 'ある', + headword: 'ある', + isKnown: false, + isNPlusOneTarget: false, + frequencyRank: undefined, + jlptLevel: undefined, + }, + ); +}); + +test('tokenizeSubtitle clears annotations for standalone polite copula endings without POS metadata', async () => { + const result = await tokenizeSubtitle( + '現実は感じですよ', + makeDepsFromYomitanTokens( + [ + { surface: '現実', reading: 'げんじつ', headword: '現実' }, + { surface: 'は', reading: 'は', headword: 'は' }, + { surface: '感じ', reading: 'かんじ', headword: '感じ' }, + { surface: 'ですよ', reading: 'ですよ', headword: 'です' }, + ], + { + getFrequencyDictionaryEnabled: () => true, + getFrequencyRank: (text) => + text === '現実' ? 600 : text === '感じ' ? 240 : text === 'です' ? 50 : null, + getJlptLevel: (text) => + text === '現実' ? 'N3' : text === '感じ' ? 'N4' : text === 'です' ? 
'N5' : null, + isKnownWord: (text) => text === '現実' || text === 'は' || text === 'です', + getMinSentenceWordsForNPlusOne: () => 1, + tokenizeWithMecab: async () => null, + }, + ), + ); + + const tokenSummary = result.tokens?.map((token) => ({ + surface: token.surface, + headword: token.headword, + isKnown: token.isKnown, + isNPlusOneTarget: token.isNPlusOneTarget, + frequencyRank: token.frequencyRank, + jlptLevel: token.jlptLevel, + })); + + assert.deepEqual( + tokenSummary?.find((token) => token.surface === 'ですよ'), + { + surface: 'ですよ', + headword: 'です', + isKnown: false, + isNPlusOneTarget: false, + frequencyRank: undefined, + jlptLevel: undefined, + }, + ); + assert.deepEqual( + tokenSummary?.find((token) => token.surface === '感じ'), + { + surface: '感じ', + headword: '感じ', + isKnown: false, + isNPlusOneTarget: true, + frequencyRank: 240, + jlptLevel: 'N4', + }, + ); +}); + test('tokenizeSubtitle clears annotations for ことに while preserving lexical N+1 target', async () => { const result = await tokenizeSubtitle( 'さっきの俺と違うことに気付かないのかい?', @@ -4288,6 +4955,251 @@ test('tokenizeSubtitle clears annotations for ことに while preserving lexical ); }); +test('tokenizeSubtitle clears annotations for auxiliary inflection fragments while preserving lexical N+1 target', async () => { + const result = await tokenizeSubtitle( + '私れた猫', + makeDepsFromYomitanTokens( + [ + { surface: '私', reading: 'わたし', headword: '私' }, + { surface: 'れた', reading: 'れた', headword: 'れる' }, + { surface: '猫', reading: 'ねこ', headword: '猫' }, + ], + { + getFrequencyDictionaryEnabled: () => true, + getFrequencyRank: (text) => + text === '私' ? 50 : text === 'れる' ? 18 : text === '猫' ? 900 : null, + getJlptLevel: (text) => + text === '私' ? 'N5' : text === 'れる' ? 'N4' : text === '猫' ? 
'N5' : null, + isKnownWord: (text) => text === '私' || text === 'れる', + getMinSentenceWordsForNPlusOne: () => 1, + tokenizeWithMecab: async () => [ + { + headword: '私', + surface: '私', + reading: 'ワタシ', + startPos: 0, + endPos: 1, + partOfSpeech: PartOfSpeech.noun, + pos1: '名詞', + pos2: '代名詞', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + { + headword: 'れる', + surface: 'れ', + reading: 'レ', + startPos: 1, + endPos: 2, + partOfSpeech: PartOfSpeech.verb, + pos1: '動詞', + pos2: '接尾', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + { + headword: 'た', + surface: 'た', + reading: 'タ', + startPos: 2, + endPos: 3, + partOfSpeech: PartOfSpeech.bound_auxiliary, + pos1: '助動詞', + pos2: '*', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + { + headword: '猫', + surface: '猫', + reading: 'ネコ', + startPos: 3, + endPos: 4, + partOfSpeech: PartOfSpeech.noun, + pos1: '名詞', + pos2: '一般', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + ], + }, + ), + ); + + const tokenSummary = result.tokens?.map((token) => ({ + surface: token.surface, + headword: token.headword, + isKnown: token.isKnown, + isNPlusOneTarget: token.isNPlusOneTarget, + frequencyRank: token.frequencyRank, + jlptLevel: token.jlptLevel, + })); + + assert.deepEqual( + tokenSummary?.find((token) => token.surface === 'れた'), + { + surface: 'れた', + headword: 'れる', + isKnown: false, + isNPlusOneTarget: false, + frequencyRank: undefined, + jlptLevel: undefined, + }, + ); + assert.deepEqual( + tokenSummary?.find((token) => token.surface === '猫'), + { + surface: '猫', + headword: '猫', + isKnown: false, + isNPlusOneTarget: true, + frequencyRank: 900, + jlptLevel: 'N5', + }, + ); +}); + +test('tokenizeSubtitle clears annotations for te-kureru auxiliary helper spans', async () => { + const result = await tokenizeSubtitle( + 'ベアトリスがいてくれたから', + makeDepsFromYomitanTokens( + [ + { surface: 'ベアトリス', reading: 'べあとりす', headword: 'ベアトリス' }, + { surface: 'が', 
reading: 'が', headword: 'が' }, + { surface: 'い', reading: 'い', headword: 'いる' }, + { surface: 'てく', reading: 'てく', headword: 'てく' }, + { surface: 'れた', reading: 'れた', headword: 'れる' }, + { surface: 'から', reading: 'から', headword: 'から' }, + ], + { + getFrequencyDictionaryEnabled: () => true, + getFrequencyRank: (text) => + text === 'ベアトリス' ? 1000 : text === 'てく' ? 140 : text === 'れる' ? 19 : null, + getJlptLevel: (text) => + text === 'てく' || text === 'れる' || text === 'いる' ? 'N4' : null, + isKnownWord: (text) => text === 'てく' || text === 'れる', + getMinSentenceWordsForNPlusOne: () => 1, + tokenizeWithMecab: async () => [ + { + headword: 'ベアトリス', + surface: 'ベアトリス', + reading: 'ベアトリス', + startPos: 0, + endPos: 5, + partOfSpeech: PartOfSpeech.noun, + pos1: '名詞', + pos2: '固有名詞', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + { + headword: 'が', + surface: 'が', + reading: 'ガ', + startPos: 5, + endPos: 6, + partOfSpeech: PartOfSpeech.particle, + pos1: '助詞', + pos2: '格助詞', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + { + headword: 'いる', + surface: 'い', + reading: 'イ', + startPos: 6, + endPos: 7, + partOfSpeech: PartOfSpeech.verb, + pos1: '動詞', + pos2: '自立', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + { + headword: 'てく', + surface: 'てく', + reading: 'テク', + startPos: 7, + endPos: 9, + partOfSpeech: PartOfSpeech.verb, + pos1: '助詞|動詞', + pos2: '接続助詞|非自立', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + { + headword: 'れる', + surface: 'れた', + reading: 'レタ', + startPos: 9, + endPos: 11, + partOfSpeech: PartOfSpeech.verb, + pos1: '動詞|助動詞', + pos2: '接尾|*', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + { + headword: 'から', + surface: 'から', + reading: 'カラ', + startPos: 11, + endPos: 13, + partOfSpeech: PartOfSpeech.particle, + pos1: '助詞', + pos2: '接続助詞', + isMerged: false, + isKnown: false, + isNPlusOneTarget: false, + }, + ], + }, + ), + ); + + const tokenSummary = 
result.tokens?.map((token) => ({ + surface: token.surface, + headword: token.headword, + isKnown: token.isKnown, + isNPlusOneTarget: token.isNPlusOneTarget, + frequencyRank: token.frequencyRank, + jlptLevel: token.jlptLevel, + })); + + assert.deepEqual( + tokenSummary?.find((token) => token.surface === 'てく'), + { + surface: 'てく', + headword: 'てく', + isKnown: false, + isNPlusOneTarget: false, + frequencyRank: undefined, + jlptLevel: undefined, + }, + ); + assert.deepEqual( + tokenSummary?.find((token) => token.surface === 'れた'), + { + surface: 'れた', + headword: 'れる', + isKnown: false, + isNPlusOneTarget: false, + frequencyRank: undefined, + jlptLevel: undefined, + }, + ); +}); + test('tokenizeSubtitle excludes default non-independent pos2 from N+1 when JLPT/frequency are disabled', async () => { let mecabCalls = 0; const result = await tokenizeSubtitle( diff --git a/src/core/services/tokenizer.ts b/src/core/services/tokenizer.ts index 240a97a8..63be5eba 100644 --- a/src/core/services/tokenizer.ts +++ b/src/core/services/tokenizer.ts @@ -96,6 +96,7 @@ interface TokenizerAnnotationOptions { minSentenceWordsForNPlusOne: number | undefined; pos1Exclusions: ReadonlySet; pos2Exclusions: ReadonlySet; + sourceText?: string; } let parserEnrichmentWorkerRuntimeModulePromise: Promise< @@ -159,7 +160,7 @@ async function applyAnnotationStage( options: TokenizerAnnotationOptions, ): Promise { if (!hasAnyAnnotationEnabled(options)) { - return tokens; + return stripSubtitleAnnotationMetadata(tokens, options); } if (!annotationStageModulePromise) { @@ -178,7 +179,10 @@ async function applyAnnotationStage( ); } -async function stripSubtitleAnnotationMetadata(tokens: MergedToken[]): Promise { +async function stripSubtitleAnnotationMetadata( + tokens: MergedToken[], + options: TokenizerAnnotationOptions, +): Promise { if (tokens.length === 0) { return tokens; } @@ -188,7 +192,7 @@ async function stripSubtitleAnnotationMetadata(tokens: MergedToken[]): Promise 
annotationStage.stripSubtitleAnnotationMetadata(token)); + return tokens.map((token) => annotationStage.stripSubtitleAnnotationMetadata(token, options)); } export function createTokenizerDepsRuntime( @@ -333,6 +337,66 @@ function normalizeSelectedYomitanTokens(tokens: MergedToken[]): MergedToken[] { })); } +function normalizeYomitanWordClasses(wordClasses: unknown): string[] { + if (!Array.isArray(wordClasses)) { + return []; + } + + const normalized: string[] = []; + for (const wordClass of wordClasses) { + if (typeof wordClass !== 'string') { + continue; + } + const trimmed = wordClass.trim(); + if (trimmed && !normalized.includes(trimmed)) { + normalized.push(trimmed); + } + } + return normalized; +} + +function resolvePartOfSpeechFromYomitanWordClasses(wordClasses: string[]): { + partOfSpeech: PartOfSpeech; + pos1?: string; +} { + if (wordClasses.includes('prt')) { + return { partOfSpeech: PartOfSpeech.particle, pos1: '助詞' }; + } + if (wordClasses.some((wordClass) => wordClass === 'aux' || wordClass.startsWith('aux-'))) { + return { partOfSpeech: PartOfSpeech.bound_auxiliary, pos1: '助動詞' }; + } + if (wordClasses.some((wordClass) => wordClass.startsWith('v'))) { + return { partOfSpeech: PartOfSpeech.verb, pos1: '動詞' }; + } + if (wordClasses.includes('adj-i') || wordClasses.includes('adj-ix')) { + return { partOfSpeech: PartOfSpeech.i_adjective, pos1: '形容詞' }; + } + if (wordClasses.includes('adj-na')) { + return { partOfSpeech: PartOfSpeech.na_adjective, pos1: '名詞' }; + } + if ( + wordClasses.some( + (wordClass) => + wordClass === 'n' || + wordClass === 'num' || + wordClass === 'ctr' || + wordClass === 'pn' || + wordClass.startsWith('n-'), + ) + ) { + return { partOfSpeech: PartOfSpeech.noun, pos1: '名詞' }; + } + + return { partOfSpeech: PartOfSpeech.other }; +} + +function getYomitanWordClassPosMetadata(wordClasses: unknown): { + partOfSpeech: PartOfSpeech; + pos1?: string; +} { + return 
resolvePartOfSpeechFromYomitanWordClasses(normalizeYomitanWordClasses(wordClasses)); +} + function resolveFrequencyLookupText( token: MergedToken, matchMode: FrequencyDictionaryMatchMode, @@ -622,21 +686,23 @@ async function parseWithYomitanInternalParser( return null; } const normalizedSelectedTokens = normalizeSelectedYomitanTokens( - selectedTokens.map( - (token): MergedToken => ({ + selectedTokens.map((token): MergedToken => { + const posMetadata = getYomitanWordClassPosMetadata(token.wordClasses); + return { surface: token.surface, reading: token.reading, headword: token.headword, startPos: token.startPos, endPos: token.endPos, - partOfSpeech: PartOfSpeech.other, + partOfSpeech: posMetadata.partOfSpeech, + pos1: posMetadata.pos1, isMerged: true, isKnown: false, isNPlusOneTarget: false, isNameMatch: token.isNameMatch ?? false, frequencyRank: token.frequencyRank, - }), - ), + }; + }), ); if (deps.getYomitanGroupDebugEnabled?.() === true) { @@ -716,12 +782,11 @@ export async function tokenizeSubtitle( .replace(/\s+/g, ' ') .trim(); const annotationOptions = getAnnotationOptions(deps); + annotationOptions.sourceText = tokenizeText; const yomitanTokens = await parseWithYomitanInternalParser(tokenizeText, deps, annotationOptions); if (yomitanTokens && yomitanTokens.length > 0) { - const annotatedTokens = await stripSubtitleAnnotationMetadata( - await applyAnnotationStage(yomitanTokens, deps, annotationOptions), - ); + const annotatedTokens = await applyAnnotationStage(yomitanTokens, deps, annotationOptions); return { text: displayText, tokens: annotatedTokens.length > 0 ? 
annotatedTokens : null, diff --git a/src/core/services/tokenizer/annotation-stage.test.ts b/src/core/services/tokenizer/annotation-stage.test.ts index f5fca15c..d64a50d7 100644 --- a/src/core/services/tokenizer/annotation-stage.test.ts +++ b/src/core/services/tokenizer/annotation-stage.test.ts @@ -259,6 +259,73 @@ test('shouldExcludeTokenFromSubtitleAnnotations excludes explanatory contrast en assert.equal(shouldExcludeTokenFromSubtitleAnnotations(token), true); }); +test('shouldExcludeTokenFromSubtitleAnnotations excludes ja-nai explanatory endings', () => { + const tokens = [ + makeToken({ + surface: 'じゃない', + headword: 'じゃない', + reading: 'ジャナイ', + partOfSpeech: PartOfSpeech.i_adjective, + pos1: '接続詞|形容詞', + pos2: '*|自立', + }), + makeToken({ + surface: 'じゃないですか', + headword: 'じゃない', + reading: 'ジャナイデスカ', + partOfSpeech: PartOfSpeech.i_adjective, + pos1: '接続詞|形容詞|助動詞|助詞', + pos2: '*|自立|*|副助詞/並立助詞/終助詞', + }), + ]; + + for (const token of tokens) { + assert.equal(shouldExcludeTokenFromSubtitleAnnotations(token), true, token.surface); + } +}); + +test('shouldExcludeTokenFromSubtitleAnnotations excludes standalone polite copula suffix endings without POS tags', () => { + const tokens = [ + makeToken({ + surface: 'ですよ', + headword: 'です', + reading: 'デスヨ', + partOfSpeech: PartOfSpeech.other, + pos1: '', + pos2: '', + }), + ]; + + for (const token of tokens) { + assert.equal(shouldExcludeTokenFromSubtitleAnnotations(token), true, token.surface); + } +}); + +test('shouldExcludeTokenFromSubtitleAnnotations excludes grammar-ending patterns without enumerating variants', () => { + const tokens = [ + makeToken({ + surface: 'ですわ', + headword: 'です', + reading: 'デスワ', + partOfSpeech: PartOfSpeech.other, + pos1: '', + pos2: '', + }), + makeToken({ + surface: 'ではないですか', + headword: 'ない', + reading: 'デハナイデスカ', + partOfSpeech: PartOfSpeech.other, + pos1: '', + pos2: '', + }), + ]; + + for (const token of tokens) { + assert.equal(shouldExcludeTokenFromSubtitleAnnotations(token), 
true, token.surface); + } +}); + test('shouldExcludeTokenFromSubtitleAnnotations excludes auxiliary-stem そうだ grammar tails', () => { const token = makeToken({ surface: 'そうだ', @@ -448,7 +515,169 @@ test('shouldExcludeTokenFromVocabularyPersistence excludes common frequency stop } }); -test('stripSubtitleAnnotationMetadata keeps token hover data while clearing annotation fields', () => { +test('shouldExcludeTokenFromSubtitleAnnotations excludes standalone して grammar helper fragments', () => { + const token = makeToken({ + surface: 'して', + headword: 'する', + reading: 'シテ', + partOfSpeech: PartOfSpeech.verb, + pos1: '動詞|助詞', + pos2: '自立|接続助詞', + }); + + assert.equal(shouldExcludeTokenFromSubtitleAnnotations(token), true); +}); + +test('shouldExcludeTokenFromSubtitleAnnotations excludes inflected standalone して grammar helper fragments', () => { + const token = makeToken({ + surface: 'してる', + headword: 'する', + reading: 'シテル', + partOfSpeech: PartOfSpeech.verb, + pos1: '動詞|助動詞', + pos2: '自立|非自立', + }); + + assert.equal(shouldExcludeTokenFromSubtitleAnnotations(token), true); +}); + +test('shouldExcludeTokenFromSubtitleAnnotations excludes standalone particle fragments without POS tags', () => { + const token = makeToken({ + surface: 'と', + headword: 'と', + reading: 'ト', + partOfSpeech: PartOfSpeech.other, + pos1: '', + pos2: '', + }); + + assert.equal(shouldExcludeTokenFromSubtitleAnnotations(token), true); +}); + +test('shouldExcludeTokenFromSubtitleAnnotations excludes standalone connective particle fragments without POS tags', () => { + const token = makeToken({ + surface: 'たって', + headword: 'たって', + reading: 'タッテ', + partOfSpeech: PartOfSpeech.other, + pos1: '', + pos2: '', + }); + + assert.equal(shouldExcludeTokenFromSubtitleAnnotations(token), true); +}); + +test('shouldExcludeTokenFromSubtitleAnnotations keeps lexical verbs whose reading matches connective particles', () => { + const token = makeToken({ + surface: '立って', + headword: '立つ', + reading: 'タッテ', + 
partOfSpeech: PartOfSpeech.verb, + pos1: '動詞', + pos2: '自立', + }); + + assert.equal(shouldExcludeTokenFromSubtitleAnnotations(token), false); +}); + +test('shouldExcludeTokenFromSubtitleAnnotations excludes rhetorical もんか grammar particle phrases', () => { + for (const surface of ['もんか', 'ものか']) { + const token = makeToken({ + surface, + headword: surface, + reading: surface === 'もんか' ? 'モンカ' : 'モノカ', + partOfSpeech: PartOfSpeech.noun, + pos1: '名詞|助詞', + pos2: '非自立|副助詞/並立助詞/終助詞', + }); + + assert.equal(shouldExcludeTokenFromSubtitleAnnotations(token), true, surface); + } +}); + +test('shouldExcludeTokenFromSubtitleAnnotations excludes bare くれ auxiliary fragments', () => { + const token = makeToken({ + surface: 'くれ', + headword: '暮れ', + reading: 'クレ', + partOfSpeech: PartOfSpeech.noun, + pos1: '名詞', + pos2: '一般', + }); + + assert.equal(shouldExcludeTokenFromSubtitleAnnotations(token), true); +}); + +test('shouldExcludeTokenFromSubtitleAnnotations excludes aru existence verbs', () => { + for (const token of [ + makeToken({ + surface: 'ある', + headword: 'ある', + reading: 'アル', + partOfSpeech: PartOfSpeech.verb, + pos1: '動詞', + pos2: '自立', + }), + makeToken({ + surface: '有る', + headword: '有る', + reading: 'アル', + partOfSpeech: PartOfSpeech.verb, + pos1: '動詞', + pos2: '自立', + }), + ]) { + assert.equal(shouldExcludeTokenFromSubtitleAnnotations(token), true, token.surface); + } +}); + +test('shouldExcludeTokenFromSubtitleAnnotations excludes standalone quote particle and auxiliary grammar terms', () => { + for (const token of [ + makeToken({ + surface: 'って', + headword: 'って', + reading: 'ッテ', + partOfSpeech: PartOfSpeech.other, + pos1: '', + pos2: '', + }), + makeToken({ + surface: 'べき', + headword: 'べき', + reading: 'ベキ', + partOfSpeech: PartOfSpeech.other, + pos1: '', + pos2: '', + }), + ]) { + assert.equal(shouldExcludeTokenFromSubtitleAnnotations(token), true, token.surface); + } +}); + +test('shouldExcludeTokenFromSubtitleAnnotations excludes single-kana surface 
fragments', () => { + for (const token of [ + makeToken({ + surface: 'ふ', + headword: '不', + reading: 'フ', + partOfSpeech: PartOfSpeech.other, + pos1: '接頭詞', + pos2: '', + }), + makeToken({ + surface: 'フ', + headword: '負', + reading: 'フ', + partOfSpeech: PartOfSpeech.noun, + pos1: '名詞', + pos2: '一般', + }), + ]) { + assert.equal(shouldExcludeTokenFromSubtitleAnnotations(token), true, token.surface); + } +}); + +test('stripSubtitleAnnotationMetadata keeps known hover data while clearing non-known annotation fields', () => { const token = makeToken({ surface: 'は', headword: 'は', @@ -464,7 +693,6 @@ test('stripSubtitleAnnotationMetadata keeps token hover data while clearing anno assert.deepEqual(stripSubtitleAnnotationMetadata(token), { ...token, - isKnown: false, isNPlusOneTarget: false, isNameMatch: false, jlptLevel: undefined, @@ -526,13 +754,13 @@ test('annotateTokens keeps other annotations for name matches when name highligh let jlptLookupCalls = 0; const tokens = [ makeToken({ - surface: 'オリヴィア', - reading: 'オリヴィア', - headword: 'オリヴィア', + surface: '山田', + reading: 'ヤマダ', + headword: '山田', isNameMatch: true, frequencyRank: 42, startPos: 0, - endPos: 5, + endPos: 2, }), ]; @@ -583,6 +811,63 @@ test('annotateTokens N+1 handoff marks expected target when threshold is satisfi assert.equal(result[2]?.isNPlusOneTarget, false); }); +test('annotateTokens does not mark kana-only unknown target as N+1', () => { + const tokens = [ + makeToken({ + surface: '何やら', + headword: '何やら', + reading: 'ナニヤラ', + pos1: '副詞', + startPos: 0, + endPos: 3, + }), + makeToken({ + surface: 'ボタン', + headword: 'ボタン', + reading: 'ボタン', + pos1: '名詞', + startPos: 3, + endPos: 6, + }), + makeToken({ + surface: 'すいっち', + headword: 'すいっち', + reading: 'スイッチ', + pos1: '名詞', + startPos: 6, + endPos: 10, + }), + ]; + + const result = annotateTokens( + tokens, + makeDeps({ + isKnownWord: (text) => text === '何やら' || text === 'ボタン', + }), + { minSentenceWordsForNPlusOne: 3 }, + ); + + 
assert.equal(result[2]?.isNPlusOneTarget, false); +}); + +test('annotateTokens still marks kanji unknown target in otherwise eligible sentence as N+1', () => { + const tokens = [ + makeToken({ surface: '私', headword: '私', pos1: '名詞', startPos: 0, endPos: 1 }), + makeToken({ surface: '猫', headword: '猫', pos1: '名詞', startPos: 1, endPos: 2 }), + makeToken({ surface: '装置…', headword: '装置', pos1: '名詞', startPos: 2, endPos: 5 }), + ]; + + const result = annotateTokens( + tokens, + makeDeps({ + isKnownWord: (text) => text === '私' || text === '猫', + }), + { minSentenceWordsForNPlusOne: 3 }, + ); + + assert.equal(result[2]?.isNPlusOneTarget, true); +}); + test('annotateTokens N+1 minimum sentence words counts only eligible word tokens', () => { const tokens = [ makeToken({ surface: '猫', headword: '猫', startPos: 0, endPos: 1 }), @@ -618,6 +903,134 @@ test('annotateTokens N+1 minimum sentence words counts only eligible word tokens assert.equal(result[0]?.isNPlusOneTarget, false); }); +test('annotateTokens N+1 minimum sentence words excludes unknown tokens filtered from N+1 targeting', () => { + const tokens = [ + makeToken({ surface: '私', headword: '私', pos1: '名詞', startPos: 0, endPos: 1 }), + makeToken({ surface: '猫', headword: '猫', pos1: '名詞', startPos: 1, endPos: 2 }), + makeToken({ + surface: 'スイッチ', + headword: 'スイッチ', + reading: 'スイッチ', + pos1: '名詞', + startPos: 2, + endPos: 6, + }), + makeToken({ surface: '装置', headword: '装置', pos1: '名詞', startPos: 6, endPos: 8 }), + ]; + + const result = annotateTokens( + tokens, + makeDeps({ + isKnownWord: (text) => text === '私' || text === '猫', + }), + { minSentenceWordsForNPlusOne: 4 }, + ); + + assert.equal(result[3]?.isNPlusOneTarget, false); +}); + +test('annotateTokens N+1 sentence word count respects source punctuation gaps omitted by Yomitan', () => { + const tokens = [ + makeToken({ + surface: '私', + headword: '私', + pos1: '名詞', + startPos: 0, + endPos: 1, + }), + makeToken({ + surface: '猫', + headword: '猫', + pos1: '名詞', + 
startPos: 1, + endPos: 2, + }), + makeToken({ + surface: '犬', + headword: '犬', + pos1: '名詞', + startPos: 2, + endPos: 3, + }), + makeToken({ + surface: 'ふざけん', + headword: 'ふざける', + partOfSpeech: PartOfSpeech.verb, + pos1: '動詞', + pos2: '自立', + startPos: 4, + endPos: 8, + }), + ]; + + const result = annotateTokens( + tokens, + makeDeps({ + isKnownWord: (text) => text === '私' || text === '猫' || text === '犬', + }), + { + minSentenceWordsForNPlusOne: 3, + sourceText: '私猫犬!ふざけんなよ!', + }, + ); + + assert.equal(result[0]?.isNPlusOneTarget, false); + assert.equal(result[1]?.isNPlusOneTarget, false); + assert.equal(result[2]?.isNPlusOneTarget, false); + assert.equal(result[3]?.isNPlusOneTarget, false); +}); + +test('annotateTokens N+1 sentence word count normalizes line breaks before gap detection', () => { + const tokens = [ + makeToken({ + surface: '私', + headword: '私', + pos1: '名詞', + startPos: 0, + endPos: 1, + }), + makeToken({ + surface: '猫', + headword: '猫', + pos1: '名詞', + startPos: 2, + endPos: 3, + }), + makeToken({ + surface: '犬', + headword: '犬', + pos1: '名詞', + startPos: 3, + endPos: 4, + }), + makeToken({ + surface: 'ふざけん', + headword: 'ふざける', + partOfSpeech: PartOfSpeech.verb, + pos1: '動詞', + pos2: '自立', + startPos: 5, + endPos: 9, + }), + ]; + + const result = annotateTokens( + tokens, + makeDeps({ + isKnownWord: (text) => text === '私' || text === '猫' || text === '犬', + }), + { + minSentenceWordsForNPlusOne: 3, + sourceText: '私\r\n猫犬!ふざけんなよ!', + }, + ); + + assert.equal(result[0]?.isNPlusOneTarget, false); + assert.equal(result[1]?.isNPlusOneTarget, false); + assert.equal(result[2]?.isNPlusOneTarget, false); + assert.equal(result[3]?.isNPlusOneTarget, false); +}); + test('annotateTokens applies configured pos1 exclusions to both frequency and N+1', () => { const tokens = [ makeToken({ @@ -675,7 +1088,7 @@ test('annotateTokens allows previously default-excluded pos1 when removed from e }); assert.equal(result[0]?.frequencyRank, 8); - 
assert.equal(result[0]?.isNPlusOneTarget, true); + assert.equal(result[0]?.isNPlusOneTarget, false); }); test('annotateTokens excludes default non-independent pos2 from frequency and N+1', () => { @@ -700,6 +1113,37 @@ test('annotateTokens excludes default non-independent pos2 from frequency and N+ assert.equal(result[0]?.isNPlusOneTarget, false); }); +test('annotateTokens clears known-word status for non-independent kanji noun tokens', () => { + const tokens = [ + makeToken({ + surface: '点', + reading: 'てん', + headword: '点', + partOfSpeech: PartOfSpeech.other, + pos1: '名詞', + pos2: '非自立', + pos3: '一般', + startPos: 2, + endPos: 3, + frequencyRank: 1384, + }), + ]; + + const result = annotateTokens( + tokens, + makeDeps({ + isKnownWord: (text) => text === '点' || text === 'てん', + getJlptLevel: (text) => (text === '点' ? 'N3' : null), + }), + { minSentenceWordsForNPlusOne: 1 }, + ); + + assert.equal(result[0]?.isKnown, false); + assert.equal(result[0]?.isNPlusOneTarget, false); + assert.equal(result[0]?.frequencyRank, undefined); + assert.equal(result[0]?.jlptLevel, undefined); +}); + test('annotateTokens clears all annotations for non-independent kanji noun tokens under unified gate', () => { const tokens = [ makeToken({ @@ -747,7 +1191,7 @@ test('annotateTokens excludes likely kana SFX tokens from frequency when POS tag assert.equal(result[0]?.frequencyRank, undefined); }); -test('annotateTokens excludes single hiragana and katakana tokens from frequency when POS tags are missing', () => { +test('annotateTokens clears all annotations from single hiragana and katakana surface fragments', () => { const tokens = [ makeToken({ surface: 'た', @@ -761,12 +1205,12 @@ test('annotateTokens excludes single hiragana and katakana tokens from frequency endPos: 1, }), makeToken({ - surface: 'ア', - reading: 'ア', - headword: 'ア', - pos1: '', + surface: 'フ', + reading: 'フ', + headword: '負', + pos1: '名詞', pos2: '', - partOfSpeech: PartOfSpeech.other, + partOfSpeech: PartOfSpeech.noun, 
frequencyRank: 22, startPos: 1, endPos: 2, @@ -788,8 +1232,14 @@ test('annotateTokens excludes single hiragana and katakana tokens from frequency minSentenceWordsForNPlusOne: 1, }); + assert.equal(result[0]?.isKnown, false); + assert.equal(result[0]?.isNPlusOneTarget, false); assert.equal(result[0]?.frequencyRank, undefined); + assert.equal(result[0]?.jlptLevel, undefined); + assert.equal(result[1]?.isKnown, false); + assert.equal(result[1]?.isNPlusOneTarget, false); assert.equal(result[1]?.frequencyRank, undefined); + assert.equal(result[1]?.jlptLevel, undefined); assert.equal(result[2]?.frequencyRank, 23); }); @@ -833,10 +1283,10 @@ test('annotateTokens allows previously default-excluded pos2 when removed from e }); assert.equal(result[0]?.frequencyRank, 9); - assert.equal(result[0]?.isNPlusOneTarget, true); + assert.equal(result[0]?.isNPlusOneTarget, false); }); -test('annotateTokens excludes composite function/content tokens from frequency but keeps N+1 eligible', () => { +test('annotateTokens excludes kana-only composite function/content tokens from frequency and N+1', () => { const tokens = [ makeToken({ surface: 'になれば', @@ -854,7 +1304,7 @@ test('annotateTokens excludes composite function/content tokens from frequency b }); assert.equal(result[0]?.frequencyRank, undefined); - assert.equal(result[0]?.isNPlusOneTarget, true); + assert.equal(result[0]?.isNPlusOneTarget, false); }); test('annotateTokens excludes composite tokens when all component pos tags are excluded', () => { @@ -908,7 +1358,7 @@ test('annotateTokens applies one shared exclusion gate across known N+1 frequenc assert.equal(result[0]?.jlptLevel, undefined); }); -test('annotateTokens clears all annotations for kana-only non-independent noun helper merges', () => { +test('annotateTokens clears known status and other annotations for kana-only non-independent noun helper merges', () => { const tokens = [ makeToken({ surface: 'ことに', @@ -938,7 +1388,414 @@ test('annotateTokens clears all annotations 
for kana-only non-independent noun h assert.equal(result[0]?.jlptLevel, undefined); }); -test('annotateTokens clears all annotations from standalone あ interjections without POS tags', () => { +test('annotateTokens clears known status and other annotations for standalone auxiliary inflection fragments', () => { + const tokens = [ + makeToken({ + surface: 'れる', + headword: 'れる', + reading: 'レル', + partOfSpeech: PartOfSpeech.verb, + pos1: '動詞', + pos2: '接尾', + startPos: 0, + endPos: 2, + frequencyRank: 18, + }), + makeToken({ + surface: 'れた', + headword: 'れる', + reading: 'レタ', + partOfSpeech: PartOfSpeech.verb, + pos1: '動詞|助動詞', + pos2: '接尾|*', + startPos: 2, + endPos: 4, + frequencyRank: 19, + }), + ]; + + const result = annotateTokens( + tokens, + makeDeps({ + isKnownWord: (text) => text === 'れる', + getJlptLevel: (text) => (text === 'れる' ? 'N4' : null), + }), + { minSentenceWordsForNPlusOne: 1 }, + ); + + for (const token of result) { + assert.equal(token.isKnown, false, token.surface); + assert.equal(token.isNPlusOneTarget, false, token.surface); + assert.equal(token.frequencyRank, undefined, token.surface); + assert.equal(token.jlptLevel, undefined, token.surface); + } +}); + +test('annotateTokens clears known status and other annotations for auxiliary-only te-kureru helper spans', () => { + const tokens = [ + makeToken({ + surface: 'てく', + headword: 'てく', + reading: 'テク', + partOfSpeech: PartOfSpeech.verb, + pos1: '助詞|動詞', + pos2: '接続助詞|非自立', + startPos: 0, + endPos: 2, + frequencyRank: 140, + }), + makeToken({ + surface: 'れた', + headword: 'れる', + reading: 'レタ', + partOfSpeech: PartOfSpeech.verb, + pos1: '動詞|助動詞', + pos2: '接尾|*', + startPos: 2, + endPos: 4, + frequencyRank: 19, + }), + ]; + + const result = annotateTokens( + tokens, + makeDeps({ + isKnownWord: (text) => text === 'てく' || text === 'れる', + getJlptLevel: (text) => (text === 'てく' || text === 'れる' ? 
'N4' : null), + }), + { minSentenceWordsForNPlusOne: 1 }, + ); + + for (const token of result) { + assert.equal(token.isKnown, false, token.surface); + assert.equal(token.isNPlusOneTarget, false, token.surface); + assert.equal(token.frequencyRank, undefined, token.surface); + assert.equal(token.jlptLevel, undefined, token.surface); + } +}); + +test('annotateTokens keeps lexical くれる forms eligible for annotation', () => { + const tokens = [ + makeToken({ + surface: 'くれ', + headword: 'くれる', + reading: 'クレ', + partOfSpeech: PartOfSpeech.verb, + pos1: '動詞', + pos2: '自立', + startPos: 0, + endPos: 2, + frequencyRank: 20, + }), + ]; + + const result = annotateTokens( + tokens, + makeDeps({ + getJlptLevel: (text) => (text === 'くれる' ? 'N4' : null), + }), + { minSentenceWordsForNPlusOne: 1 }, + ); + + assert.equal(result[0]?.isKnown, false); + assert.equal(result[0]?.isNPlusOneTarget, false); + assert.equal(result[0]?.frequencyRank, 20); + assert.equal(result[0]?.jlptLevel, 'N4'); +}); + +test('annotateTokens clears known status and other annotations for standalone して helper fragments', () => { + const tokens = [ + makeToken({ + surface: 'してる', + headword: 'する', + reading: 'シテル', + partOfSpeech: PartOfSpeech.verb, + pos1: '動詞|助動詞', + pos2: '自立|非自立', + startPos: 0, + endPos: 3, + frequencyRank: 22, + }), + ]; + + const result = annotateTokens( + tokens, + makeDeps({ + isKnownWord: (text) => text === 'する', + getJlptLevel: (text) => (text === 'する' ? 
'N5' : null), + }), + { minSentenceWordsForNPlusOne: 1 }, + ); + + assert.equal(result[0]?.isKnown, false); + assert.equal(result[0]?.isNPlusOneTarget, false); + assert.equal(result[0]?.frequencyRank, undefined); + assert.equal(result[0]?.jlptLevel, undefined); +}); + +test('annotateTokens clears known status and other annotations for standalone particle fragments without POS tags', () => { + const tokens = [ + makeToken({ + surface: 'と', + headword: 'と', + reading: 'ト', + partOfSpeech: PartOfSpeech.other, + pos1: '', + pos2: '', + startPos: 0, + endPos: 1, + frequencyRank: 4, + }), + ]; + + const result = annotateTokens( + tokens, + makeDeps({ + isKnownWord: (text) => text === 'と', + getJlptLevel: (text) => (text === 'と' ? 'N5' : null), + }), + { minSentenceWordsForNPlusOne: 1 }, + ); + + assert.equal(result[0]?.isKnown, false); + assert.equal(result[0]?.isNPlusOneTarget, false); + assert.equal(result[0]?.frequencyRank, undefined); + assert.equal(result[0]?.jlptLevel, undefined); +}); + +test('annotateTokens clears known status from standalone particles even when the known-word cache contains them', () => { + const tokens = [ + makeToken({ + surface: 'に', + headword: 'に', + reading: 'ニ', + partOfSpeech: PartOfSpeech.particle, + pos1: '助詞', + pos2: '格助詞', + startPos: 0, + endPos: 1, + frequencyRank: 2, + }), + makeToken({ + surface: '泉', + headword: '泉', + reading: 'イズミ', + partOfSpeech: PartOfSpeech.noun, + pos1: '名詞', + pos2: '一般', + startPos: 1, + endPos: 2, + frequencyRank: 50, + }), + ]; + + const result = annotateTokens( + tokens, + makeDeps({ + isKnownWord: (text) => text === 'に' || text === '泉', + getJlptLevel: (text) => (text === 'に' ? 
'N5' : null), + }), + { minSentenceWordsForNPlusOne: 1 }, + ); + + assert.equal(result[0]?.isKnown, false); + assert.equal(result[0]?.isNPlusOneTarget, false); + assert.equal(result[0]?.frequencyRank, undefined); + assert.equal(result[0]?.jlptLevel, undefined); + assert.equal(result[1]?.isKnown, true); +}); + +test('annotateTokens does not mark standalone connective particles as N+1', () => { + const tokens = [ + makeToken({ + surface: '逃げる', + headword: '逃げる', + reading: 'ニゲル', + partOfSpeech: PartOfSpeech.verb, + pos1: '動詞', + pos2: '自立', + startPos: 0, + endPos: 3, + }), + makeToken({ + surface: 'たって', + headword: 'たって', + reading: 'タッテ', + partOfSpeech: PartOfSpeech.other, + pos1: '', + pos2: '', + startPos: 3, + endPos: 6, + frequencyRank: 28, + }), + makeToken({ + surface: '無駄', + headword: '無駄', + reading: 'ムダ', + partOfSpeech: PartOfSpeech.noun, + pos1: '名詞', + pos2: '形容動詞語幹', + startPos: 6, + endPos: 8, + }), + ]; + + const result = annotateTokens( + tokens, + makeDeps({ + isKnownWord: (text) => text === '逃げる' || text === '無駄', + getJlptLevel: (text) => (text === 'たって' ? 'N3' : null), + }), + { minSentenceWordsForNPlusOne: 1 }, + ); + + assert.equal(result[1]?.isKnown, false); + assert.equal(result[1]?.isNPlusOneTarget, false); + assert.equal(result[1]?.frequencyRank, undefined); + assert.equal(result[1]?.jlptLevel, undefined); +}); + +test('annotateTokens clears known status and other annotations for rhetorical もんか grammar particle phrases', () => { + const tokens = [ + makeToken({ + surface: 'もんか', + headword: 'もんか', + reading: 'モンカ', + partOfSpeech: PartOfSpeech.noun, + pos1: '名詞|助詞', + pos2: '非自立|副助詞/並立助詞/終助詞', + startPos: 0, + endPos: 3, + frequencyRank: 69629, + }), + ]; + + const result = annotateTokens( + tokens, + makeDeps({ + isKnownWord: (text) => text === 'もんか', + getJlptLevel: (text) => (text === 'もんか' ? 
'N2' : null), + }), + { minSentenceWordsForNPlusOne: 1 }, + ); + + assert.equal(result[0]?.isKnown, false); + assert.equal(result[0]?.isNPlusOneTarget, false); + assert.equal(result[0]?.frequencyRank, undefined); + assert.equal(result[0]?.jlptLevel, undefined); +}); + +test('annotateTokens clears known status and other annotations for bare くれ auxiliary fragments', () => { + const tokens = [ + makeToken({ + surface: 'くれ', + headword: '暮れ', + reading: 'クレ', + partOfSpeech: PartOfSpeech.noun, + pos1: '名詞', + pos2: '一般', + startPos: 0, + endPos: 2, + frequencyRank: 12877, + }), + ]; + + const result = annotateTokens( + tokens, + makeDeps({ + isKnownWord: (text) => text === '暮れ', + getJlptLevel: (text) => (text === '暮れ' ? 'N3' : null), + }), + { minSentenceWordsForNPlusOne: 1 }, + ); + + assert.equal(result[0]?.isKnown, false); + assert.equal(result[0]?.isNPlusOneTarget, false); + assert.equal(result[0]?.frequencyRank, undefined); + assert.equal(result[0]?.jlptLevel, undefined); +}); + +test('annotateTokens clears known status and other annotations for aru existence verbs', () => { + const tokens = [ + makeToken({ + surface: '有る', + headword: '有る', + reading: 'アル', + partOfSpeech: PartOfSpeech.verb, + pos1: '動詞', + pos2: '自立', + startPos: 0, + endPos: 2, + frequencyRank: 8447, + isKnown: true, + isNPlusOneTarget: true, + isNameMatch: true, + jlptLevel: 'N5', + }), + ]; + + const result = annotateTokens( + tokens, + makeDeps({ + isKnownWord: (text) => text === '有る' || text === 'ある', + getJlptLevel: (text) => (text === '有る' || text === 'ある' ? 
'N5' : null), + }), + { minSentenceWordsForNPlusOne: 1 }, + ); + + assert.equal(result[0]?.surface, '有る'); + assert.equal(result[0]?.headword, '有る'); + assert.equal(result[0]?.isKnown, false); + assert.equal(result[0]?.isNPlusOneTarget, false); + assert.equal(result[0]?.isNameMatch, false); + assert.equal(result[0]?.frequencyRank, undefined); + assert.equal(result[0]?.jlptLevel, undefined); +}); + +test('annotateTokens clears known status and other annotations for standalone quote particle and auxiliary grammar terms', () => { + const tokens = [ + makeToken({ + surface: 'って', + headword: 'って', + reading: 'ッテ', + partOfSpeech: PartOfSpeech.other, + pos1: '', + pos2: '', + startPos: 0, + endPos: 2, + frequencyRank: 28, + }), + makeToken({ + surface: 'べき', + headword: 'べき', + reading: 'ベキ', + partOfSpeech: PartOfSpeech.other, + pos1: '', + pos2: '', + startPos: 2, + endPos: 4, + frequencyRank: 268, + }), + ]; + + const result = annotateTokens( + tokens, + makeDeps({ + isKnownWord: (text) => text === 'って' || text === 'べき', + getJlptLevel: (text) => (text === 'って' || text === 'べき' ? 
'N3' : null), + }), + { minSentenceWordsForNPlusOne: 1 }, + ); + + for (const token of result) { + assert.equal(token.isKnown, false, token.surface); + assert.equal(token.isNPlusOneTarget, false, token.surface); + assert.equal(token.frequencyRank, undefined, token.surface); + assert.equal(token.jlptLevel, undefined, token.surface); + } +}); + +test('annotateTokens clears known status and other annotations from standalone あ interjections without POS tags', () => { const tokens = [ makeToken({ surface: 'あ', @@ -973,3 +1830,67 @@ test('annotateTokens clears all annotations from standalone あ interjections wi assert.equal(result[0]?.frequencyRank, undefined); assert.equal(result[0]?.jlptLevel, undefined); }); + +test('annotateTokens clears all annotations from expressive subtitle interjections without POS tags', () => { + const tokens = [ + makeToken({ + surface: 'ハァ', + headword: 'ハァ', + reading: 'ハァ', + partOfSpeech: PartOfSpeech.other, + pos1: '', + pos2: '', + startPos: 0, + endPos: 2, + isKnown: true, + isNPlusOneTarget: true, + frequencyRank: 3007, + jlptLevel: 'N5', + }), + makeToken({ + surface: 'はっ', + headword: 'はっ', + reading: 'ハッ', + partOfSpeech: PartOfSpeech.other, + pos1: '', + pos2: '', + startPos: 10, + endPos: 12, + isKnown: true, + isNPlusOneTarget: true, + frequencyRank: 3007, + jlptLevel: 'N5', + }), + makeToken({ + surface: '猫', + headword: '猫', + reading: 'ネコ', + partOfSpeech: PartOfSpeech.noun, + pos1: '名詞', + pos2: '一般', + startPos: 13, + endPos: 14, + frequencyRank: 11, + }), + ]; + + const result = annotateTokens( + tokens, + makeDeps({ + isKnownWord: (text) => text === 'ハァ' || text === 'はっ', + getJlptLevel: (text) => (text === 'ハァ' || text === 'はっ' ? 'N5' : null), + }), + { + minSentenceWordsForNPlusOne: 1, + sourceText: 'ハァ…\n(ガーフィール)はっ! 
猫', + }, + ); + + for (const token of result.slice(0, 2)) { + assert.equal(token.isKnown, false, token.surface); + assert.equal(token.isNPlusOneTarget, false, token.surface); + assert.equal(token.frequencyRank, undefined, token.surface); + assert.equal(token.jlptLevel, undefined, token.surface); + } + assert.equal(result[2]?.frequencyRank, 11); +}); diff --git a/src/core/services/tokenizer/annotation-stage.ts b/src/core/services/tokenizer/annotation-stage.ts index 7e08e923..f1cd6bb6 100644 --- a/src/core/services/tokenizer/annotation-stage.ts +++ b/src/core/services/tokenizer/annotation-stage.ts @@ -18,57 +18,6 @@ const KATAKANA_TO_HIRAGANA_OFFSET = 0x60; const KATAKANA_CODEPOINT_START = 0x30a1; const KATAKANA_CODEPOINT_END = 0x30f6; const JLPT_LEVEL_LOOKUP_CACHE_LIMIT = 2048; -const SUBTITLE_ANNOTATION_EXCLUDED_TERMS = new Set([ - 'ああ', - 'ええ', - 'うう', - 'おお', - 'はあ', - 'はは', - 'へえ', - 'ふう', - 'ほう', -]); -const SUBTITLE_ANNOTATION_EXCLUDED_EXPLANATORY_ENDING_PREFIXES = ['ん', 'の', 'なん', 'なの']; -const SUBTITLE_ANNOTATION_EXCLUDED_EXPLANATORY_ENDING_CORES = [ - 'だ', - 'です', - 'でした', - 'だった', - 'では', - 'じゃ', - 'でしょう', - 'だろう', -] as const; -const SUBTITLE_ANNOTATION_EXCLUDED_EXPLANATORY_ENDING_TRAILING_PARTICLES = [ - '', - 'か', - 'ね', - 'よ', - 'な', - 'けど', - 'よね', - 'かな', - 'かね', -] as const; -const SUBTITLE_ANNOTATION_EXCLUDED_EXPLANATORY_ENDINGS = new Set( - SUBTITLE_ANNOTATION_EXCLUDED_EXPLANATORY_ENDING_PREFIXES.flatMap((prefix) => - SUBTITLE_ANNOTATION_EXCLUDED_EXPLANATORY_ENDING_CORES.flatMap((core) => - SUBTITLE_ANNOTATION_EXCLUDED_EXPLANATORY_ENDING_TRAILING_PARTICLES.map( - (particle) => `${prefix}${core}${particle}`, - ), - ), - ), -); -const SUBTITLE_ANNOTATION_EXCLUDED_TRAILING_PARTICLE_SUFFIXES = new Set([ - 'って', - 'ってよ', - 'ってね', - 'ってな', - 'ってさ', - 'ってか', - 'ってば', -]); const jlptLevelLookupCaches = new WeakMap< (text: string) => JlptLevel | null, @@ -89,6 +38,7 @@ export interface AnnotationStageOptions { minSentenceWordsForNPlusOne?: number; 
pos1Exclusions?: ReadonlySet; pos2Exclusions?: ReadonlySet; + sourceText?: string; } function resolveKnownWordText( @@ -103,10 +53,6 @@ function normalizePos1Tag(pos1: string | undefined): string { return typeof pos1 === 'string' ? pos1.trim() : ''; } -const SUBTITLE_ANNOTATION_EXCLUDED_POS1 = new Set(['感動詞']); -const SUBTITLE_ANNOTATION_GRAMMAR_ONLY_POS1 = new Set(['助詞', '助動詞', '連体詞']); -const AUXILIARY_STEM_GRAMMAR_TAIL_POS1 = new Set(['名詞', '助動詞', '助詞']); - function splitNormalizedTagParts(normalizedTag: string): string[] { if (!normalizedTag) { return []; @@ -128,57 +74,6 @@ function isExcludedByTagSet(normalizedTag: string, exclusions: ReadonlySet exclusions.has(part)); } -function isExcludedFromSubtitleAnnotationsByPos1(normalizedPos1: string): boolean { - const parts = splitNormalizedTagParts(normalizedPos1); - if (parts.some((part) => SUBTITLE_ANNOTATION_EXCLUDED_POS1.has(part))) { - return true; - } - - return parts.length > 0 && parts.every((part) => SUBTITLE_ANNOTATION_GRAMMAR_ONLY_POS1.has(part)); -} - -function isExcludedTrailingParticleMergedToken(token: MergedToken): boolean { - const normalizedSurface = normalizeJlptTextForExclusion(token.surface); - const normalizedHeadword = normalizeJlptTextForExclusion(token.headword); - if ( - !normalizedSurface || - !normalizedHeadword || - !normalizedSurface.startsWith(normalizedHeadword) - ) { - return false; - } - - const suffix = normalizedSurface.slice(normalizedHeadword.length); - if (!SUBTITLE_ANNOTATION_EXCLUDED_TRAILING_PARTICLE_SUFFIXES.has(suffix)) { - return false; - } - - const pos1Parts = splitNormalizedTagParts(normalizePos1Tag(token.pos1)); - if (pos1Parts.length < 2) { - return false; - } - - const [leadingPos1, ...trailingPos1] = pos1Parts; - if (!leadingPos1 || SUBTITLE_ANNOTATION_GRAMMAR_ONLY_POS1.has(leadingPos1)) { - return false; - } - - return trailingPos1.length > 0 && trailingPos1.every((part) => part === '助詞'); -} - -function isAuxiliaryStemGrammarTailToken(token: MergedToken): 
boolean { - const pos1Parts = splitNormalizedTagParts(normalizePos1Tag(token.pos1)); - if ( - pos1Parts.length === 0 || - !pos1Parts.every((part) => AUXILIARY_STEM_GRAMMAR_TAIL_POS1.has(part)) - ) { - return false; - } - - const pos3Parts = splitNormalizedTagParts(normalizePos2Tag(token.pos3)); - return pos3Parts.includes('助動詞語幹'); -} - function resolvePos1Exclusions(options: AnnotationStageOptions): ReadonlySet { if (options.pos1Exclusions) { return options.pos1Exclusions; @@ -254,6 +149,45 @@ function shouldAllowContentLedMergedTokenFrequency( return true; } +function shouldAllowOrdinalPrefixNounFrequency(token: MergedToken): boolean { + const normalizedSurface = token.surface.trim(); + const normalizedHeadword = token.headword.trim(); + if (!normalizedSurface.startsWith('第') && !normalizedHeadword.startsWith('第')) { + return false; + } + + const pos1Parts = splitNormalizedTagParts(normalizePos1Tag(token.pos1)); + const pos2Parts = splitNormalizedTagParts(normalizePos2Tag(token.pos2)); + return ( + pos1Parts.length >= 2 && + pos1Parts[0] === '接頭詞' && + pos1Parts.slice(1).some((part) => part === '名詞') && + pos2Parts[0] === '数接続' && + pos2Parts.slice(1).some((part) => part === '数') + ); +} + +function shouldAllowHonorificPrefixNounFrequency(token: MergedToken): boolean { + const normalizedSurface = token.surface.trim(); + const normalizedHeadword = token.headword.trim(); + if ( + !['お', 'ご', '御'].some( + (prefix) => normalizedSurface.startsWith(prefix) || normalizedHeadword.startsWith(prefix), + ) + ) { + return false; + } + + const pos1Parts = splitNormalizedTagParts(normalizePos1Tag(token.pos1)); + const pos2Parts = splitNormalizedTagParts(normalizePos2Tag(token.pos2)); + return ( + pos1Parts.length >= 2 && + pos1Parts[0] === '接頭詞' && + pos1Parts.slice(1).some((part) => part === '名詞') && + pos2Parts[0] === '名詞接続' + ); +} + function isFrequencyExcludedByPos( token: MergedToken, pos1Exclusions: ReadonlySet, @@ -273,12 +207,24 @@ function isFrequencyExcludedByPos( 
pos1Exclusions, pos2Exclusions, ); + const allowOrdinalPrefixNounToken = shouldAllowOrdinalPrefixNounFrequency(token); + const allowHonorificPrefixNounToken = shouldAllowHonorificPrefixNounFrequency(token); - if (isExcludedByTagSet(normalizedPos1, pos1Exclusions) && !allowContentLedMergedToken) { + if ( + isExcludedByTagSet(normalizedPos1, pos1Exclusions) && + !allowContentLedMergedToken && + !allowOrdinalPrefixNounToken && + !allowHonorificPrefixNounToken + ) { return true; } - if (isExcludedByTagSet(normalizedPos2, pos2Exclusions) && !allowContentLedMergedToken) { + if ( + isExcludedByTagSet(normalizedPos2, pos2Exclusions) && + !allowContentLedMergedToken && + !allowOrdinalPrefixNounToken && + !allowHonorificPrefixNounToken + ) { return true; } @@ -608,50 +554,15 @@ function isJlptEligibleToken(token: MergedToken): boolean { return true; } -function isExcludedFromSubtitleAnnotationsByTerm(token: MergedToken): boolean { - const candidates = [token.surface, token.reading, resolveJlptLookupText(token)].filter( - (candidate): candidate is string => typeof candidate === 'string' && candidate.length > 0, - ); - - for (const candidate of candidates) { - const trimmedCandidate = candidate.trim(); - if (!trimmedCandidate) { - continue; - } - - const normalizedCandidate = normalizeJlptTextForExclusion(trimmedCandidate); - if (!normalizedCandidate) { - continue; - } - - if ( - SUBTITLE_ANNOTATION_EXCLUDED_TERMS.has(trimmedCandidate) || - SUBTITLE_ANNOTATION_EXCLUDED_TERMS.has(normalizedCandidate) || - SUBTITLE_ANNOTATION_EXCLUDED_EXPLANATORY_ENDINGS.has(trimmedCandidate) || - SUBTITLE_ANNOTATION_EXCLUDED_EXPLANATORY_ENDINGS.has(normalizedCandidate) - ) { - return true; - } - - if ( - isTrailingSmallTsuKanaSfx(trimmedCandidate) || - isTrailingSmallTsuKanaSfx(normalizedCandidate) || - isReduplicatedKanaSfxWithOptionalTrailingTo(trimmedCandidate) || - isReduplicatedKanaSfxWithOptionalTrailingTo(normalizedCandidate) - ) { - return true; - } - } - - return false; -} - export 
function shouldExcludeTokenFromSubtitleAnnotations(token: MergedToken): boolean { return sharedShouldExcludeTokenFromSubtitleAnnotations(token); } -export function stripSubtitleAnnotationMetadata(token: MergedToken): MergedToken { - return sharedStripSubtitleAnnotationMetadata(token); +export function stripSubtitleAnnotationMetadata( + token: MergedToken, + options: AnnotationStageOptions = {}, +): MergedToken { + return sharedStripSubtitleAnnotationMetadata(token, options); } function computeTokenKnownStatus( @@ -734,10 +645,14 @@ export function annotateTokens( pos2Exclusions, }) ) { - return sharedStripSubtitleAnnotationMetadata(token, { + const strippedToken = sharedStripSubtitleAnnotationMetadata(token, { pos1Exclusions, pos2Exclusions, }); + return { + ...strippedToken, + isKnown: false, + }; } const prioritizedNameMatch = nameMatchEnabled && token.isNameMatch === true; @@ -781,6 +696,7 @@ export function annotateTokens( sanitizedMinSentenceWordsForNPlusOne, pos1Exclusions, pos2Exclusions, + options.sourceText, ); if (!nameMatchEnabled) { diff --git a/src/core/services/tokenizer/grammar-ending.ts b/src/core/services/tokenizer/grammar-ending.ts new file mode 100644 index 00000000..3f833b15 --- /dev/null +++ b/src/core/services/tokenizer/grammar-ending.ts @@ -0,0 +1,124 @@ +const KATAKANA_TO_HIRAGANA_OFFSET = 0x60; +const KATAKANA_CODEPOINT_START = 0x30a1; +const KATAKANA_CODEPOINT_END = 0x30f6; + +const SENTENCE_FINAL_PARTICLE_SUFFIXES = ['', 'か', 'ね', 'よ', 'な', 'わ'] as const; +const EXPLANATORY_ENDING_PREFIXES = ['ん', 'の', 'なん', 'なの'] as const; +const EXPLANATORY_ENDING_CORES = [ + 'だ', + 'です', + 'でした', + 'だった', + 'では', + 'じゃ', + 'でしょう', + 'だろう', +] as const; +const EXPLANATORY_ENDING_TRAILING_PARTICLES = [ + '', + 'か', + 'ね', + 'よ', + 'な', + 'けど', + 'よね', + 'かな', + 'かね', +] as const; +const EXPLANATORY_ENDING_THOUGHT_SUFFIXES = ['か', 'かな', 'かね'] as const; +const NEGATIVE_COPULA_PREFIXES = ['じゃ', 'では'] as const; + +export function 
normalizeGrammarEndingText(text: string): string { + const raw = text.trim(); + if (!raw) { + return ''; + } + + let normalized = ''; + for (const char of raw) { + const code = char.codePointAt(0); + if (code === undefined) { + continue; + } + + if (code >= KATAKANA_CODEPOINT_START && code <= KATAKANA_CODEPOINT_END) { + normalized += String.fromCodePoint(code - KATAKANA_TO_HIRAGANA_OFFSET); + continue; + } + + normalized += char; + } + + return normalized; +} + +function matchesSuffix(text: string, suffixes: readonly string[]): boolean { + return suffixes.some((suffix) => text === suffix); +} + +function matchesPoliteCopulaEnding(text: string): boolean { + if (!text.startsWith('です')) { + return false; + } + + return matchesSuffix(text.slice('です'.length), SENTENCE_FINAL_PARTICLE_SUFFIXES); +} + +function matchesNegativeCopulaEnding(text: string): boolean { + for (const prefix of NEGATIVE_COPULA_PREFIXES) { + const negativeStem = `${prefix}ない`; + if (!text.startsWith(negativeStem)) { + continue; + } + + const suffix = text.slice(negativeStem.length); + return ( + matchesSuffix(suffix, SENTENCE_FINAL_PARTICLE_SUFFIXES) || matchesPoliteCopulaEnding(suffix) + ); + } + + return false; +} + +function matchesExplanatoryEnding(text: string): boolean { + for (const prefix of EXPLANATORY_ENDING_PREFIXES) { + if (EXPLANATORY_ENDING_THOUGHT_SUFFIXES.some((suffix) => text === `${prefix}${suffix}`)) { + return true; + } + + if (!text.startsWith(prefix)) { + continue; + } + + const suffix = text.slice(prefix.length); + for (const core of EXPLANATORY_ENDING_CORES) { + if (!suffix.startsWith(core)) { + continue; + } + + if (matchesSuffix(suffix.slice(core.length), EXPLANATORY_ENDING_TRAILING_PARTICLES)) { + return true; + } + } + } + + return false; +} + +export function isStandaloneGrammarEndingText(text: string): boolean { + const normalized = normalizeGrammarEndingText(text); + if (!normalized) { + return false; + } + + return matchesPoliteCopulaEnding(normalized) || 
matchesNegativeCopulaEnding(normalized); +} + +export function isSubtitleGrammarEndingText(text: string): boolean { + const normalized = normalizeGrammarEndingText(text); + if (!normalized) { + return false; + } + + return isStandaloneGrammarEndingText(normalized) || matchesExplanatoryEnding(normalized); +} diff --git a/src/core/services/tokenizer/parser-enrichment-stage.test.ts b/src/core/services/tokenizer/parser-enrichment-stage.test.ts index 5fc47233..b4df9cd7 100644 --- a/src/core/services/tokenizer/parser-enrichment-stage.test.ts +++ b/src/core/services/tokenizer/parser-enrichment-stage.test.ts @@ -39,6 +39,33 @@ test('enrichTokensWithMecabPos1 fills missing pos1 using surface-sequence fallba assert.equal(enriched[0]?.pos1, '助詞'); }); +test('enrichTokensWithMecabPos1 backfills blank pos2 and pos3 fields', () => { + const tokens = [ + makeToken({ + surface: 'は', + startPos: 0, + endPos: 1, + pos1: '助詞', + pos2: '', + pos3: ' ', + }), + ]; + const mecabTokens = [ + makeToken({ + surface: 'は', + startPos: 0, + endPos: 1, + pos1: '助詞', + pos2: '係助詞', + pos3: '一般', + }), + ]; + + const enriched = enrichTokensWithMecabPos1(tokens, mecabTokens); + assert.equal(enriched[0]?.pos2, '係助詞'); + assert.equal(enriched[0]?.pos3, '一般'); +}); + test('enrichTokensWithMecabPos1 keeps partOfSpeech unchanged and only enriches POS tags', () => { const tokens = [makeToken({ surface: 'これは', startPos: 0, endPos: 3 })]; const mecabTokens = [ diff --git a/src/core/services/tokenizer/parser-enrichment-stage.ts b/src/core/services/tokenizer/parser-enrichment-stage.ts index 4782f940..3f32741e 100644 --- a/src/core/services/tokenizer/parser-enrichment-stage.ts +++ b/src/core/services/tokenizer/parser-enrichment-stage.ts @@ -120,6 +120,13 @@ function lowerBoundByIndex(candidates: IndexedMecabToken[], targetIndex: number) return low; } +function coalesceMissingPosField( + current: string | undefined, + fallback: string | undefined, +): string | undefined { + return typeof current === 'string' 
&& current.trim().length > 0 ? current : fallback; +} + function joinUniqueTags(values: Array): string | undefined { const unique: string[] = []; for (const value of values) { @@ -303,7 +310,8 @@ function fillMissingPos1BySurfaceSequence( let cursor = 0; return tokens.map((token) => { - if (token.pos1 && token.pos1.trim().length > 0) { + const hasCompletePosMetadata = token.pos1?.trim() && token.pos2?.trim() && token.pos3?.trim(); + if (hasCompletePosMetadata) { return token; } @@ -327,9 +335,9 @@ function fillMissingPos1BySurfaceSequence( cursor = best.index + 1; return { ...token, - pos1: best.pos1, - pos2: best.pos2, - pos3: best.pos3, + pos1: coalesceMissingPosField(token.pos1, best.pos1), + pos2: coalesceMissingPosField(token.pos2, best.pos2), + pos3: coalesceMissingPosField(token.pos3, best.pos3), }; }); } @@ -382,7 +390,7 @@ export function enrichTokensWithMecabPos1( const metadataByTokenIndex = new Map(); for (const [index, token] of tokens.entries()) { - if (token.pos1) { + if (token.pos1?.trim() && token.pos2?.trim() && token.pos3?.trim()) { continue; } @@ -410,9 +418,9 @@ export function enrichTokensWithMecabPos1( return { ...token, - pos1: metadata.pos1, - pos2: metadata.pos2, - pos3: metadata.pos3, + pos1: coalesceMissingPosField(token.pos1, metadata.pos1), + pos2: coalesceMissingPosField(token.pos2, metadata.pos2), + pos3: coalesceMissingPosField(token.pos3, metadata.pos3), }; }); diff --git a/src/core/services/tokenizer/parser-selection-stage.test.ts b/src/core/services/tokenizer/parser-selection-stage.test.ts index 8ffb8f61..5a58afa3 100644 --- a/src/core/services/tokenizer/parser-selection-stage.test.ts +++ b/src/core/services/tokenizer/parser-selection-stage.test.ts @@ -155,7 +155,7 @@ test('prefers the longest dictionary headword across merged segments', () => { ); }); -test('keeps the first headword when later segments are standalone words', () => { +test('splits trailing grammar endings when later segments are standalone words', () => { const 
parseResults = [ makeParseItem('scanning-parser', [ [ @@ -174,10 +174,111 @@ test('keeps the first headword when later segments are standalone words', () => })), [ { - surface: '猫です', - reading: 'ねこです', + surface: '猫', + reading: 'ねこ', headword: '猫', }, + { + surface: 'です', + reading: 'です', + headword: 'です', + }, + ], + ); +}); + +test('keeps preceding reading when standalone grammar ending has empty reading', () => { + const parseResults = [ + makeParseItem('scanning-parser', [ + [ + { text: '猫', reading: 'ねこ', headword: '猫' }, + { text: 'です', reading: '', headword: 'です' }, + ], + ]), + ]; + + const tokens = selectYomitanParseTokens(parseResults, () => false, 'headword'); + assert.deepEqual( + tokens?.map((token) => ({ + surface: token.surface, + reading: token.reading, + headword: token.headword, + })), + [ + { + surface: '猫', + reading: 'ねこ', + headword: '猫', + }, + { + surface: 'です', + reading: '', + headword: 'です', + }, + ], + ); +}); + +test('splits trailing ja-nai grammar endings from preceding content', () => { + const parseResults = [ + makeParseItem('scanning-parser', [ + [ + { text: 'いる', reading: 'いる', headword: 'いる' }, + { text: 'じゃない', reading: 'じゃない', headword: 'じゃない' }, + ], + ]), + ]; + + const tokens = selectYomitanParseTokens(parseResults, () => false, 'headword'); + assert.deepEqual( + tokens?.map((token) => ({ + surface: token.surface, + reading: token.reading, + headword: token.headword, + })), + [ + { + surface: 'いる', + reading: 'いる', + headword: 'いる', + }, + { + surface: 'じゃない', + reading: 'じゃない', + headword: 'じゃない', + }, + ], + ); +}); + +test('splits trailing negative-copula grammar endings by pattern', () => { + const parseResults = [ + makeParseItem('scanning-parser', [ + [ + { text: '問題', reading: 'もんだい', headword: '問題' }, + { text: 'ではないですか', reading: 'ではないですか', headword: 'ない' }, + ], + ]), + ]; + + const tokens = selectYomitanParseTokens(parseResults, () => false, 'headword'); + assert.deepEqual( + tokens?.map((token) => ({ + surface: 
token.surface, + reading: token.reading, + headword: token.headword, + })), + [ + { + surface: '問題', + reading: 'もんだい', + headword: '問題', + }, + { + surface: 'ではないですか', + reading: 'ではないですか', + headword: 'ない', + }, ], ); }); diff --git a/src/core/services/tokenizer/parser-selection-stage.ts b/src/core/services/tokenizer/parser-selection-stage.ts index 894e640e..1f0e9eda 100644 --- a/src/core/services/tokenizer/parser-selection-stage.ts +++ b/src/core/services/tokenizer/parser-selection-stage.ts @@ -1,4 +1,5 @@ import { MergedToken, NPlusOneMatchMode, PartOfSpeech } from '../../../types'; +import { isStandaloneGrammarEndingText } from './grammar-ending'; interface YomitanParseHeadword { term?: unknown; @@ -141,6 +142,15 @@ function isKanaOnlyText(text: string): boolean { return text.length > 0 && Array.from(text).every((char) => isKanaChar(char)); } +function isStandaloneGrammarEndingSegment(segment: YomitanParseSegment): boolean { + const surface = segment.text?.trim() ?? ''; + const headword = extractYomitanHeadword(segment).trim(); + return ( + headword.length > 0 && + (isStandaloneGrammarEndingText(surface) || isStandaloneGrammarEndingText(headword)) + ); +} + function shouldMergeKanaContinuation( previousToken: MergedToken | undefined, continuationSurface: string, @@ -186,20 +196,97 @@ export function mapYomitanParseResultItemToMergedTokens( let combinedSurface = ''; let combinedReading = ''; + let combinedStart = charOffset; let firstHeadword = ''; const expandedHeadwords: string[] = []; + const pushToken = ( + surface: string, + reading: string, + headword: string, + start: number, + end: number, + ): void => { + tokens.push({ + surface, + reading, + headword, + startPos: start, + endPos: end, + partOfSpeech: PartOfSpeech.other, + pos1: '', + isMerged: true, + isNPlusOneTarget: false, + isKnown: (() => { + const matchText = resolveKnownWordText(surface, headword, knownWordMatchMode); + return matchText ? 
isKnownWord(matchText) : false; + })(), + }); + }; + + const flushCombinedToken = (end: number): void => { + if (!combinedSurface) { + combinedStart = end; + return; + } + + const combinedHeadword = selectMergedHeadword( + firstHeadword, + expandedHeadwords, + combinedSurface, + ); + if (!combinedHeadword) { + const previousToken = tokens[tokens.length - 1]; + if (shouldMergeKanaContinuation(previousToken, combinedSurface)) { + previousToken.surface += combinedSurface; + previousToken.reading += combinedReading; + previousToken.endPos = end; + } + } else { + hasDictionaryMatch = true; + pushToken(combinedSurface, combinedReading, combinedHeadword, combinedStart, end); + } + + combinedSurface = ''; + combinedReading = ''; + firstHeadword = ''; + expandedHeadwords.length = 0; + combinedStart = end; + }; + for (const segment of line) { const segmentText = segment.text; if (!segmentText || segmentText.length === 0) { continue; } + const segmentStart = charOffset; + const segmentEnd = segmentStart + segmentText.length; + charOffset = segmentEnd; combinedSurface += segmentText; if (typeof segment.reading === 'string') { combinedReading += segment.reading; } const segmentHeadword = extractYomitanHeadword(segment); + if (isStandaloneGrammarEndingSegment(segment)) { + combinedSurface = combinedSurface.slice(0, -segmentText.length); + if (typeof segment.reading === 'string' && segment.reading.length > 0) { + combinedReading = combinedReading.slice(0, -segment.reading.length); + } + flushCombinedToken(segmentStart); + const grammarHeadword = segmentHeadword || segmentText; + hasDictionaryMatch = true; + pushToken( + segmentText, + typeof segment.reading === 'string' ? 
segment.reading : '', + grammarHeadword, + segmentStart, + segmentEnd, + ); + combinedStart = segmentEnd; + continue; + } + if (segmentHeadword) { if (!firstHeadword) { firstHeadword = segmentHeadword; @@ -210,49 +297,7 @@ export function mapYomitanParseResultItemToMergedTokens( } } - if (!combinedSurface) { - continue; - } - - const start = charOffset; - const end = start + combinedSurface.length; - charOffset = end; - const combinedHeadword = selectMergedHeadword( - firstHeadword, - expandedHeadwords, - combinedSurface, - ); - if (!combinedHeadword) { - const previousToken = tokens[tokens.length - 1]; - if (shouldMergeKanaContinuation(previousToken, combinedSurface)) { - previousToken.surface += combinedSurface; - previousToken.reading += combinedReading; - previousToken.endPos = end; - continue; - } - - // No dictionary-backed headword for this merged unit; skip it entirely so - // downstream keyboard/frequency/JLPT flows only operate on lookup-backed tokens. - continue; - } - hasDictionaryMatch = true; - const headword = combinedHeadword; - - tokens.push({ - surface: combinedSurface, - reading: combinedReading, - headword, - startPos: start, - endPos: end, - partOfSpeech: PartOfSpeech.other, - pos1: '', - isMerged: true, - isNPlusOneTarget: false, - isKnown: (() => { - const matchText = resolveKnownWordText(combinedSurface, headword, knownWordMatchMode); - return matchText ? 
isKnownWord(matchText) : false; - })(), - }); + flushCombinedToken(charOffset); } if (validLineCount === 0 || tokens.length === 0 || !hasDictionaryMatch) { diff --git a/src/core/services/tokenizer/subtitle-annotation-filter.ts b/src/core/services/tokenizer/subtitle-annotation-filter.ts index c37627e3..927a0364 100644 --- a/src/core/services/tokenizer/subtitle-annotation-filter.ts +++ b/src/core/services/tokenizer/subtitle-annotation-filter.ts @@ -8,14 +8,21 @@ import { } from '../../../token-pos2-exclusions'; import { MergedToken, PartOfSpeech } from '../../../types'; import { shouldIgnoreJlptByTerm } from '../jlpt-token-filter'; +import { isSubtitleGrammarEndingText } from './grammar-ending'; const KATAKANA_TO_HIRAGANA_OFFSET = 0x60; const KATAKANA_CODEPOINT_START = 0x30a1; const KATAKANA_CODEPOINT_END = 0x30f6; +const STANDALONE_GRAMMAR_PARTICLE_PHRASES = ['たって', 'だって'] as const; +const STANDALONE_GRAMMAR_PARTICLE_PHRASES_SET: ReadonlySet = new Set( + STANDALONE_GRAMMAR_PARTICLE_PHRASES, +); + export const SUBTITLE_ANNOTATION_EXCLUDED_TERMS = new Set([ 'あ', 'ああ', + 'ある', 'あなた', 'あんた', 'ええ', @@ -25,6 +32,7 @@ export const SUBTITLE_ANNOTATION_EXCLUDED_TERMS = new Set([ 'お前', 'こいつ', 'こっち', + 'くれ', 'じゃない', 'そうだ', 'たち', @@ -32,58 +40,27 @@ export const SUBTITLE_ANNOTATION_EXCLUDED_TERMS = new Set([ 'どこか', 'なんか', 'べき', + 'って', 'はあ', + 'はぁ', 'はは', 'へえ', 'ふう', 'ほう', 'やはり', - 'って', '何か', '何だ', '何も', '如何した', + '有る', + '在る', '様', '確かに', '誰も', '貴方', + 'もんか', + 'ものか', ]); -const SUBTITLE_ANNOTATION_EXCLUDED_EXPLANATORY_ENDING_PREFIXES = ['ん', 'の', 'なん', 'なの']; -const SUBTITLE_ANNOTATION_EXCLUDED_EXPLANATORY_ENDING_CORES = [ - 'だ', - 'です', - 'でした', - 'だった', - 'では', - 'じゃ', - 'でしょう', - 'だろう', -] as const; -const SUBTITLE_ANNOTATION_EXCLUDED_EXPLANATORY_ENDING_TRAILING_PARTICLES = [ - '', - 'か', - 'ね', - 'よ', - 'な', - 'けど', - 'よね', - 'かな', - 'かね', -] as const; -const SUBTITLE_ANNOTATION_EXCLUDED_EXPLANATORY_ENDING_THOUGHT_SUFFIXES = [ - 'か', - 'かな', - 'かね', -] as const; -const 
SUBTITLE_ANNOTATION_EXCLUDED_EXPLANATORY_ENDINGS = new Set( - SUBTITLE_ANNOTATION_EXCLUDED_EXPLANATORY_ENDING_PREFIXES.flatMap((prefix) => - SUBTITLE_ANNOTATION_EXCLUDED_EXPLANATORY_ENDING_CORES.flatMap((core) => - SUBTITLE_ANNOTATION_EXCLUDED_EXPLANATORY_ENDING_TRAILING_PARTICLES.map( - (particle) => `${prefix}${core}${particle}`, - ), - ), - ), -); const SUBTITLE_ANNOTATION_EXCLUDED_TRAILING_PARTICLE_SUFFIXES = new Set([ 'って', 'ってよ', @@ -95,7 +72,28 @@ const SUBTITLE_ANNOTATION_EXCLUDED_TRAILING_PARTICLE_SUFFIXES = new Set([ ]); const AUXILIARY_STEM_GRAMMAR_TAIL_POS1 = new Set(['名詞', '助動詞', '助詞']); const NON_INDEPENDENT_NOUN_HELPER_TAIL_POS1 = new Set(['助詞', '助動詞']); - +const AUXILIARY_INFLECTION_TRAILING_POS1 = new Set(['助動詞']); +const AUXILIARY_HELPER_SPAN_POS1 = new Set(['助詞', '助動詞', '動詞']); +const LEXICAL_VERB_POS2 = new Set(['自立']); +const STANDALONE_GRAMMAR_PARTICLE_SURFACES = new Set([ + 'か', + 'が', + 'さ', + 'し', + 'ぞ', + 'ぜ', + 'と', + 'な', + 'に', + 'ね', + 'の', + 'は', + 'へ', + 'も', + 'や', + 'よ', + 'を', +]); export interface SubtitleAnnotationFilterOptions { pos1Exclusions?: ReadonlySet; pos2Exclusions?: ReadonlySet; @@ -301,6 +299,99 @@ function isKanaOnlyNonIndependentNounHelperMerge(token: MergedToken): boolean { return pos1Parts.slice(1).every((part) => NON_INDEPENDENT_NOUN_HELPER_TAIL_POS1.has(part)); } +function isKanaOnlyText(text: string): boolean { + const normalized = normalizeKana(text); + return normalized.length > 0 && [...normalized].every(isKanaChar); +} + +function isLexicalKureruVerb(token: MergedToken): boolean { + const normalizedSurface = normalizeKana(token.surface); + const normalizedHeadword = normalizeKana(token.headword); + const pos1Parts = splitNormalizedTagParts(normalizePosTag(token.pos1)); + const pos2Parts = splitNormalizedTagParts(normalizePosTag(token.pos2)); + return ( + normalizedSurface === 'くれ' && + normalizedHeadword === 'くれる' && + pos1Parts.length === 1 && + pos1Parts[0] === '動詞' && + pos2Parts.length === 1 && + 
pos2Parts[0] === '自立' + ); +} + +function isStandaloneAuxiliaryInflectionFragment(token: MergedToken): boolean { + const normalizedSurface = normalizeKana(token.surface); + if (!isKanaOnlyText(normalizedSurface)) { + return false; + } + + const pos1Parts = splitNormalizedTagParts(normalizePosTag(token.pos1)); + if (pos1Parts.length === 0) { + return false; + } + + if (pos1Parts.every((part) => part === '助動詞')) { + return true; + } + + const pos2Parts = splitNormalizedTagParts(normalizePosTag(token.pos2)); + return ( + pos1Parts[0] === '動詞' && + pos2Parts[0] === '接尾' && + pos1Parts.slice(1).every((part) => AUXILIARY_INFLECTION_TRAILING_POS1.has(part)) + ); +} + +function isAuxiliaryOnlyHelperSpan(token: MergedToken): boolean { + const normalizedSurface = normalizeKana(token.surface); + const normalizedHeadword = normalizeKana(token.headword); + if (!isKanaOnlyText(normalizedSurface) || !isKanaOnlyText(normalizedHeadword)) { + return false; + } + + const pos1Parts = splitNormalizedTagParts(normalizePosTag(token.pos1)); + if ( + pos1Parts.length === 0 || + !pos1Parts.every((part) => AUXILIARY_HELPER_SPAN_POS1.has(part)) || + !pos1Parts.includes('助詞') || + !pos1Parts.includes('動詞') + ) { + return false; + } + + const pos2Parts = splitNormalizedTagParts(normalizePosTag(token.pos2)); + return !pos2Parts.some((part) => LEXICAL_VERB_POS2.has(part)); +} + +function isStandaloneSuruTeGrammarHelper(token: MergedToken): boolean { + const normalizedSurface = normalizeKana(token.surface); + const normalizedHeadword = normalizeKana(token.headword); + if (!normalizedSurface.startsWith('して') || normalizedHeadword !== 'する') { + return false; + } + + const pos1Parts = splitNormalizedTagParts(normalizePosTag(token.pos1)); + return ( + isKanaOnlyText(normalizedSurface) && (pos1Parts.length === 0 || pos1Parts.includes('動詞')) + ); +} + +function isStandaloneGrammarParticle(token: MergedToken): boolean { + const normalizedSurface = normalizeKana(token.surface); + const normalizedHeadword 
= normalizeKana(token.headword); + return ( + normalizedSurface === normalizedHeadword && + (STANDALONE_GRAMMAR_PARTICLE_SURFACES.has(normalizedSurface) || + STANDALONE_GRAMMAR_PARTICLE_PHRASES_SET.has(normalizedSurface)) + ); +} + +function isSingleKanaSurfaceFragment(token: MergedToken): boolean { + const normalizedSurface = normalizeKana(token.surface); + const chars = [...normalizedSurface]; + return chars.length === 1 && chars.every(isKanaChar); +} + function isExcludedByTerm(token: MergedToken): boolean { const candidates = [token.surface, token.reading, token.headword].filter( (candidate): candidate is string => typeof candidate === 'string' && candidate.length > 0, @@ -317,21 +408,11 @@ function isExcludedByTerm(token: MergedToken): boolean { continue; } - if ( - SUBTITLE_ANNOTATION_EXCLUDED_EXPLANATORY_ENDING_PREFIXES.some((prefix) => - SUBTITLE_ANNOTATION_EXCLUDED_EXPLANATORY_ENDING_THOUGHT_SUFFIXES.some( - (suffix) => normalized === `${prefix}${suffix}`, - ), - ) - ) { - return true; - } - if ( SUBTITLE_ANNOTATION_EXCLUDED_TERMS.has(trimmed) || SUBTITLE_ANNOTATION_EXCLUDED_TERMS.has(normalized) || - SUBTITLE_ANNOTATION_EXCLUDED_EXPLANATORY_ENDINGS.has(trimmed) || - SUBTITLE_ANNOTATION_EXCLUDED_EXPLANATORY_ENDINGS.has(normalized) || + isSubtitleGrammarEndingText(trimmed) || + isSubtitleGrammarEndingText(normalized) || shouldIgnoreJlptByTerm(trimmed) || shouldIgnoreJlptByTerm(normalized) ) { @@ -388,10 +469,34 @@ export function shouldExcludeTokenFromSubtitleAnnotations( return true; } + if (isStandaloneAuxiliaryInflectionFragment(token)) { + return true; + } + + if (isAuxiliaryOnlyHelperSpan(token)) { + return true; + } + + if (isStandaloneSuruTeGrammarHelper(token)) { + return true; + } + + if (isStandaloneGrammarParticle(token)) { + return true; + } + + if (isSingleKanaSurfaceFragment(token)) { + return true; + } + if (isExcludedTrailingParticleMergedToken(token)) { return true; } + if (isLexicalKureruVerb(token)) { + return false; + } + return 
isExcludedByTerm(token); } @@ -405,7 +510,6 @@ export function stripSubtitleAnnotationMetadata( return { ...token, - isKnown: false, isNPlusOneTarget: false, isNameMatch: false, jlptLevel: undefined, diff --git a/src/core/services/tokenizer/yomitan-parser-runtime.test.ts b/src/core/services/tokenizer/yomitan-parser-runtime.test.ts index 152cff28..15b04c9f 100644 --- a/src/core/services/tokenizer/yomitan-parser-runtime.test.ts +++ b/src/core/services/tokenizer/yomitan-parser-runtime.test.ts @@ -533,7 +533,7 @@ test('requestYomitanTermFrequencies caches repeated term+reading lookups', async assert.equal(frequencyCalls, 1); }); -test('requestYomitanScanTokens uses left-to-right termsFind scanning instead of parseText', async () => { +test('requestYomitanScanTokens prefers parseText tokenization over termsFind fragments', async () => { const scripts: string[] = []; const deps = createDeps(async (script) => { scripts.push(script); @@ -549,6 +549,138 @@ test('requestYomitanScanTokens uses left-to-right termsFind scanning instead of ], }; } + if (script.includes('parseText')) { + return [ + { + source: 'scanning-parser', + index: 0, + content: [ + [ + { + text: '取り組んで', + reading: 'とりくんで', + headwords: [[{ term: '取り組む' }]], + }, + ], + ], + }, + ]; + } + return [ + { + surface: '取り', + reading: 'とり', + headword: '取る', + startPos: 0, + endPos: 2, + }, + { + surface: '組んで', + reading: 'くんで', + headword: '組む', + startPos: 2, + endPos: 5, + }, + ]; + }); + + const result = await requestYomitanScanTokens('取り組んで', deps, { + error: () => undefined, + }); + + assert.deepEqual(result, [ + { + surface: '取り組んで', + reading: 'とりくんで', + headword: '取り組む', + startPos: 0, + endPos: 5, + }, + ]); + assert.ok(scripts.some((script) => script.includes('parseText'))); + assert.ok(scripts.some((script) => script.includes('termsFind'))); +}); + +test('requestYomitanScanTokens keeps scanner metadata when parse spans agree', async () => { + const deps = createDeps(async (script) => { + if 
(script.includes('optionsGetFull')) { + return { + profileCurrent: 0, + profiles: [ + { + options: { + scanning: { length: 40 }, + }, + }, + ], + }; + } + if (script.includes('parseText')) { + return [ + { + source: 'scanning-parser', + index: 0, + content: [ + [ + { + text: 'アクア', + reading: 'あくあ', + headwords: [[{ term: 'アクア' }]], + }, + ], + ], + }, + ]; + } + return [ + { + surface: 'アクア', + reading: 'あくあ', + headword: 'アクア', + startPos: 0, + endPos: 3, + isNameMatch: true, + wordClasses: ['n'], + }, + ]; + }); + + const result = await requestYomitanScanTokens('アクア', deps, { + error: () => undefined, + }); + + assert.deepEqual(result, [ + { + surface: 'アクア', + reading: 'あくあ', + headword: 'アクア', + startPos: 0, + endPos: 3, + isNameMatch: true, + wordClasses: ['n'], + }, + ]); +}); + +test('requestYomitanScanTokens falls back to left-to-right termsFind scanning', async () => { + const scripts: string[] = []; + const deps = createDeps(async (script) => { + scripts.push(script); + if (script.includes('optionsGetFull')) { + return { + profileCurrent: 0, + profiles: [ + { + options: { + scanning: { length: 40 }, + }, + }, + ], + }; + } + if (script.includes('parseText')) { + return []; + } return [ { surface: 'カズマ', @@ -573,6 +705,7 @@ test('requestYomitanScanTokens uses left-to-right termsFind scanning instead of endPos: 3, }, ]); + assert.ok(scripts.some((script) => script.includes('parseText'))); const scannerScript = scripts.find((script) => script.includes('termsFind')); assert.ok(scannerScript, 'expected termsFind scanning request script'); assert.doesNotMatch(scannerScript ?? 
'', /parseText/); @@ -891,6 +1024,105 @@ test('requestYomitanScanTokens can use frequency from later exact secondary-matc ]); }); +test('requestYomitanScanTokens uses exact frequency entry when selected reading differs', async () => { + let scannerScript = ''; + const deps = createDeps(async (script) => { + if (script.includes('termsFind')) { + scannerScript = script; + return []; + } + if (script.includes('optionsGetFull')) { + return { + profileCurrent: 0, + profileIndex: 0, + scanLength: 40, + dictionaries: ['JPDBv2㋕', 'Jiten', 'CC100'], + dictionaryPriorityByName: { + 'JPDBv2㋕': 0, + Jiten: 1, + CC100: 2, + }, + dictionaryFrequencyModeByName: { + 'JPDBv2㋕': 'rank-based', + Jiten: 'rank-based', + CC100: 'rank-based', + }, + profiles: [ + { + options: { + scanning: { length: 40 }, + dictionaries: [ + { name: 'JPDBv2㋕', enabled: true, id: 0 }, + { name: 'Jiten', enabled: true, id: 1 }, + { name: 'CC100', enabled: true, id: 2 }, + ], + }, + }, + ], + }; + } + return null; + }); + + await requestYomitanScanTokens('第二走者', deps, { + error: () => undefined, + }); + + const result = (await runInjectedYomitanScript(scannerScript, (action, params) => { + if (action !== 'termsFind') { + throw new Error(`unexpected action: ${action}`); + } + + const text = (params as { text?: string } | undefined)?.text ?? 
''; + if (!text.startsWith('第二')) { + return { originalTextLength: 0, dictionaryEntries: [] }; + } + + return { + originalTextLength: 2, + dictionaryEntries: [ + { + headwords: [ + { + term: '第二', + reading: 'だいに', + sources: [{ originalText: '第二', isPrimary: true, matchType: 'exact' }], + }, + ], + frequencies: [], + }, + { + headwords: [ + { + term: '第二', + reading: '', + sources: [{ originalText: '第二', isPrimary: false, matchType: 'exact' }], + }, + ], + frequencies: [ + { + headwordIndex: 0, + dictionary: 'JPDBv2㋕', + frequency: 189513, + displayValue: '1820,189513句', + }, + ], + }, + ], + }; + })) as Array>; + + assert.deepEqual(result?.[0], { + surface: '第二', + reading: 'だいに', + headword: '第二', + startPos: 0, + endPos: 2, + isNameMatch: false, + frequencyRank: 1820, + }); +}); + test('requestYomitanScanTokens marks tokens backed by SubMiner character dictionary entries', async () => { const deps = createDeps(async (script) => { if (script.includes('optionsGetFull')) { @@ -1049,6 +1281,60 @@ test('requestYomitanScanTokens marks grouped entries when SubMiner dictionary al assert.equal((result as Array<{ isNameMatch?: boolean }>)[0]?.isNameMatch, true); }); +test('requestYomitanScanTokens preserves matched headword word classes', async () => { + let scannerScript = ''; + const deps = createDeps(async (script) => { + if (script.includes('termsFind')) { + scannerScript = script; + return []; + } + if (script.includes('optionsGetFull')) { + return { + profileCurrent: 0, + profiles: [ + { + options: { + scanning: { length: 40 }, + }, + }, + ], + }; + } + return null; + }); + + await requestYomitanScanTokens('は', deps, { error: () => undefined }); + + const result = await runInjectedYomitanScript(scannerScript, (action, params) => { + if (action !== 'termsFind') { + throw new Error(`unexpected action: ${action}`); + } + + const text = (params as { text?: string } | undefined)?.text; + if (text !== 'は') { + return { originalTextLength: 0, dictionaryEntries: [] }; + } 
+ + return { + originalTextLength: 1, + dictionaryEntries: [ + { + headwords: [ + { + term: 'は', + reading: 'は', + wordClasses: ['prt'], + sources: [{ originalText: 'は', isPrimary: true, matchType: 'exact' }], + }, + ], + }, + ], + }; + }); + + assert.deepEqual((result as Array<{ wordClasses?: string[] }>)[0]?.wordClasses, ['prt']); +}); + test('requestYomitanScanTokens skips fallback fragments without exact primary source matches', async () => { const deps = createDeps(async (script) => { if (script.includes('optionsGetFull')) { diff --git a/src/core/services/tokenizer/yomitan-parser-runtime.ts b/src/core/services/tokenizer/yomitan-parser-runtime.ts index 568dc053..b5591dda 100644 --- a/src/core/services/tokenizer/yomitan-parser-runtime.ts +++ b/src/core/services/tokenizer/yomitan-parser-runtime.ts @@ -53,6 +53,7 @@ export interface YomitanScanToken { endPos: number; isNameMatch?: boolean; frequencyRank?: number; + wordClasses?: string[]; } interface YomitanProfileMetadata { @@ -91,11 +92,30 @@ function isScanTokenArray(value: unknown): value is YomitanScanToken[] { typeof entry.startPos === 'number' && typeof entry.endPos === 'number' && (entry.isNameMatch === undefined || typeof entry.isNameMatch === 'boolean') && - (entry.frequencyRank === undefined || typeof entry.frequencyRank === 'number'), + (entry.frequencyRank === undefined || typeof entry.frequencyRank === 'number') && + (entry.wordClasses === undefined || + (Array.isArray(entry.wordClasses) && + entry.wordClasses.every((wordClass) => typeof wordClass === 'string'))), ) ); } +function hasSameTokenSpans(left: YomitanScanToken[], right: YomitanScanToken[]): boolean { + if (left.length !== right.length) { + return false; + } + + return left.every((token, index) => { + const other = right[index]; + return ( + other !== undefined && + token.surface === other.surface && + token.startPos === other.startPos && + token.endPos === other.endPos + ); + }); +} + function makeTermReadingCacheKey(term: string, reading: 
string | null): string { return `${term}\u0000${reading ?? ''}`; } @@ -956,6 +976,9 @@ const YOMITAN_SCANNING_HELPERS = String.raw` const matchReading = typeof match.headword?.reading === 'string' ? match.headword.reading : ''; const preferredReading = typeof preferredMatch.headword?.reading === 'string' ? preferredMatch.headword.reading : ''; + if (!matchReading || !preferredReading) { + return true; + } return matchReading === preferredReading; } function getBestFrequencyRankForMatches(matches, dictionaryPriorityByName, dictionaryFrequencyModeByName) { @@ -975,6 +998,11 @@ const YOMITAN_SCANNING_HELPERS = String.raw` return best; } function getPreferredHeadword(dictionaryEntries, token, dictionaryPriorityByName, dictionaryFrequencyModeByName) { + function normalizeWordClasses(headword) { + if (!Array.isArray(headword?.wordClasses)) { return undefined; } + const classes = headword.wordClasses.filter((wordClass) => typeof wordClass === "string" && wordClass.trim().length > 0); + return classes.length > 0 ? classes : undefined; + } function appendDictionaryNames(target, value) { if (!value || typeof value !== 'object') { return; @@ -1033,6 +1061,7 @@ const YOMITAN_SCANNING_HELPERS = String.raw` return { term: preferredMatch.headword.term, reading: preferredMatch.headword.reading, + wordClasses: normalizeWordClasses(preferredMatch.headword), isNameMatch: matchedNameDictionary || isNameDictionaryEntry(preferredMatch.dictionaryEntry), frequencyRank: getBestFrequencyRankForMatches( exactFrequencyMatches.length > 0 ? exactFrequencyMatches : exactPrimaryMatches, @@ -1099,7 +1128,7 @@ ${YOMITAN_SCANNING_HELPERS} if (preferredHeadword && typeof preferredHeadword.term === "string") { const reading = typeof preferredHeadword.reading === "string" ? 
preferredHeadword.reading : ""; const segments = distributeFuriganaInflected(preferredHeadword.term, reading, source); - tokens.push({ + const tokenPayload = { surface: segments.map((segment) => segment.text).join("") || source, reading: segments.map((segment) => typeof segment.reading === "string" ? segment.reading : "").join(""), headword: preferredHeadword.term, @@ -1110,7 +1139,11 @@ ${YOMITAN_SCANNING_HELPERS} typeof preferredHeadword.frequencyRank === "number" && Number.isFinite(preferredHeadword.frequencyRank) ? Math.max(1, Math.floor(preferredHeadword.frequencyRank)) : undefined, - }); + }; + if (Array.isArray(preferredHeadword.wordClasses) && preferredHeadword.wordClasses.length > 0) { + tokenPayload.wordClasses = preferredHeadword.wordClasses; + } + tokens.push(tokenPayload); i += originalTextLength; continue; } @@ -1235,6 +1268,17 @@ export async function requestYomitanScanTokens( return null; } + const parseResults = await requestYomitanParseResults(text, deps, logger); + const selectedParseTokens = selectYomitanParseTokens(parseResults, () => false, 'headword'); + const parseScanTokens = + selectedParseTokens?.map((token) => ({ + surface: token.surface, + reading: token.reading, + headword: token.headword, + startPos: token.startPos, + endPos: token.endPos, + })) ?? null; + const metadata = await requestYomitanProfileMetadata(parserWindow, logger); const profileIndex = metadata?.profileIndex ?? 0; const scanLength = metadata?.scanLength ?? DEFAULT_YOMITAN_SCAN_LENGTH; @@ -1252,6 +1296,9 @@ export async function requestYomitanScanTokens( true, ); if (isScanTokenArray(rawResult)) { + if (parseScanTokens && parseScanTokens.length > 0) { + return hasSameTokenSpans(parseScanTokens, rawResult) ? rawResult : parseScanTokens; + } return rawResult; } if (Array.isArray(rawResult)) { @@ -1266,8 +1313,14 @@ export async function requestYomitanScanTokens( })) ?? 
null ); } + if (parseScanTokens && parseScanTokens.length > 0) { + return parseScanTokens; + } return null; } catch (err) { + if (parseScanTokens && parseScanTokens.length > 0) { + return parseScanTokens; + } logger.error('Yomitan scanner request failed:', (err as Error).message); return null; } diff --git a/src/main.ts b/src/main.ts index 8c586a79..83ebc81e 100644 --- a/src/main.ts +++ b/src/main.ts @@ -33,6 +33,11 @@ import { import { applyControllerConfigUpdate } from './main/controller-config-update.js'; import { openPlaylistBrowser as openPlaylistBrowserRuntime } from './main/runtime/playlist-browser-open'; import { createDiscordRpcClient } from './main/runtime/discord-rpc-client.js'; +import { + type CancelLinuxMpvFullscreenOverlayRefreshBurst, + clearLinuxMpvFullscreenOverlayRefreshTimeouts, + updateLinuxMpvFullscreenOverlayRefreshBurst, +} from './main/runtime/linux-mpv-fullscreen-overlay-refresh'; import { mergeAiConfig } from './ai/config'; function getPasswordStoreArg(argv: string[]): string | null { @@ -1402,6 +1407,8 @@ const subtitleProcessingController = createSubtitleProcessingController( let subtitlePrefetchService: SubtitlePrefetchService | null = null; let subtitlePrefetchRefreshTimer: ReturnType<typeof setTimeout> | null = null; let lastObservedTimePos = 0; +let cancelLinuxMpvFullscreenOverlayRefreshBurst: CancelLinuxMpvFullscreenOverlayRefreshBurst | null = + null; const SEEK_THRESHOLD_SECONDS = 3; function clearScheduledSubtitlePrefetchRefresh(): void { @@ -1411,6 +1418,11 @@ function clearScheduledSubtitlePrefetchRefresh(): void { } } +function cancelPendingLinuxMpvFullscreenOverlayRefreshBurst(): void { + cancelLinuxMpvFullscreenOverlayRefreshBurst?.(); + cancelLinuxMpvFullscreenOverlayRefreshBurst = null; +} + const subtitlePrefetchInitController = createSubtitlePrefetchInitController({ getCurrentService: () => subtitlePrefetchService, setCurrentService: (service) => { @@ -3136,6 +3148,10 @@ const { stopTexthookerService: () => texthookerService.stop(),
clearWindowsVisibleOverlayForegroundPollLoop: () => clearWindowsVisibleOverlayForegroundPollLoop(), + clearLinuxMpvFullscreenOverlayRefreshTimeouts: () => { + cancelLinuxMpvFullscreenOverlayRefreshBurst = null; + clearLinuxMpvFullscreenOverlayRefreshTimeouts(); + }, getMainOverlayWindow: () => overlayManager.getMainWindow(), clearMainOverlayWindow: () => overlayManager.setMainWindow(null), getModalOverlayWindow: () => overlayManager.getModalWindow(), @@ -3422,6 +3438,9 @@ const recordTrackedCardsMined = (count: number, noteIds?: number[]): void => { ensureImmersionTrackerStarted(); appState.immersionTracker?.recordCardsMined(count, noteIds); }; +const refreshCurrentSubtitleAfterKnownWordUpdate = (): void => { + subtitleProcessingController.refreshCurrentSubtitle(appState.currentSubText); +}; let hasAttemptedImmersionTrackerStartup = false; const ensureImmersionTrackerStarted = (): void => { if (hasAttemptedImmersionTrackerStartup || appState.immersionTracker) { @@ -3840,6 +3859,20 @@ const { } lastObservedTimePos = time; }, + onFullscreenChange: (fullscreen) => { + cancelLinuxMpvFullscreenOverlayRefreshBurst = updateLinuxMpvFullscreenOverlayRefreshBurst( + fullscreen, + { + overlayManager: { + getMainWindow: () => overlayManager.getMainWindow(), + getVisibleOverlayVisible: () => overlayManager.getVisibleOverlayVisible(), + }, + overlayVisibilityRuntime, + ensureOverlayWindowLevel: (window) => ensureOverlayWindowLevel(window), + }, + cancelLinuxMpvFullscreenOverlayRefreshBurst, + ); + }, onSubtitleTrackChange: (sid) => { scheduleSubtitlePrefetchRefresh(); youtubePrimarySubtitleNotificationRuntime.handleSubtitleTrackChange(sid); @@ -4080,10 +4113,18 @@ const buildUpdateVisibleOverlayBoundsMainDepsHandler = createBuildUpdateVisibleOverlayBoundsMainDepsHandler({ setOverlayWindowBounds: (geometry) => applyOverlayRegions(geometry), afterSetOverlayWindowBounds: () => { - if (process.platform !== 'win32' || !overlayManager.getVisibleOverlayVisible()) { + if 
(!overlayManager.getVisibleOverlayVisible()) { return; } - scheduleWindowsVisibleOverlayZOrderSyncBurst(); + if (process.platform === 'win32') { + scheduleWindowsVisibleOverlayZOrderSyncBurst(); + return; + } + const mainWindow = overlayManager.getMainWindow(); + if (!mainWindow || mainWindow.isDestroyed()) { + return; + } + ensureOverlayWindowLevel(mainWindow); }, }); const updateVisibleOverlayBoundsMainDeps = buildUpdateVisibleOverlayBoundsMainDepsHandler(); @@ -4228,6 +4269,9 @@ function destroyTray(): void { function initializeOverlayRuntime(): void { initializeOverlayRuntimeHandler(); appState.ankiIntegration?.setRecordCardsMinedCallback(recordTrackedCardsMined); + appState.ankiIntegration?.setKnownWordCacheUpdatedCallback( + refreshCurrentSubtitleAfterKnownWordUpdate, + ); syncOverlayMpvSubtitleSuppression(); } @@ -4906,6 +4950,7 @@ const { registerIpcRuntimeHandlers } = composeIpcRuntimeHandlers({ openAnilistSetup: () => openAnilistSetupWindow(), getAnilistQueueStatus: () => anilistStateRuntime.getQueueStatusSnapshot(), retryAnilistQueueNow: () => processNextAnilistRetryUpdate(), + runAnilistPostWatchUpdateOnManualMark: () => maybeRunAnilistPostWatchUpdate({ force: true }), getCharacterDictionarySelection: () => characterDictionaryRuntime.getManualSelectionSnapshot(), setCharacterDictionarySelection: async (mediaId: number) => @@ -4934,6 +4979,9 @@ const { registerIpcRuntimeHandlers } = composeIpcRuntimeHandlers({ setAnkiIntegration: (integration: AnkiIntegration | null) => { appState.ankiIntegration = integration; appState.ankiIntegration?.setRecordCardsMinedCallback(recordTrackedCardsMined); + appState.ankiIntegration?.setKnownWordCacheUpdatedCallback( + refreshCurrentSubtitleAfterKnownWordUpdate, + ); }, getKnownWordCacheStatePath: () => path.join(USER_DATA_PATH, 'known-words-cache.json'), showDesktopNotification, @@ -5159,6 +5207,7 @@ const { createMainWindow: createMainWindowHandler, createModalWindow: createModa onWindowContentReady: () => 
overlayVisibilityRuntime.updateVisibleOverlayVisibility(), onWindowClosed: (windowKind) => { if (windowKind === 'visible') { + cancelPendingLinuxMpvFullscreenOverlayRefreshBurst(); overlayManager.setMainWindow(null); } else { overlayManager.setModalWindow(null); @@ -5433,6 +5482,9 @@ function ensureOverlayWindowsReadyForVisibilityActions(): void { function setVisibleOverlayVisible(visible: boolean): void { ensureOverlayWindowsReadyForVisibilityActions(); + if (!visible) { + cancelPendingLinuxMpvFullscreenOverlayRefreshBurst(); + } if (visible) { void ensureOverlayMpvSubtitlesHidden(); } @@ -5442,13 +5494,18 @@ function setVisibleOverlayVisible(visible: boolean): void { function toggleVisibleOverlay(): void { ensureOverlayWindowsReadyForVisibilityActions(); - if (!overlayManager.getVisibleOverlayVisible()) { + if (overlayManager.getVisibleOverlayVisible()) { + cancelPendingLinuxMpvFullscreenOverlayRefreshBurst(); + } else { void ensureOverlayMpvSubtitlesHidden(); } toggleVisibleOverlayHandler(); syncOverlayMpvSubtitleSuppression(); } function setOverlayVisible(visible: boolean): void { + if (!visible) { + cancelPendingLinuxMpvFullscreenOverlayRefreshBurst(); + } if (visible) { void ensureOverlayMpvSubtitlesHidden(); } diff --git a/src/main/dependencies.ts b/src/main/dependencies.ts index 24672ea8..443019c4 100644 --- a/src/main/dependencies.ts +++ b/src/main/dependencies.ts @@ -94,6 +94,7 @@ export interface MainIpcRuntimeServiceDepsParams { openAnilistSetup: IpcDepsRuntimeOptions['openAnilistSetup']; getAnilistQueueStatus: IpcDepsRuntimeOptions['getAnilistQueueStatus']; retryAnilistQueueNow: IpcDepsRuntimeOptions['retryAnilistQueueNow']; + runAnilistPostWatchUpdateOnManualMark?: IpcDepsRuntimeOptions['runAnilistPostWatchUpdateOnManualMark']; getCharacterDictionarySelection?: IpcDepsRuntimeOptions['getCharacterDictionarySelection']; setCharacterDictionarySelection?: IpcDepsRuntimeOptions['setCharacterDictionarySelection']; appendClipboardVideoToQueue: 
IpcDepsRuntimeOptions['appendClipboardVideoToQueue']; @@ -263,6 +264,7 @@ export function createMainIpcRuntimeServiceDeps( openAnilistSetup: params.openAnilistSetup, getAnilistQueueStatus: params.getAnilistQueueStatus, retryAnilistQueueNow: params.retryAnilistQueueNow, + runAnilistPostWatchUpdateOnManualMark: params.runAnilistPostWatchUpdateOnManualMark, getCharacterDictionarySelection: params.getCharacterDictionarySelection, setCharacterDictionarySelection: params.setCharacterDictionarySelection, appendClipboardVideoToQueue: params.appendClipboardVideoToQueue, diff --git a/src/main/runtime/anilist-post-watch.test.ts b/src/main/runtime/anilist-post-watch.test.ts index 94f2db6b..83923a11 100644 --- a/src/main/runtime/anilist-post-watch.test.ts +++ b/src/main/runtime/anilist-post-watch.test.ts @@ -77,6 +77,107 @@ test('createMaybeRunAnilistPostWatchUpdateHandler queues when token missing', as assert.ok(calls.includes('inflight:false')); }); +test('createMaybeRunAnilistPostWatchUpdateHandler force-runs manual watched updates below threshold', async () => { + const calls: string[] = []; + const handler = createMaybeRunAnilistPostWatchUpdateHandler({ + getInFlight: () => false, + setInFlight: (value) => calls.push(`inflight:${value}`), + getResolvedConfig: () => ({}), + isAnilistTrackingEnabled: () => true, + getCurrentMediaKey: () => '/tmp/video.mkv', + hasMpvClient: () => false, + getTrackedMediaKey: () => '/tmp/video.mkv', + resetTrackedMedia: () => {}, + getWatchedSeconds: () => 0, + maybeProbeAnilistDuration: async () => { + calls.push('probe'); + return 1000; + }, + ensureAnilistMediaGuess: async () => ({ title: 'Show', episode: 3 }), + hasAttemptedUpdateKey: () => false, + processNextAnilistRetryUpdate: async () => ({ ok: true, message: 'noop' }), + refreshAnilistClientSecretState: async () => 'token', + enqueueRetry: () => calls.push('enqueue'), + markRetryFailure: () => calls.push('mark-failure'), + markRetrySuccess: () => calls.push('mark-success'), + 
refreshRetryQueueState: () => calls.push('refresh'), + updateAnilistPostWatchProgress: async () => { + calls.push('update'); + return { status: 'updated', message: 'updated ok' }; + }, + rememberAttemptedUpdateKey: () => calls.push('remember'), + showMpvOsd: (message) => calls.push(`osd:${message}`), + logInfo: (message) => calls.push(`info:${message}`), + logWarn: (message) => calls.push(`warn:${message}`), + minWatchSeconds: 600, + minWatchRatio: 0.85, + }); + + await handler({ force: true }); + + assert.equal(calls.includes('probe'), false); + assert.ok(calls.includes('update')); + assert.ok(calls.includes('remember')); + assert.ok(calls.includes('osd:updated ok')); +}); + +test('createMaybeRunAnilistPostWatchUpdateHandler blocks concurrent runs before async gating', async () => { + const calls: string[] = []; + let inFlight = false; + let resolveDuration!: (duration: number) => void; + const durationPromise = new Promise<number>((resolve) => { + resolveDuration = resolve; + }); + const handler = createMaybeRunAnilistPostWatchUpdateHandler({ + getInFlight: () => inFlight, + setInFlight: (value) => { + inFlight = value; + calls.push(`inflight:${value}`); + }, + getResolvedConfig: () => ({}), + isAnilistTrackingEnabled: () => true, + getCurrentMediaKey: () => '/tmp/video.mkv', + hasMpvClient: () => true, + getTrackedMediaKey: () => '/tmp/video.mkv', + resetTrackedMedia: () => {}, + getWatchedSeconds: () => 1000, + maybeProbeAnilistDuration: async () => { + calls.push('probe'); + return await durationPromise; + }, + ensureAnilistMediaGuess: async () => ({ title: 'Show', season: null, episode: 1 }), + hasAttemptedUpdateKey: () => false, + processNextAnilistRetryUpdate: async () => ({ ok: true, message: 'noop' }), + refreshAnilistClientSecretState: async () => 'token', + enqueueRetry: () => calls.push('enqueue'), + markRetryFailure: () => calls.push('mark-failure'), + markRetrySuccess: () => calls.push('mark-success'), + refreshRetryQueueState: () => calls.push('refresh'),
updateAnilistPostWatchProgress: async () => { + calls.push('update'); + return { status: 'updated', message: 'updated ok' }; + }, + rememberAttemptedUpdateKey: () => calls.push('remember'), + showMpvOsd: (message) => calls.push(`osd:${message}`), + logInfo: (message) => calls.push(`info:${message}`), + logWarn: (message) => calls.push(`warn:${message}`), + minWatchSeconds: 600, + minWatchRatio: 0.85, + }); + + const firstRun = handler(); + assert.deepEqual(calls, ['inflight:true', 'probe']); + + await handler(); + assert.deepEqual(calls, ['inflight:true', 'probe']); + + resolveDuration(1000); + await firstRun; + + assert.equal(calls.filter((call) => call === 'update').length, 1); + assert.equal(calls.at(-1), 'inflight:false'); +}); + test('createMaybeRunAnilistPostWatchUpdateHandler skips youtube playback entirely', async () => { const calls: string[] = []; const handler = createMaybeRunAnilistPostWatchUpdateHandler({ diff --git a/src/main/runtime/anilist-post-watch.ts b/src/main/runtime/anilist-post-watch.ts index 89bc3cc1..47a9a3d4 100644 --- a/src/main/runtime/anilist-post-watch.ts +++ b/src/main/runtime/anilist-post-watch.ts @@ -16,6 +16,10 @@ type RetryQueueItem = { episode: number; }; +type AnilistPostWatchRunOptions = { + force?: boolean; +}; + export function buildAnilistAttemptKey(mediaKey: string, episode: number): string { return `${mediaKey}::${episode}`; } @@ -118,10 +122,11 @@ export function createMaybeRunAnilistPostWatchUpdateHandler(deps: { minWatchSeconds: number; minWatchRatio: number; }) { - return async (): Promise<void> => { + return async (options: AnilistPostWatchRunOptions = {}): Promise<void> => { if (deps.getInFlight()) { return; } + const force = options.force === true; const resolved = deps.getResolvedConfig(); if (!deps.isAnilistTrackingEnabled(resolved)) { @@ -129,7 +134,7 @@ export function createMaybeRunAnilistPostWatchUpdateHandler(deps: { } const mediaKey = deps.getCurrentMediaKey(); - if (!mediaKey
|| (!force && !deps.hasMpvClient())) { return; } if (isYoutubeMediaPath(mediaKey)) { @@ -139,31 +144,36 @@ export function createMaybeRunAnilistPostWatchUpdateHandler(deps: { deps.resetTrackedMedia(mediaKey); } - const watchedSeconds = deps.getWatchedSeconds(); - if (!Number.isFinite(watchedSeconds) || watchedSeconds < deps.minWatchSeconds) { - return; - } - - const duration = await deps.maybeProbeAnilistDuration(mediaKey); - if (!duration || duration <= 0) { - return; - } - if (watchedSeconds / duration < deps.minWatchRatio) { - return; - } - - const guess = await deps.ensureAnilistMediaGuess(mediaKey); - if (!guess?.title || !guess.episode || guess.episode <= 0) { - return; - } - - const attemptKey = buildAnilistAttemptKey(mediaKey, guess.episode); - if (deps.hasAttemptedUpdateKey(attemptKey)) { - return; + let watchedSeconds = 0; + if (!force) { + watchedSeconds = deps.getWatchedSeconds(); + if (!Number.isFinite(watchedSeconds) || watchedSeconds < deps.minWatchSeconds) { + return; + } } deps.setInFlight(true); try { + if (!force) { + const duration = await deps.maybeProbeAnilistDuration(mediaKey); + if (!duration || duration <= 0) { + return; + } + if (watchedSeconds / duration < deps.minWatchRatio) { + return; + } + } + + const guess = await deps.ensureAnilistMediaGuess(mediaKey); + if (!guess?.title || !guess.episode || guess.episode <= 0) { + return; + } + + const attemptKey = buildAnilistAttemptKey(mediaKey, guess.episode); + if (deps.hasAttemptedUpdateKey(attemptKey)) { + return; + } + await deps.processNextAnilistRetryUpdate(); if (deps.hasAttemptedUpdateKey(attemptKey)) { return; diff --git a/src/main/runtime/anilist-setup-protocol-main-deps.test.ts b/src/main/runtime/anilist-setup-protocol-main-deps.test.ts index 60106f62..dc975e61 100644 --- a/src/main/runtime/anilist-setup-protocol-main-deps.test.ts +++ b/src/main/runtime/anilist-setup-protocol-main-deps.test.ts @@ -27,7 +27,10 @@ test('consume anilist setup token main deps builder maps callbacks', () 
=> { const calls: string[] = []; const deps = createBuildConsumeAnilistSetupTokenFromUrlMainDepsHandler({ consumeAnilistSetupCallbackUrl: () => true, - saveToken: () => calls.push('save'), + saveToken: () => { + calls.push('save'); + return true; + }, setCachedToken: () => calls.push('cache'), setResolvedState: () => calls.push('resolved'), setSetupPageOpened: () => calls.push('opened'), @@ -38,7 +41,7 @@ test('consume anilist setup token main deps builder maps callbacks', () => { assert.equal( deps.consumeAnilistSetupCallbackUrl({ rawUrl: 'subminer://anilist-setup', - saveToken: () => {}, + saveToken: () => true, setCachedToken: () => {}, setResolvedState: () => {}, setSetupPageOpened: () => {}, diff --git a/src/main/runtime/anilist-setup-protocol.test.ts b/src/main/runtime/anilist-setup-protocol.test.ts index a006668b..dbc35de7 100644 --- a/src/main/runtime/anilist-setup-protocol.test.ts +++ b/src/main/runtime/anilist-setup-protocol.test.ts @@ -22,7 +22,7 @@ test('createNotifyAnilistSetupHandler sends OSD when mpv client exists', () => { test('createConsumeAnilistSetupTokenFromUrlHandler delegates with deps', () => { const consume = createConsumeAnilistSetupTokenFromUrlHandler({ consumeAnilistSetupCallbackUrl: (input) => input.rawUrl.includes('access_token=ok'), - saveToken: () => {}, + saveToken: () => true, setCachedToken: () => {}, setResolvedState: () => {}, setSetupPageOpened: () => {}, diff --git a/src/main/runtime/anilist-setup-protocol.ts b/src/main/runtime/anilist-setup-protocol.ts index 90d75e60..fe4e5d84 100644 --- a/src/main/runtime/anilist-setup-protocol.ts +++ b/src/main/runtime/anilist-setup-protocol.ts @@ -1,14 +1,14 @@ export type ConsumeAnilistSetupTokenDeps = { consumeAnilistSetupCallbackUrl: (input: { rawUrl: string; - saveToken: (token: string) => void; + saveToken: (token: string) => boolean; setCachedToken: (token: string) => void; setResolvedState: (resolvedAt: number) => void; setSetupPageOpened: (opened: boolean) => void; onSuccess: () 
=> void; closeWindow: () => void; }) => boolean; - saveToken: (token: string) => void; + saveToken: (token: string) => boolean; setCachedToken: (token: string) => void; setResolvedState: (resolvedAt: number) => void; setSetupPageOpened: (opened: boolean) => void; diff --git a/src/main/runtime/anilist-setup.test.ts b/src/main/runtime/anilist-setup.test.ts index 7741735c..01005bda 100644 --- a/src/main/runtime/anilist-setup.test.ts +++ b/src/main/runtime/anilist-setup.test.ts @@ -90,7 +90,10 @@ test('consumeAnilistSetupCallbackUrl persists token and closes window for callba Date.now = () => 120_000; const handled = consumeAnilistSetupCallbackUrl({ rawUrl: 'https://anilist.subminer.moe/#access_token=saved-token', - saveToken: (value: string) => events.push(`save:${value}`), + saveToken: (value: string) => { + events.push(`save:${value}`); + return true; + }, setCachedToken: (value: string) => events.push(`cache:${value}`), setResolvedState: (timestampMs: number) => events.push(`state:${timestampMs > 0 ? 'ok' : 'bad'}`), @@ -120,7 +123,10 @@ test('consumeAnilistSetupCallbackUrl persists token for subminer deep link URL', Date.now = () => 120_000; const handled = consumeAnilistSetupCallbackUrl({ rawUrl: 'subminer://anilist-setup?access_token=saved-token', - saveToken: (value: string) => events.push(`save:${value}`), + saveToken: (value: string) => { + events.push(`save:${value}`); + return true; + }, setCachedToken: (value: string) => events.push(`cache:${value}`), setResolvedState: (timestampMs: number) => events.push(`state:${timestampMs > 0 ? 
'ok' : 'bad'}`), @@ -143,11 +149,33 @@ test('consumeAnilistSetupCallbackUrl persists token for subminer deep link URL', } }); +test('consumeAnilistSetupCallbackUrl keeps setup open when token persistence fails', () => { + const events: string[] = []; + const handled = consumeAnilistSetupCallbackUrl({ + rawUrl: 'subminer://anilist-setup?access_token=saved-token', + saveToken: (value: string) => { + events.push(`save:${value}`); + return false; + }, + setCachedToken: () => events.push('cache'), + setResolvedState: () => events.push('state'), + setSetupPageOpened: (opened: boolean) => events.push(`opened:${opened}`), + onSuccess: () => events.push('success'), + closeWindow: () => events.push('close'), + }); + + assert.equal(handled, true); + assert.deepEqual(events, ['save:saved-token', 'opened:true']); +}); + test('consumeAnilistSetupCallbackUrl ignores non-callback URLs', () => { const events: string[] = []; const handled = consumeAnilistSetupCallbackUrl({ rawUrl: 'https://anilist.co/settings/developer', - saveToken: () => events.push('save'), + saveToken: () => { + events.push('save'); + return true; + }, setCachedToken: () => events.push('cache'), setResolvedState: () => events.push('state'), setSetupPageOpened: () => events.push('opened'), diff --git a/src/main/runtime/anilist-setup.ts b/src/main/runtime/anilist-setup.ts index 8d55df6f..972c91b7 100644 --- a/src/main/runtime/anilist-setup.ts +++ b/src/main/runtime/anilist-setup.ts @@ -10,7 +10,7 @@ export type BuildAnilistSetupUrlDeps = { export type ConsumeAnilistSetupCallbackUrlDeps = { rawUrl: string; - saveToken: (token: string) => void; + saveToken: (token: string) => boolean; setCachedToken: (token: string) => void; setResolvedState: (resolvedAt: number) => void; setSetupPageOpened: (opened: boolean) => void; @@ -71,8 +71,12 @@ export function consumeAnilistSetupCallbackUrl(deps: ConsumeAnilistSetupCallback return false; } + if (!deps.saveToken(token)) { + deps.setSetupPageOpened(true); + return true; + } + 
const resolvedAt = Date.now(); - deps.saveToken(token); deps.setCachedToken(token); deps.setResolvedState(resolvedAt); deps.setSetupPageOpened(false); diff --git a/src/main/runtime/app-lifecycle-actions.test.ts b/src/main/runtime/app-lifecycle-actions.test.ts index 8d5727bd..9e858daf 100644 --- a/src/main/runtime/app-lifecycle-actions.test.ts +++ b/src/main/runtime/app-lifecycle-actions.test.ts @@ -18,6 +18,8 @@ test('on will quit cleanup handler runs all cleanup steps', () => { stopTexthookerService: () => calls.push('stop-texthooker'), clearWindowsVisibleOverlayForegroundPollLoop: () => calls.push('clear-windows-visible-overlay-poll'), + clearLinuxMpvFullscreenOverlayRefreshTimeouts: () => + calls.push('clear-linux-mpv-fullscreen-overlay-refresh-timeouts'), destroyMainOverlayWindow: () => calls.push('destroy-main-overlay-window'), destroyModalOverlayWindow: () => calls.push('destroy-modal-overlay-window'), destroyYomitanParserWindow: () => calls.push('destroy-yomitan-window'), @@ -42,10 +44,11 @@ test('on will quit cleanup handler runs all cleanup steps', () => { }); cleanup(); - assert.equal(calls.length, 29); + assert.equal(calls.length, 30); assert.equal(calls[0], 'destroy-tray'); assert.equal(calls[calls.length - 1], 'stop-discord-presence'); assert.ok(calls.includes('clear-windows-visible-overlay-poll')); + assert.ok(calls.includes('clear-linux-mpv-fullscreen-overlay-refresh-timeouts')); assert.ok(calls.indexOf('flush-mpv-log') < calls.indexOf('destroy-socket')); }); diff --git a/src/main/runtime/app-lifecycle-actions.ts b/src/main/runtime/app-lifecycle-actions.ts index 5e90c0e7..cf9eb634 100644 --- a/src/main/runtime/app-lifecycle-actions.ts +++ b/src/main/runtime/app-lifecycle-actions.ts @@ -7,6 +7,7 @@ export function createOnWillQuitCleanupHandler(deps: { stopSubtitleWebsocket: () => void; stopTexthookerService: () => void; clearWindowsVisibleOverlayForegroundPollLoop: () => void; + clearLinuxMpvFullscreenOverlayRefreshTimeouts: () => void; 
destroyMainOverlayWindow: () => void; destroyModalOverlayWindow: () => void; destroyYomitanParserWindow: () => void; @@ -38,6 +39,7 @@ export function createOnWillQuitCleanupHandler(deps: { deps.stopSubtitleWebsocket(); deps.stopTexthookerService(); deps.clearWindowsVisibleOverlayForegroundPollLoop(); + deps.clearLinuxMpvFullscreenOverlayRefreshTimeouts(); deps.destroyMainOverlayWindow(); deps.destroyModalOverlayWindow(); deps.destroyYomitanParserWindow(); diff --git a/src/main/runtime/app-lifecycle-main-cleanup.test.ts b/src/main/runtime/app-lifecycle-main-cleanup.test.ts index e2bd0b3e..e9bd557a 100644 --- a/src/main/runtime/app-lifecycle-main-cleanup.test.ts +++ b/src/main/runtime/app-lifecycle-main-cleanup.test.ts @@ -20,6 +20,8 @@ test('cleanup deps builder returns handlers that guard optional runtime objects' stopTexthookerService: () => calls.push('stop-texthooker'), clearWindowsVisibleOverlayForegroundPollLoop: () => calls.push('clear-windows-visible-overlay-foreground-poll-loop'), + clearLinuxMpvFullscreenOverlayRefreshTimeouts: () => + calls.push('clear-linux-mpv-fullscreen-overlay-refresh-timeouts'), getMainOverlayWindow: () => ({ isDestroyed: () => false, destroy: () => calls.push('destroy-main-overlay-window'), @@ -88,6 +90,7 @@ test('cleanup deps builder returns handlers that guard optional runtime objects' assert.ok(calls.includes('stop-jellyfin-remote')); assert.ok(calls.includes('stop-discord-presence')); assert.ok(calls.includes('clear-windows-visible-overlay-foreground-poll-loop')); + assert.ok(calls.includes('clear-linux-mpv-fullscreen-overlay-refresh-timeouts')); assert.equal(reconnectTimer, null); assert.equal(immersionTracker, null); }); @@ -103,6 +106,7 @@ test('cleanup deps builder skips destroyed yomitan window', () => { stopSubtitleWebsocket: () => {}, stopTexthookerService: () => {}, clearWindowsVisibleOverlayForegroundPollLoop: () => {}, + clearLinuxMpvFullscreenOverlayRefreshTimeouts: () => {}, getMainOverlayWindow: () => ({ 
isDestroyed: () => true, destroy: () => calls.push('destroy-main-overlay-window'), diff --git a/src/main/runtime/app-lifecycle-main-cleanup.ts b/src/main/runtime/app-lifecycle-main-cleanup.ts index 6d4bbb9a..4ab2bd70 100644 --- a/src/main/runtime/app-lifecycle-main-cleanup.ts +++ b/src/main/runtime/app-lifecycle-main-cleanup.ts @@ -26,6 +26,7 @@ export function createBuildOnWillQuitCleanupDepsHandler(deps: { stopSubtitleWebsocket: () => void; stopTexthookerService: () => void; clearWindowsVisibleOverlayForegroundPollLoop: () => void; + clearLinuxMpvFullscreenOverlayRefreshTimeouts: () => void; getMainOverlayWindow: () => DestroyableWindow | null; clearMainOverlayWindow: () => void; getModalOverlayWindow: () => DestroyableWindow | null; @@ -67,6 +68,8 @@ export function createBuildOnWillQuitCleanupDepsHandler(deps: { stopTexthookerService: () => deps.stopTexthookerService(), clearWindowsVisibleOverlayForegroundPollLoop: () => deps.clearWindowsVisibleOverlayForegroundPollLoop(), + clearLinuxMpvFullscreenOverlayRefreshTimeouts: () => + deps.clearLinuxMpvFullscreenOverlayRefreshTimeouts(), destroyMainOverlayWindow: () => { const window = deps.getMainOverlayWindow(); if (!window) return; diff --git a/src/main/runtime/composers/anilist-setup-composer.test.ts b/src/main/runtime/composers/anilist-setup-composer.test.ts index 8a0a1dd7..10a5f971 100644 --- a/src/main/runtime/composers/anilist-setup-composer.test.ts +++ b/src/main/runtime/composers/anilist-setup-composer.test.ts @@ -15,7 +15,7 @@ test('composeAnilistSetupHandlers returns callable setup handlers', () => { }, consumeTokenDeps: { consumeAnilistSetupCallbackUrl: () => false, - saveToken: () => {}, + saveToken: () => true, setCachedToken: () => {}, setResolvedState: () => {}, setSetupPageOpened: () => {}, diff --git a/src/main/runtime/composers/startup-lifecycle-composer.test.ts b/src/main/runtime/composers/startup-lifecycle-composer.test.ts index f3ddad99..96fbb369 100644 --- 
a/src/main/runtime/composers/startup-lifecycle-composer.test.ts +++ b/src/main/runtime/composers/startup-lifecycle-composer.test.ts @@ -22,6 +22,7 @@ test('composeStartupLifecycleHandlers returns callable startup lifecycle handler stopSubtitleWebsocket: () => {}, stopTexthookerService: () => {}, clearWindowsVisibleOverlayForegroundPollLoop: () => {}, + clearLinuxMpvFullscreenOverlayRefreshTimeouts: () => {}, getMainOverlayWindow: () => null, clearMainOverlayWindow: () => {}, getModalOverlayWindow: () => null, diff --git a/src/main/runtime/linux-mpv-fullscreen-overlay-refresh.test.ts b/src/main/runtime/linux-mpv-fullscreen-overlay-refresh.test.ts new file mode 100644 index 00000000..c4343b4d --- /dev/null +++ b/src/main/runtime/linux-mpv-fullscreen-overlay-refresh.test.ts @@ -0,0 +1,93 @@ +import assert from 'node:assert/strict'; +import test from 'node:test'; +import { + clearLinuxMpvFullscreenOverlayRefreshTimeouts, + updateLinuxMpvFullscreenOverlayRefreshBurst, + scheduleLinuxVisibleOverlayFullscreenRefreshBurst, +} from './linux-mpv-fullscreen-overlay-refresh'; + +test('linux mpv fullscreen overlay refresh burst schedules overlay refresh work on linux', async () => { + const originalPlatformDescriptor = Object.getOwnPropertyDescriptor(process, 'platform'); + Object.defineProperty(process, 'platform', { + configurable: true, + value: 'linux', + }); + + const calls: string[] = []; + + try { + scheduleLinuxVisibleOverlayFullscreenRefreshBurst({ + overlayManager: { + getMainWindow: () => + ({ + hide: () => calls.push('hide'), + isDestroyed: () => false, + isVisible: () => true, + showInactive: () => calls.push('showInactive'), + }) as never, + getVisibleOverlayVisible: () => true, + }, + overlayVisibilityRuntime: { + updateVisibleOverlayVisibility: () => calls.push('updateVisibleOverlayVisibility'), + }, + ensureOverlayWindowLevel: () => calls.push('ensureOverlayWindowLevel'), + }); + + const deadline = Date.now() + 200; + while 
(!calls.includes('updateVisibleOverlayVisibility') && Date.now() < deadline) { + await new Promise((resolve) => setTimeout(resolve, 5)); + } + + assert.ok(calls.includes('updateVisibleOverlayVisibility')); + assert.ok(calls.includes('hide')); + assert.ok(calls.includes('showInactive')); + assert.ok(calls.includes('ensureOverlayWindowLevel')); + } finally { + clearLinuxMpvFullscreenOverlayRefreshTimeouts(); + if (originalPlatformDescriptor) { + Object.defineProperty(process, 'platform', originalPlatformDescriptor); + } + } +}); + +test('linux mpv fullscreen overlay refresh update cancels burst when fullscreen exits', async () => { + const originalPlatformDescriptor = Object.getOwnPropertyDescriptor(process, 'platform'); + Object.defineProperty(process, 'platform', { + configurable: true, + value: 'linux', + }); + + const calls: string[] = []; + + try { + const deps = { + overlayManager: { + getMainWindow: () => + ({ + hide: () => calls.push('hide'), + isDestroyed: () => false, + isVisible: () => true, + showInactive: () => calls.push('showInactive'), + }) as never, + getVisibleOverlayVisible: () => true, + }, + overlayVisibilityRuntime: { + updateVisibleOverlayVisibility: () => calls.push('updateVisibleOverlayVisibility'), + }, + ensureOverlayWindowLevel: () => calls.push('ensureOverlayWindowLevel'), + }; + + const cancel = updateLinuxMpvFullscreenOverlayRefreshBurst(true, deps, null); + const nextCancel = updateLinuxMpvFullscreenOverlayRefreshBurst(false, deps, cancel); + + await new Promise((resolve) => setTimeout(resolve, 80)); + + assert.equal(nextCancel, null); + assert.deepEqual(calls, []); + } finally { + clearLinuxMpvFullscreenOverlayRefreshTimeouts(); + if (originalPlatformDescriptor) { + Object.defineProperty(process, 'platform', originalPlatformDescriptor); + } + } +}); diff --git a/src/main/runtime/linux-mpv-fullscreen-overlay-refresh.ts b/src/main/runtime/linux-mpv-fullscreen-overlay-refresh.ts new file mode 100644 index 00000000..419faed7 --- /dev/null 
+++ b/src/main/runtime/linux-mpv-fullscreen-overlay-refresh.ts @@ -0,0 +1,83 @@ +type LinuxMpvFullscreenOverlayWindow = { + hide: () => void; + isDestroyed: () => boolean; + isVisible: () => boolean; + showInactive: () => void; +}; + +export type LinuxMpvFullscreenOverlayRefreshDeps = { + overlayManager: { + getMainWindow: () => LinuxMpvFullscreenOverlayWindow | null; + getVisibleOverlayVisible: () => boolean; + }; + overlayVisibilityRuntime: { + updateVisibleOverlayVisibility: () => void; + }; + ensureOverlayWindowLevel: (window: LinuxMpvFullscreenOverlayWindow) => void; +}; +export type CancelLinuxMpvFullscreenOverlayRefreshBurst = () => void; + +const LINUX_MPV_FULLSCREEN_OVERLAY_REFRESH_DELAYS_MS = [0, 50, 150, 300, 600] as const; +let linuxMpvFullscreenOverlayRefreshTimeouts: Array> = []; + +function clearLinuxMpvFullscreenOverlayRefreshTimeouts(): void { + for (const timeout of linuxMpvFullscreenOverlayRefreshTimeouts) { + clearTimeout(timeout); + } + linuxMpvFullscreenOverlayRefreshTimeouts = []; +} + +function refreshLinuxVisibleOverlayAfterMpvFullscreenChange( + deps: LinuxMpvFullscreenOverlayRefreshDeps, +): void { + if (process.platform !== 'linux' || !deps.overlayManager.getVisibleOverlayVisible()) { + return; + } + + deps.overlayVisibilityRuntime.updateVisibleOverlayVisibility(); + + const mainWindow = deps.overlayManager.getMainWindow(); + if (!mainWindow || mainWindow.isDestroyed() || !mainWindow.isVisible()) { + return; + } + + mainWindow.hide(); + mainWindow.showInactive(); + deps.ensureOverlayWindowLevel(mainWindow); +} + +export function scheduleLinuxVisibleOverlayFullscreenRefreshBurst( + deps: LinuxMpvFullscreenOverlayRefreshDeps, +): CancelLinuxMpvFullscreenOverlayRefreshBurst { + if (process.platform !== 'linux') { + return () => {}; + } + + clearLinuxMpvFullscreenOverlayRefreshTimeouts(); + for (const delayMs of LINUX_MPV_FULLSCREEN_OVERLAY_REFRESH_DELAYS_MS) { + const refreshTimeout = setTimeout(() => { + 
linuxMpvFullscreenOverlayRefreshTimeouts = linuxMpvFullscreenOverlayRefreshTimeouts.filter( + (timeout) => timeout !== refreshTimeout, + ); + refreshLinuxVisibleOverlayAfterMpvFullscreenChange(deps); + }, delayMs); + refreshTimeout.unref?.(); + linuxMpvFullscreenOverlayRefreshTimeouts.push(refreshTimeout); + } + return clearLinuxMpvFullscreenOverlayRefreshTimeouts; +} + +export function updateLinuxMpvFullscreenOverlayRefreshBurst( + isFullscreen: boolean, + deps: LinuxMpvFullscreenOverlayRefreshDeps, + cancelCurrentBurst: CancelLinuxMpvFullscreenOverlayRefreshBurst | null, +): CancelLinuxMpvFullscreenOverlayRefreshBurst | null { + cancelCurrentBurst?.(); + if (!isFullscreen) { + return null; + } + + return scheduleLinuxVisibleOverlayFullscreenRefreshBurst(deps); +} + +export { clearLinuxMpvFullscreenOverlayRefreshTimeouts }; diff --git a/src/main/runtime/mpv-client-event-bindings.test.ts b/src/main/runtime/mpv-client-event-bindings.test.ts index c695c765..0a32eb74 100644 --- a/src/main/runtime/mpv-client-event-bindings.test.ts +++ b/src/main/runtime/mpv-client-event-bindings.test.ts @@ -128,6 +128,7 @@ test('mpv event bindings register all expected events', () => { onTimePosChange: () => {}, onDurationChange: () => {}, onPauseChange: () => {}, + onFullscreenChange: () => {}, onSubtitleMetricsChange: () => {}, onSecondarySubtitleVisibility: () => {}, }); @@ -151,6 +152,7 @@ test('mpv event bindings register all expected events', () => { 'time-pos-change', 'duration-change', 'pause-change', + 'fullscreen-change', 'subtitle-metrics-change', 'secondary-subtitle-visibility', ]); diff --git a/src/main/runtime/mpv-client-event-bindings.ts b/src/main/runtime/mpv-client-event-bindings.ts index 3ffa4257..0036cbfd 100644 --- a/src/main/runtime/mpv-client-event-bindings.ts +++ b/src/main/runtime/mpv-client-event-bindings.ts @@ -11,6 +11,7 @@ type MpvBindingEventName = | 'time-pos-change' | 'duration-change' | 'pause-change' + | 'fullscreen-change' | 'subtitle-metrics-change' | 
'secondary-subtitle-visibility'; @@ -83,6 +84,7 @@ export function createBindMpvClientEventHandlers(deps: { onTimePosChange: (payload: { time: number }) => void; onDurationChange: (payload: { duration: number }) => void; onPauseChange: (payload: { paused: boolean }) => void; + onFullscreenChange: (payload: { fullscreen: boolean }) => void; onSubtitleMetricsChange: (payload: { patch: Record }) => void; onSecondarySubtitleVisibility: (payload: { visible: boolean }) => void; }) { @@ -99,6 +101,7 @@ export function createBindMpvClientEventHandlers(deps: { mpvClient.on('time-pos-change', deps.onTimePosChange); mpvClient.on('duration-change', deps.onDurationChange); mpvClient.on('pause-change', deps.onPauseChange); + mpvClient.on('fullscreen-change', deps.onFullscreenChange); mpvClient.on('subtitle-metrics-change', deps.onSubtitleMetricsChange); mpvClient.on('secondary-subtitle-visibility', deps.onSecondarySubtitleVisibility); }; diff --git a/src/main/runtime/mpv-main-event-actions.test.ts b/src/main/runtime/mpv-main-event-actions.test.ts index dd32261f..dc862687 100644 --- a/src/main/runtime/mpv-main-event-actions.test.ts +++ b/src/main/runtime/mpv-main-event-actions.test.ts @@ -49,8 +49,37 @@ test('subtitle change handler broadcasts cached annotated payload immediately wh assert.deepEqual(calls, [ 'set:line', 'lookup:line', - 'broadcast:annotated', 'process:line', + 'broadcast:annotated', + 'presence', + ]); +}); + +test('subtitle change handler emits cached annotation after forwarding the subtitle change', () => { + const calls: string[] = []; + const handler = createHandleMpvSubtitleChangeHandler({ + setCurrentSubText: (text) => calls.push(`set:${text}`), + getImmediateSubtitlePayload: (text) => { + calls.push(`lookup:${text}`); + return { text, tokens: [] }; + }, + emitImmediateSubtitle: (payload) => { + calls.push(`emit:${payload.tokens === null ? 'plain' : 'annotated'}`); + }, + broadcastSubtitle: (payload) => { + calls.push(`broadcast:${payload.tokens === null ? 
'plain' : 'annotated'}`); + }, + onSubtitleChange: (text) => calls.push(`process:${text}`), + refreshDiscordPresence: () => calls.push('presence'), + }); + + handler({ text: 'line' }); + + assert.deepEqual(calls, [ + 'set:line', + 'lookup:line', + 'process:line', + 'emit:annotated', 'presence', ]); }); @@ -170,6 +199,10 @@ test('time-pos and pause handlers report progress with correct urgency', () => { recordPlaybackPosition: (time) => calls.push(`time:${time}`), reportJellyfinRemoteProgress: (force) => calls.push(`progress:${force ? 'force' : 'normal'}`), refreshDiscordPresence: () => calls.push('presence'), + maybeRunAnilistPostWatchUpdate: async () => { + calls.push('post-watch'); + }, + logError: () => calls.push('post-watch-error'), }); const pauseHandler = createHandleMpvPauseChangeHandler({ recordPauseState: (paused) => calls.push(`pause:${paused ? 'yes' : 'no'}`), @@ -183,12 +216,48 @@ test('time-pos and pause handlers report progress with correct urgency', () => { 'time:12.5', 'progress:normal', 'presence', + 'post-watch', 'pause:yes', 'progress:force', 'presence', ]); }); +test('time-pos handler logs post-watch update rejection without blocking later handlers', async () => { + const calls: string[] = []; + const timeHandler = createHandleMpvTimePosChangeHandler({ + recordPlaybackPosition: (time) => calls.push(`time:${time}`), + reportJellyfinRemoteProgress: (force) => calls.push(`progress:${force ? 'force' : 'normal'}`), + refreshDiscordPresence: () => calls.push('presence'), + maybeRunAnilistPostWatchUpdate: async () => { + calls.push('post-watch'); + throw new Error('boom'); + }, + logError: (message, error) => calls.push(`error:${message}:${(error as Error).message}`), + }); + const pauseHandler = createHandleMpvPauseChangeHandler({ + recordPauseState: (paused) => calls.push(`pause:${paused ? 'yes' : 'no'}`), + reportJellyfinRemoteProgress: (force) => calls.push(`progress:${force ? 
'force' : 'normal'}`), + refreshDiscordPresence: () => calls.push('presence'), + }); + + timeHandler({ time: 12.5 }); + pauseHandler({ paused: true }); + await Promise.resolve(); + await Promise.resolve(); + + assert.deepEqual(calls, [ + 'time:12.5', + 'progress:normal', + 'presence', + 'post-watch', + 'pause:yes', + 'progress:force', + 'presence', + 'error:AniList post-watch update failed unexpectedly:boom', + ]); +}); + test('subtitle metrics change handler forwards patch payload', () => { let received: Record | null = null; const handler = createHandleMpvSubtitleMetricsChangeHandler({ diff --git a/src/main/runtime/mpv-main-event-actions.ts b/src/main/runtime/mpv-main-event-actions.ts index 2fd43ead..b2a5952a 100644 --- a/src/main/runtime/mpv-main-event-actions.ts +++ b/src/main/runtime/mpv-main-event-actions.ts @@ -12,14 +12,15 @@ export function createHandleMpvSubtitleChangeHandler(deps: { deps.setCurrentSubText(text); const immediatePayload = deps.getImmediateSubtitlePayload?.(text) ?? null; if (immediatePayload) { + deps.onSubtitleChange(text); (deps.emitImmediateSubtitle ?? 
deps.broadcastSubtitle)(immediatePayload); } else { deps.broadcastSubtitle({ text, tokens: null, }); + deps.onSubtitleChange(text); } - deps.onSubtitleChange(text); deps.refreshDiscordPresence(); }; } @@ -104,12 +105,17 @@ export function createHandleMpvTimePosChangeHandler(deps: { recordPlaybackPosition: (time: number) => void; reportJellyfinRemoteProgress: (forceImmediate: boolean) => void; refreshDiscordPresence: () => void; + maybeRunAnilistPostWatchUpdate?: () => Promise; + logError?: (message: string, error: unknown) => void; onTimePosUpdate?: (time: number) => void; }) { return ({ time }: { time: number }): void => { deps.recordPlaybackPosition(time); deps.reportJellyfinRemoteProgress(false); deps.refreshDiscordPresence(); + void deps.maybeRunAnilistPostWatchUpdate?.().catch((error) => { + deps.logError?.('AniList post-watch update failed unexpectedly', error); + }); deps.onTimePosUpdate?.(time); }; } diff --git a/src/main/runtime/mpv-main-event-bindings.ts b/src/main/runtime/mpv-main-event-bindings.ts index 941ef212..4641b983 100644 --- a/src/main/runtime/mpv-main-event-bindings.ts +++ b/src/main/runtime/mpv-main-event-bindings.ts @@ -68,6 +68,7 @@ export function createBindMpvMainEventHandlersHandler(deps: { recordMediaDuration: (durationSec: number) => void; reportJellyfinRemoteProgress: (forceImmediate: boolean) => void; onTimePosUpdate?: (time: number) => void; + onFullscreenChange?: (fullscreen: boolean) => void; recordPauseState: (paused: boolean) => void; updateSubtitleRenderMetrics: (patch: Record) => void; @@ -148,6 +149,8 @@ export function createBindMpvMainEventHandlersHandler(deps: { reportJellyfinRemoteProgress: (forceImmediate) => deps.reportJellyfinRemoteProgress(forceImmediate), refreshDiscordPresence: () => deps.refreshDiscordPresence(), + maybeRunAnilistPostWatchUpdate: () => deps.maybeRunAnilistPostWatchUpdate(), + logError: (message, error) => deps.logSubtitleTimingError(message, error), onTimePosUpdate: (time) => 
deps.onTimePosUpdate?.(time), }); const handleMpvPauseChange = createHandleMpvPauseChangeHandler({ @@ -177,6 +180,7 @@ export function createBindMpvMainEventHandlersHandler(deps: { onTimePosChange: handleMpvTimePosChange, onDurationChange: ({ duration }) => deps.recordMediaDuration(duration), onPauseChange: handleMpvPauseChange, + onFullscreenChange: ({ fullscreen }) => deps.onFullscreenChange?.(fullscreen), onSubtitleMetricsChange: handleMpvSubtitleMetricsChange, onSecondarySubtitleVisibility: handleMpvSecondarySubtitleVisibility, })(mpvClient); diff --git a/src/main/runtime/mpv-main-event-main-deps.test.ts b/src/main/runtime/mpv-main-event-main-deps.test.ts index 9e52f677..5a97c8e8 100644 --- a/src/main/runtime/mpv-main-event-main-deps.test.ts +++ b/src/main/runtime/mpv-main-event-main-deps.test.ts @@ -57,6 +57,7 @@ test('mpv main event main deps map app state updates and delegate callbacks', as updateCurrentMediaTitle: (title) => calls.push(`title:${title}`), resetAnilistMediaGuessState: () => calls.push('reset-guess'), reportJellyfinRemoteProgress: (forceImmediate) => calls.push(`progress:${forceImmediate}`), + onFullscreenChange: (fullscreen) => calls.push(`fullscreen:${fullscreen}`), updateSubtitleRenderMetrics: () => calls.push('metrics'), refreshDiscordPresence: () => calls.push('presence-refresh'), })(); @@ -95,6 +96,7 @@ test('mpv main event main deps map app state updates and delegate callbacks', as deps.notifyImmersionTitleUpdate('title'); deps.recordPlaybackPosition(10); deps.reportJellyfinRemoteProgress(true); + deps.onFullscreenChange?.(true); deps.recordPauseState(true); deps.updateSubtitleRenderMetrics({}); deps.setPreviousSecondarySubVisibility(true); @@ -112,6 +114,7 @@ test('mpv main event main deps map app state updates and delegate callbacks', as assert.ok(calls.includes('sync-immersion')); assert.ok(calls.includes('autoplay:/tmp/video')); assert.ok(calls.includes('metrics')); + assert.ok(calls.includes('fullscreen:true')); 
assert.ok(calls.includes('presence-refresh')); assert.ok(calls.includes('restore-mpv-sub')); assert.ok(calls.includes('reset-sidebar-layout')); @@ -159,6 +162,48 @@ test('mpv main event main deps wire subtitle callbacks without suppression gate' assert.equal(typeof deps.setCurrentSubText, 'function'); }); +test('mpv main event main deps treat managed playback as quit-on-disconnect', () => { + const deps = createBuildBindMpvMainEventHandlersMainDepsHandler({ + appState: { + initialArgs: { managedPlayback: true }, + overlayRuntimeInitialized: false, + mpvClient: null, + immersionTracker: null, + subtitleTimingTracker: null, + currentSubText: '', + currentSubAssText: '', + playbackPaused: null, + previousSecondarySubVisibility: false, + }, + getQuitOnDisconnectArmed: () => true, + scheduleQuitCheck: () => {}, + quitApp: () => {}, + reportJellyfinRemoteStopped: () => {}, + syncOverlayMpvSubtitleSuppression: () => {}, + maybeRunAnilistPostWatchUpdate: async () => {}, + logSubtitleTimingError: () => {}, + broadcastToOverlayWindows: () => {}, + onSubtitleChange: () => {}, + ensureImmersionTrackerInitialized: () => {}, + updateCurrentMediaPath: () => {}, + restoreMpvSubVisibility: () => {}, + resetSubtitleSidebarEmbeddedLayout: () => {}, + getCurrentAnilistMediaKey: () => null, + resetAnilistMediaTracking: () => {}, + maybeProbeAnilistDuration: () => {}, + ensureAnilistMediaGuess: () => {}, + syncImmersionMediaState: () => {}, + updateCurrentMediaTitle: () => {}, + resetAnilistMediaGuessState: () => {}, + reportJellyfinRemoteProgress: () => {}, + updateSubtitleRenderMetrics: () => {}, + refreshDiscordPresence: () => {}, + })(); + + assert.equal(deps.hasInitialPlaybackQuitOnDisconnectArg(), true); + assert.equal(deps.shouldQuitOnDisconnectWhenOverlayRuntimeInitialized(), true); +}); + test('flushPlaybackPositionOnMediaPathClear ignores disconnected mpv time-pos reads', async () => { const recorded: number[] = []; const deps = 
createBuildBindMpvMainEventHandlersMainDepsHandler({ diff --git a/src/main/runtime/mpv-main-event-main-deps.ts b/src/main/runtime/mpv-main-event-main-deps.ts index f9acd77f..a703ba13 100644 --- a/src/main/runtime/mpv-main-event-main-deps.ts +++ b/src/main/runtime/mpv-main-event-main-deps.ts @@ -2,7 +2,11 @@ import type { MergedToken, SubtitleData } from '../../types'; export function createBuildBindMpvMainEventHandlersMainDepsHandler(deps: { appState: { - initialArgs?: { jellyfinPlay?: unknown; youtubePlay?: unknown } | null; + initialArgs?: { + jellyfinPlay?: unknown; + managedPlayback?: unknown; + youtubePlay?: unknown; + } | null; overlayRuntimeInitialized: boolean; mpvClient: { connected?: boolean; @@ -60,6 +64,7 @@ export function createBuildBindMpvMainEventHandlersMainDepsHandler(deps: { resetAnilistMediaGuessState: () => void; reportJellyfinRemoteProgress: (forceImmediate: boolean) => void; onTimePosUpdate?: (time: number) => void; + onFullscreenChange?: (fullscreen: boolean) => void; updateSubtitleRenderMetrics: (patch: Record) => void; refreshDiscordPresence: () => void; ensureImmersionTrackerInitialized: () => void; @@ -73,15 +78,19 @@ export function createBuildBindMpvMainEventHandlersMainDepsHandler(deps: { deps.ensureImmersionTrackerInitialized(); deps.appState.immersionTracker?.recordPlaybackPosition?.(normalizedTimeSec); }; + const hasInitialPlaybackQuitOnDisconnectArg = (): boolean => + Boolean( + deps.appState.initialArgs?.managedPlayback || + deps.appState.initialArgs?.jellyfinPlay || + deps.appState.initialArgs?.youtubePlay, + ); return () => ({ reportJellyfinRemoteStopped: () => deps.reportJellyfinRemoteStopped(), syncOverlayMpvSubtitleSuppression: () => deps.syncOverlayMpvSubtitleSuppression(), - hasInitialPlaybackQuitOnDisconnectArg: () => - Boolean(deps.appState.initialArgs?.jellyfinPlay || deps.appState.initialArgs?.youtubePlay), + hasInitialPlaybackQuitOnDisconnectArg, isOverlayRuntimeInitialized: () => 
deps.appState.overlayRuntimeInitialized, - shouldQuitOnDisconnectWhenOverlayRuntimeInitialized: () => - Boolean(deps.appState.initialArgs?.youtubePlay), + shouldQuitOnDisconnectWhenOverlayRuntimeInitialized: hasInitialPlaybackQuitOnDisconnectArg, isQuitOnDisconnectArmed: () => deps.getQuitOnDisconnectArmed(), scheduleQuitCheck: (callback: () => void) => deps.scheduleQuitCheck(callback), isMpvConnected: () => Boolean(deps.appState.mpvClient?.connected), @@ -176,6 +185,9 @@ export function createBuildBindMpvMainEventHandlersMainDepsHandler(deps: { onTimePosUpdate: deps.onTimePosUpdate ? (time: number) => deps.onTimePosUpdate!(time) : undefined, + onFullscreenChange: deps.onFullscreenChange + ? (fullscreen: boolean) => deps.onFullscreenChange!(fullscreen) + : undefined, recordPauseState: (paused: boolean) => { deps.appState.playbackPaused = paused; deps.ensureImmersionTrackerInitialized(); diff --git a/src/main/runtime/startup-config.test.ts b/src/main/runtime/startup-config.test.ts index 02fd9f9d..96f560b9 100644 --- a/src/main/runtime/startup-config.test.ts +++ b/src/main/runtime/startup-config.test.ts @@ -50,7 +50,7 @@ test('createReloadConfigHandler runs success flow with warnings', async () => { assert.equal(showedWarningDialog, process.platform === 'darwin'); assert.ok(calls.some((entry) => entry.includes('actual=10 fallback=250'))); assert.ok(calls.includes('hotReload:start')); - assert.deepEqual(refreshCalls, [{ force: true }]); + assert.deepEqual(refreshCalls, [{ force: true, allowSetupPrompt: false }]); }); test('createReloadConfigHandler fails startup for parse errors', () => { diff --git a/src/main/runtime/startup-config.ts b/src/main/runtime/startup-config.ts index 12b79cfd..c463a188 100644 --- a/src/main/runtime/startup-config.ts +++ b/src/main/runtime/startup-config.ts @@ -27,7 +27,10 @@ export type ReloadConfigRuntimeDeps = { logWarning: (message: string) => void; showDesktopNotification: (title: string, options: { body: string }) => void; 
startConfigHotReload: () => void; - refreshAnilistClientSecretState: (options: { force: boolean }) => Promise; + refreshAnilistClientSecretState: (options: { + force: boolean; + allowSetupPrompt?: boolean; + }) => Promise; failHandlers: { logError: (details: string) => void; showErrorBox: (title: string, details: string) => void; @@ -72,7 +75,7 @@ export function createReloadConfigHandler(deps: ReloadConfigRuntimeDeps): () => } deps.startConfigHotReload(); - void deps.refreshAnilistClientSecretState({ force: true }); + void deps.refreshAnilistClientSecretState({ force: true, allowSetupPrompt: false }); }; } diff --git a/src/main/runtime/stats-server-routing.test.ts b/src/main/runtime/stats-server-routing.test.ts index df799895..496b3f30 100644 --- a/src/main/runtime/stats-server-routing.test.ts +++ b/src/main/runtime/stats-server-routing.test.ts @@ -42,7 +42,7 @@ test('stats server routing defers to a live background daemon from another proce processAlive: true, }); - assert.deepEqual(handler(), { url: 'http://127.0.0.1:7979', source: 'foreign' }); + assert.deepEqual(handler(), { url: 'http://127.0.0.1:7979', source: 'background' }); assert.deepEqual(calls, ['readBackgroundState', 'isProcessAlive']); }); diff --git a/src/main/runtime/stats-server-routing.ts b/src/main/runtime/stats-server-routing.ts index ab38949e..b2a42149 100644 --- a/src/main/runtime/stats-server-routing.ts +++ b/src/main/runtime/stats-server-routing.ts @@ -14,9 +14,7 @@ function formatStatsServerUrl(port: number): string { return `http://127.0.0.1:${port}`; } -export type EnsureStatsServerUrlResult = - | { url: string; source: 'foreign' } - | { url: string; source: 'local' }; +export type EnsureStatsServerUrlResult = { url: string; source: 'background' | 'local' }; export function createEnsureStatsServerUrlHandler( deps: EnsureStatsServerUrlDeps, @@ -30,7 +28,7 @@ export function createEnsureStatsServerUrlHandler( } else if (!deps.isProcessAlive(state.pid)) { deps.removeBackgroundState(); } 
else if (state.pid !== deps.currentPid) { - return { url: formatStatsServerUrl(state.port), source: 'foreign' }; + return { url: formatStatsServerUrl(state.port), source: 'background' }; } if (!deps.hasLocalStatsServer()) { diff --git a/src/renderer/handlers/keyboard.test.ts b/src/renderer/handlers/keyboard.test.ts index 6560ed43..7178bac8 100644 --- a/src/renderer/handlers/keyboard.test.ts +++ b/src/renderer/handlers/keyboard.test.ts @@ -1202,6 +1202,32 @@ test('session binding: copy subtitle multiple captures follow-up digit locally', } }); +test('session binding: mine sentence multiple captures modified follow-up digit locally', async () => { + const { handlers, testGlobals } = createKeyboardHandlerHarness(); + + try { + await handlers.setupMpvInputForwarding(); + handlers.updateSessionBindings([ + { + sourcePath: 'shortcuts.mineSentenceMultiple', + originalKey: 'Ctrl+Shift+S', + key: { code: 'KeyS', modifiers: ['ctrl', 'shift'] }, + actionType: 'session-action', + actionId: 'mineSentenceMultiple', + }, + ] as never); + + testGlobals.dispatchKeydown({ key: 'S', code: 'KeyS', ctrlKey: true, shiftKey: true }); + testGlobals.dispatchKeydown({ key: '#', code: 'Digit3', ctrlKey: true, shiftKey: true }); + + assert.deepEqual(testGlobals.sessionActions, [ + { actionId: 'mineSentenceMultiple', payload: { count: 3 } }, + ]); + } finally { + testGlobals.restore(); + } +}); + test('keyboard mode: h moves left when popup is closed', async () => { const { ctx, handlers, testGlobals } = createKeyboardHandlerHarness(); diff --git a/src/renderer/handlers/keyboard.ts b/src/renderer/handlers/keyboard.ts index 12ccdd6b..3153ef0a 100644 --- a/src/renderer/handlers/keyboard.ts +++ b/src/renderer/handlers/keyboard.ts @@ -176,13 +176,17 @@ export function createKeyboardHandlers( return true; } - if (!/^[1-9]$/.test(e.key) || e.ctrlKey || e.metaKey || e.altKey || e.shiftKey) { + const digit = /^[1-9]$/.test(e.key) + ? e.key + : (e.code.match(/^(?:Digit|Numpad)([1-9])$/)?.[1] ?? 
null); + + if (!digit) { e.preventDefault(); return true; } e.preventDefault(); - const count = Number(e.key); + const count = Number(digit); const actionId = pendingNumericSelection.actionId; cancelPendingNumericSelection(false); void window.electronAPI.dispatchSessionAction(actionId, { count }); diff --git a/src/renderer/handlers/mouse.test.ts b/src/renderer/handlers/mouse.test.ts index 03f89aea..f6e64ff3 100644 --- a/src/renderer/handlers/mouse.test.ts +++ b/src/renderer/handlers/mouse.test.ts @@ -1315,6 +1315,75 @@ test('window resize ignores synthetic subtitle enter until the pointer moves aga } }); +test('window resize allows primary hover pause from a real mouseenter over subtitles', async () => { + const ctx = createMouseTestContext(); + const originalWindow = globalThis.window; + const originalDocument = globalThis.document; + const mpvCommands: Array<(string | number)[]> = []; + const windowListeners = new Map void>>(); + ctx.platform.shouldToggleMouseIgnore = true; + + Object.defineProperty(globalThis, 'window', { + configurable: true, + value: { + electronAPI: { + setIgnoreMouseEvents: () => {}, + }, + addEventListener: (type: string, listener: () => void) => { + const bucket = windowListeners.get(type) ?? []; + bucket.push(listener); + windowListeners.set(type, bucket); + }, + getComputedStyle: () => ({ + visibility: 'hidden', + display: 'none', + opacity: '0', + }), + focus: () => {}, + innerHeight: 1000, + }, + }); + Object.defineProperty(globalThis, 'document', { + configurable: true, + value: { + addEventListener: () => {}, + elementFromPoint: (x: number, y: number) => + x === 120 && y === 240 ? 
ctx.dom.subtitleContainer : null, + querySelectorAll: () => [], + body: {}, + }, + }); + + try { + const handlers = createMouseHandlers(ctx as never, { + modalStateReader: { + isAnySettingsModalOpen: () => false, + isAnyModalOpen: () => false, + }, + applyYPercent: () => {}, + getCurrentYPercent: () => 10, + persistSubtitlePositionPatch: () => {}, + getSubtitleHoverAutoPauseEnabled: () => true, + getYomitanPopupAutoPauseEnabled: () => false, + getPlaybackPaused: async () => false, + sendMpvCommand: (command) => { + mpvCommands.push(command); + }, + }); + + handlers.setupResizeHandler(); + for (const listener of windowListeners.get('resize') ?? []) { + listener(); + } + + await handlers.handlePrimaryMouseEnter({ clientX: 120, clientY: 240 } as MouseEvent); + assert.deepEqual(mpvCommands, [['set_property', 'pause', 'yes']]); + } finally { + Object.defineProperty(globalThis, 'window', { configurable: true, value: originalWindow }); + Object.defineProperty(globalThis, 'document', { configurable: true, value: originalDocument }); + } +}); + test('visibility recovery keeps overlay click-through when pointer is not over subtitles', () => { const ctx = createMouseTestContext(); const originalWindow = globalThis.window; @@ -1428,7 +1497,8 @@ test('pointer tracking enables overlay interaction as soon as the cursor reaches bucket.push(listener); documentListeners.set(type, bucket); }, - elementFromPoint: () => ctx.dom.subtitleContainer, + elementFromPoint: (x: number, y: number) => + x === 120 && y === 240 ? 
ctx.dom.subtitleContainer : null, querySelectorAll: () => [], body: {}, }, diff --git a/src/renderer/handlers/mouse.ts b/src/renderer/handlers/mouse.ts index 7d193be9..a7063388 100644 --- a/src/renderer/handlers/mouse.ts +++ b/src/renderer/handlers/mouse.ts @@ -300,12 +300,15 @@ export function createMouseHandlers( } async function handleMouseEnter( - _event?: MouseEvent, + event?: MouseEvent, showSecondaryHover = false, source: 'direct' | 'tracked-pointer' = 'direct', ): Promise { if (source === 'direct' && suppressDirectHoverEnterSource !== null) { - return; + if (!event || !syncHoverStateFromPoint(event.clientX, event.clientY).isOverSubtitle) { + return; + } + suppressDirectHoverEnterSource = null; } ctx.state.isOverSubtitle = true; diff --git a/src/renderer/overlay-legacy-cleanup.test.ts b/src/renderer/overlay-legacy-cleanup.test.ts index 4566f270..4ee0bbb3 100644 --- a/src/renderer/overlay-legacy-cleanup.test.ts +++ b/src/renderer/overlay-legacy-cleanup.test.ts @@ -28,6 +28,18 @@ test('renderer stylesheet no longer contains invisible-layer selectors', () => { assert.doesNotMatch(cssSource, /body\.layer-invisible/); }); +test('renderer stylesheet only hides visible focus chrome on top-level overlay focus targets', () => { + const cssSource = readWorkspaceFile('src/renderer/style.css'); + assert.match( + cssSource, + /html:focus-visible,\s*body:focus-visible,\s*#overlay:focus-visible\s*\{[^}]*outline:\s*none;/s, + ); + assert.doesNotMatch( + cssSource, + /html:focus,\s*body:focus,\s*#overlay:focus\s*\{[^}]*outline:\s*none;/s, + ); +}); + test('top-level readme avoids stale overlay-layers wording', () => { const readmeSource = readWorkspaceFile('README.md'); assert.doesNotMatch(readmeSource, /overlay layers/i); diff --git a/src/renderer/style.css b/src/renderer/style.css index 80dfca07..cf8d3875 100644 --- a/src/renderer/style.css +++ b/src/renderer/style.css @@ -40,6 +40,12 @@ body { 'Hiragino Kaku Gothic ProN', 'Yu Gothic', 'Arial Unicode MS', Arial, 
sans-serif; } +html:focus-visible, +body:focus-visible, +#overlay:focus-visible { + outline: none; +} + :root { --subtitle-sidebar-reserved-width: 0px; @@ -794,11 +800,8 @@ body.settings-modal-open [data-subminer-yomitan-popup-host='true'] { } #subtitleRoot .word.word-jlpt-n1 { - text-decoration-line: underline; - text-decoration-color: var(--subtitle-jlpt-n1-color, #ed8796); - text-decoration-thickness: 0.08em; - text-underline-offset: 0.12em; - text-decoration-skip-ink: none; + text-decoration-line: none; + border-bottom: 2px solid var(--subtitle-jlpt-n1-color, #ed8796); } #subtitleRoot .word.word-jlpt-n1[data-jlpt-level]::after { @@ -806,11 +809,8 @@ body.settings-modal-open [data-subminer-yomitan-popup-host='true'] { } #subtitleRoot .word.word-jlpt-n2 { - text-decoration-line: underline; - text-decoration-color: var(--subtitle-jlpt-n2-color, #f5a97f); - text-decoration-thickness: 0.08em; - text-underline-offset: 0.12em; - text-decoration-skip-ink: none; + text-decoration-line: none; + border-bottom: 2px solid var(--subtitle-jlpt-n2-color, #f5a97f); } #subtitleRoot .word.word-jlpt-n2[data-jlpt-level]::after { @@ -818,11 +818,8 @@ body.settings-modal-open [data-subminer-yomitan-popup-host='true'] { } #subtitleRoot .word.word-jlpt-n3 { - text-decoration-line: underline; - text-decoration-color: var(--subtitle-jlpt-n3-color, #f9e2af); - text-decoration-thickness: 0.08em; - text-underline-offset: 0.12em; - text-decoration-skip-ink: none; + text-decoration-line: none; + border-bottom: 2px solid var(--subtitle-jlpt-n3-color, #f9e2af); } #subtitleRoot .word.word-jlpt-n3[data-jlpt-level]::after { @@ -830,11 +827,8 @@ body.settings-modal-open [data-subminer-yomitan-popup-host='true'] { } #subtitleRoot .word.word-jlpt-n4 { - text-decoration-line: underline; - text-decoration-color: var(--subtitle-jlpt-n4-color, #a6e3a1); - text-decoration-thickness: 0.08em; - text-underline-offset: 0.12em; - text-decoration-skip-ink: none; + text-decoration-line: none; + border-bottom: 
2px solid var(--subtitle-jlpt-n4-color, #a6e3a1); } #subtitleRoot .word.word-jlpt-n4[data-jlpt-level]::after { @@ -842,11 +836,8 @@ body.settings-modal-open [data-subminer-yomitan-popup-host='true'] { } #subtitleRoot .word.word-jlpt-n5 { - text-decoration-line: underline; - text-decoration-color: var(--subtitle-jlpt-n5-color, #8aadf4); - text-decoration-thickness: 0.08em; - text-underline-offset: 0.12em; - text-decoration-skip-ink: none; + text-decoration-line: none; + border-bottom: 2px solid var(--subtitle-jlpt-n5-color, #8aadf4); } #subtitleRoot .word.word-jlpt-n5[data-jlpt-level]::after { @@ -997,6 +988,91 @@ body.settings-modal-open [data-subminer-yomitan-popup-host='true'] { -webkit-text-fill-color: var(--subtitle-frequency-band-5-color, #8aadf4) !important; } +#subtitleRoot .word.word-jlpt-n1.word-known, +#subtitleRoot .word.word-jlpt-n1.word-n-plus-one, +#subtitleRoot .word.word-jlpt-n1.word-name-match, +#subtitleRoot .word.word-jlpt-n1.word-frequency-single, +#subtitleRoot .word.word-jlpt-n1.word-frequency-band-1, +#subtitleRoot .word.word-jlpt-n1.word-frequency-band-2, +#subtitleRoot .word.word-jlpt-n1.word-frequency-band-3, +#subtitleRoot .word.word-jlpt-n1.word-frequency-band-4, +#subtitleRoot .word.word-jlpt-n1.word-frequency-band-5, +#subtitleRoot .word.word-jlpt-n1:hover, +#subtitleRoot .word.word-jlpt-n1 .c:hover, +#subtitleRoot .word.word-jlpt-n1::selection, +#subtitleRoot .word.word-jlpt-n1 .c::selection { + text-decoration-color: var(--subtitle-jlpt-n1-color, #ed8796) !important; + -webkit-text-decoration-color: var(--subtitle-jlpt-n1-color, #ed8796) !important; +} + +#subtitleRoot .word.word-jlpt-n2.word-known, +#subtitleRoot .word.word-jlpt-n2.word-n-plus-one, +#subtitleRoot .word.word-jlpt-n2.word-name-match, +#subtitleRoot .word.word-jlpt-n2.word-frequency-single, +#subtitleRoot .word.word-jlpt-n2.word-frequency-band-1, +#subtitleRoot .word.word-jlpt-n2.word-frequency-band-2, +#subtitleRoot .word.word-jlpt-n2.word-frequency-band-3, 
+#subtitleRoot .word.word-jlpt-n2.word-frequency-band-4, +#subtitleRoot .word.word-jlpt-n2.word-frequency-band-5, +#subtitleRoot .word.word-jlpt-n2:hover, +#subtitleRoot .word.word-jlpt-n2 .c:hover, +#subtitleRoot .word.word-jlpt-n2::selection, +#subtitleRoot .word.word-jlpt-n2 .c::selection { + text-decoration-color: var(--subtitle-jlpt-n2-color, #f5a97f) !important; + -webkit-text-decoration-color: var(--subtitle-jlpt-n2-color, #f5a97f) !important; +} + +#subtitleRoot .word.word-jlpt-n3.word-known, +#subtitleRoot .word.word-jlpt-n3.word-n-plus-one, +#subtitleRoot .word.word-jlpt-n3.word-name-match, +#subtitleRoot .word.word-jlpt-n3.word-frequency-single, +#subtitleRoot .word.word-jlpt-n3.word-frequency-band-1, +#subtitleRoot .word.word-jlpt-n3.word-frequency-band-2, +#subtitleRoot .word.word-jlpt-n3.word-frequency-band-3, +#subtitleRoot .word.word-jlpt-n3.word-frequency-band-4, +#subtitleRoot .word.word-jlpt-n3.word-frequency-band-5, +#subtitleRoot .word.word-jlpt-n3:hover, +#subtitleRoot .word.word-jlpt-n3 .c:hover, +#subtitleRoot .word.word-jlpt-n3::selection, +#subtitleRoot .word.word-jlpt-n3 .c::selection { + text-decoration-color: var(--subtitle-jlpt-n3-color, #f9e2af) !important; + -webkit-text-decoration-color: var(--subtitle-jlpt-n3-color, #f9e2af) !important; +} + +#subtitleRoot .word.word-jlpt-n4.word-known, +#subtitleRoot .word.word-jlpt-n4.word-n-plus-one, +#subtitleRoot .word.word-jlpt-n4.word-name-match, +#subtitleRoot .word.word-jlpt-n4.word-frequency-single, +#subtitleRoot .word.word-jlpt-n4.word-frequency-band-1, +#subtitleRoot .word.word-jlpt-n4.word-frequency-band-2, +#subtitleRoot .word.word-jlpt-n4.word-frequency-band-3, +#subtitleRoot .word.word-jlpt-n4.word-frequency-band-4, +#subtitleRoot .word.word-jlpt-n4.word-frequency-band-5, +#subtitleRoot .word.word-jlpt-n4:hover, +#subtitleRoot .word.word-jlpt-n4 .c:hover, +#subtitleRoot .word.word-jlpt-n4::selection, +#subtitleRoot .word.word-jlpt-n4 .c::selection { + text-decoration-color: 
var(--subtitle-jlpt-n4-color, #a6e3a1) !important; + -webkit-text-decoration-color: var(--subtitle-jlpt-n4-color, #a6e3a1) !important; +} + +#subtitleRoot .word.word-jlpt-n5.word-known, +#subtitleRoot .word.word-jlpt-n5.word-n-plus-one, +#subtitleRoot .word.word-jlpt-n5.word-name-match, +#subtitleRoot .word.word-jlpt-n5.word-frequency-single, +#subtitleRoot .word.word-jlpt-n5.word-frequency-band-1, +#subtitleRoot .word.word-jlpt-n5.word-frequency-band-2, +#subtitleRoot .word.word-jlpt-n5.word-frequency-band-3, +#subtitleRoot .word.word-jlpt-n5.word-frequency-band-4, +#subtitleRoot .word.word-jlpt-n5.word-frequency-band-5, +#subtitleRoot .word.word-jlpt-n5:hover, +#subtitleRoot .word.word-jlpt-n5 .c:hover, +#subtitleRoot .word.word-jlpt-n5::selection, +#subtitleRoot .word.word-jlpt-n5 .c::selection { + text-decoration-color: var(--subtitle-jlpt-n5-color, #8aadf4) !important; + -webkit-text-decoration-color: var(--subtitle-jlpt-n5-color, #8aadf4) !important; +} + #subtitleRoot .word:is(.word-jlpt-n1, .word-jlpt-n2, .word-jlpt-n3, .word-jlpt-n4, .word-jlpt-n5):not( .word-known diff --git a/src/renderer/subtitle-render.test.ts b/src/renderer/subtitle-render.test.ts index 323b19ba..c008bc8c 100644 --- a/src/renderer/subtitle-render.test.ts +++ b/src/renderer/subtitle-render.test.ts @@ -220,8 +220,20 @@ function normalizeCssSelector(selector: string): string { .trim(); } -function buildJlptUnderlineSelector(level: number): string { - return `#subtitleRoot .word.word-jlpt-n${level}`; +function buildJlptColorSelector(level: number): string { + const higherPriorityClasses = [ + '.word-known', + '.word-n-plus-one', + '.word-name-match', + '.word-frequency-single', + '.word-frequency-band-1', + '.word-frequency-band-2', + '.word-frequency-band-3', + '.word-frequency-band-4', + '.word-frequency-band-5', + ].join(', '); + + return `#subtitleRoot .word.word-jlpt-n${level}:not(:is(${higherPriorityClasses}))`; } test('computeWordClass preserves known and n+1 classes while adding 
JLPT classes', () => { @@ -887,20 +899,32 @@ test('subtitle annotation CSS underlines JLPT tokens without changing token colo const cssText = fs.readFileSync(cssPath, 'utf-8'); for (let level = 1; level <= 5; level += 1) { - const block = extractClassBlock(cssText, buildJlptUnderlineSelector(level)); - assert.ok(block.length > 0, `word-jlpt-n${level} class should exist`); - assert.doesNotMatch(block, /(?:^|\n)\s*color\s*:/m); - assert.doesNotMatch(block, /-webkit-text-fill-color\s*:/); - assert.match(block, /text-decoration-line:\s*underline;/); + const plainJlptBlock = extractClassBlock(cssText, `#subtitleRoot .word.word-jlpt-n${level}`); + // JLPT tagging must never recolor the token text — other annotations own + // text color. JLPT also must not use `text-decoration: underline`, + // because Chromium repaints text-decoration during ::selection and the + // underline would adopt the other annotation's color during a Yomitan + // lookup. The underline is drawn by `border-bottom`, which is unaffected + // by ::selection and stays locked on the JLPT level color regardless of + // popup/selection state. + assert.doesNotMatch(plainJlptBlock, /(?:^|\n)\s*color\s*:/m); + assert.doesNotMatch(plainJlptBlock, /text-decoration-line:\s*underline;/); + assert.doesNotMatch(plainJlptBlock, /text-decoration\s*:[^;]*\bunderline\b/i); assert.match( - block, - new RegExp(`text-decoration-color:\\s*var\\(--subtitle-jlpt-n${level}-color,`), + plainJlptBlock, + new RegExp(`border-bottom:\\s*2px\\s+solid\\s+var\\(--subtitle-jlpt-n${level}-color,`), + `JLPT level must paint a permanent 2px border-bottom in the level color`, + ); + + // JLPT tagging must communicate level *only* via the underline; it must + // never recolor the token text. Other annotations (known, n+1, frequency, + // name match) are responsible for token text color. 
+ const jlptOnlyColorBlock = extractClassBlock(cssText, buildJlptColorSelector(level)); + assert.equal( + jlptOnlyColorBlock, + '', + `word-jlpt-n${level} (without other annotations) must not set text color — JLPT only paints the underline`, ); - assert.doesNotMatch(block, /border-bottom\s*:/); - assert.doesNotMatch(block, /padding-bottom\s*:/); - assert.doesNotMatch(block, /box-decoration-break\s*:/); - assert.doesNotMatch(block, /-webkit-box-decoration-break\s*:/); - assert.doesNotMatch(block, /text-shadow\s*:/); } for (const selector of [ @@ -1064,6 +1088,55 @@ test('subtitle annotation CSS underlines JLPT tokens without changing token colo /-webkit-text-fill-color:\s*var\(--subtitle-hover-token-color,\s*#f4dbd6\)\s*!important;/, ); + for (let level = 1; level <= 5; level += 1) { + const jlptSelectionLockBlock = extractClassBlock( + cssText, + `#subtitleRoot .word.word-jlpt-n${level}::selection`, + ); + assert.ok(jlptSelectionLockBlock.length > 0, `word-jlpt-n${level} selection lock should exist`); + assert.match( + jlptSelectionLockBlock, + new RegExp( + `text-decoration-color:\\s*var\\(--subtitle-jlpt-n${level}-color,\\s*#[0-9a-f]{6}\\)\\s*!important;`, + 'i', + ), + ); + + for (const annotationClass of [ + 'word-known', + 'word-n-plus-one', + 'word-name-match', + 'word-frequency-single', + 'word-frequency-band-2', + ]) { + const combinedAnnotationBlock = extractClassBlock( + cssText, + `#subtitleRoot .word.word-jlpt-n${level}.${annotationClass}`, + ); + assert.match( + combinedAnnotationBlock, + new RegExp( + `text-decoration-color:\\s*var\\(--subtitle-jlpt-n${level}-color,\\s*#[0-9a-f]{6}\\)\\s*!important;`, + 'i', + ), + `combined JLPT ${annotationClass} selector should lock underline color`, + ); + } + + const jlptCharHoverBlock = extractClassBlock( + cssText, + `#subtitleRoot .word.word-jlpt-n${level} .c:hover`, + ); + assert.match( + jlptCharHoverBlock, + new RegExp( + 
`text-decoration-color:\\s*var\\(--subtitle-jlpt-n${level}-color,\\s*#[0-9a-f]{6}\\)\\s*!important;`, + 'i', + ), + 'JLPT character hover selector should lock underline color', + ); + } + const selectionBlock = extractClassBlock(cssText, '#subtitleRoot::selection'); assert.match( selectionBlock, diff --git a/src/token-merger.ts b/src/token-merger.ts index 493725c4..dd69866c 100644 --- a/src/token-merger.ts +++ b/src/token-merger.ts @@ -177,8 +177,7 @@ export function mergeTokens( } const result: MergedToken[] = []; - const normalizedSourceText = - typeof sourceText === 'string' ? sourceText.replace(/\r?\n/g, ' ').trim() : null; + const normalizedSourceText = normalizeSourceTextForTokenOffsets(sourceText); let charOffset = 0; let sourceCursor = 0; let lastStandaloneToken: Token | null = null; @@ -191,7 +190,9 @@ export function mergeTokens( for (const token of tokens) { const matchedStart = - normalizedSourceText !== null ? normalizedSourceText.indexOf(token.word, sourceCursor) : -1; + typeof normalizedSourceText === 'string' + ? normalizedSourceText.indexOf(token.word, sourceCursor) + : -1; const start = matchedStart >= sourceCursor ? 
matchedStart : charOffset; const end = start + token.word.length; charOffset = end; @@ -282,6 +283,49 @@ function isExcludedByTagSet(normalizedTag: string, exclusions: ReadonlySet exclusions.has(part)); } +function isKanaChar(char: string): boolean { + const code = char.codePointAt(0); + if (code === undefined) { + return false; + } + + return ( + (code >= 0x3041 && code <= 0x3096) || + (code >= 0x309b && code <= 0x309f) || + code === 0x30fc || + (code >= 0x30a0 && code <= 0x30fa) || + (code >= 0x30fd && code <= 0x30ff) + ); +} + +function isKanaCandidateIgnorableChar(char: string): boolean { + return /^[\s.,!?;:()[\]{}"'`、。!?…‥・「」『』()[]{}〈〉《》【】―-]$/u.test(char); +} + +function isKanaOnlyText(text: string): boolean { + const normalized = text.trim(); + if (normalized.length === 0) { + return false; + } + + let hasKana = false; + for (const char of normalized) { + if (isKanaChar(char)) { + hasKana = true; + continue; + } + if (!isKanaCandidateIgnorableChar(char)) { + return false; + } + } + + return hasKana; +} + +function normalizeSourceTextForTokenOffsets(sourceText: string | undefined): string | undefined { + return typeof sourceText === 'string' ? 
sourceText.replace(/\r?\n/g, ' ').trim() : undefined; +} + export function isNPlusOneCandidateToken( token: MergedToken, pos1Exclusions: ReadonlySet = N_PLUS_ONE_IGNORED_POS1, @@ -290,6 +334,9 @@ export function isNPlusOneCandidateToken( if (token.isKnown) { return false; } + if (isKanaOnlyText(token.surface)) { + return false; + } return isNPlusOneWordCountToken(token, pos1Exclusions, pos2Exclusions); } @@ -339,6 +386,18 @@ function isNPlusOneWordCountToken( return true; } +function isNPlusOneSentenceLengthToken( + token: MergedToken, + pos1Exclusions: ReadonlySet = N_PLUS_ONE_IGNORED_POS1, + pos2Exclusions: ReadonlySet = N_PLUS_ONE_IGNORED_POS2, +): boolean { + if (!isNPlusOneWordCountToken(token, pos1Exclusions, pos2Exclusions)) { + return false; + } + + return token.isKnown || isNPlusOneCandidateToken(token, pos1Exclusions, pos2Exclusions); +} + function isSentenceBoundaryToken(token: MergedToken): boolean { if (token.partOfSpeech !== PartOfSpeech.symbol) { return false; @@ -347,22 +406,39 @@ function isSentenceBoundaryToken(token: MergedToken): boolean { return SENTENCE_BOUNDARY_SURFACES.has(token.surface); } +function hasSentenceBoundaryInSourceGap( + sourceText: string | undefined, + previousEnd: number | null, + nextStart: number, +): boolean { + if (typeof sourceText !== 'string' || previousEnd === null || nextStart <= previousEnd) { + return false; + } + + const gap = sourceText.slice(previousEnd, nextStart); + return [...gap].some((char) => SENTENCE_BOUNDARY_SURFACES.has(char)); +} + export function markNPlusOneTargets( tokens: MergedToken[], minSentenceWords = 3, pos1Exclusions: ReadonlySet = N_PLUS_ONE_IGNORED_POS1, pos2Exclusions: ReadonlySet = N_PLUS_ONE_IGNORED_POS2, + sourceText?: string, ): MergedToken[] { if (tokens.length === 0) { return []; } + const normalizedSourceText = normalizeSourceTextForTokenOffsets(sourceText); + const markedTokens = tokens.map((token) => ({ ...token, isNPlusOneTarget: false, })); let sentenceStart = 0; + let 
previousTokenEnd: number | null = null; const minimumSentenceWords = Number.isInteger(minSentenceWords) ? Math.max(1, minSentenceWords) : 3; @@ -373,7 +449,7 @@ export function markNPlusOneTargets( for (let i = start; i < endExclusive; i++) { const token = markedTokens[i]; if (!token) continue; - if (isNPlusOneWordCountToken(token, pos1Exclusions, pos2Exclusions)) { + if (isNPlusOneSentenceLengthToken(token, pos1Exclusions, pos2Exclusions)) { sentenceWordCount += 1; } @@ -393,10 +469,15 @@ export function markNPlusOneTargets( for (let i = 0; i < markedTokens.length; i++) { const token = markedTokens[i]; if (!token) continue; + if (hasSentenceBoundaryInSourceGap(normalizedSourceText, previousTokenEnd, token.startPos)) { + markSentence(sentenceStart, i); + sentenceStart = i; + } if (isSentenceBoundaryToken(token)) { markSentence(sentenceStart, i); sentenceStart = i + 1; } + previousTokenEnd = token.endPos; } if (sentenceStart < markedTokens.length) { diff --git a/src/window-trackers/hyprland-tracker.test.ts b/src/window-trackers/hyprland-tracker.test.ts index 66f4d869..6df5e8ce 100644 --- a/src/window-trackers/hyprland-tracker.test.ts +++ b/src/window-trackers/hyprland-tracker.test.ts @@ -1,9 +1,13 @@ import test from 'node:test'; import assert from 'node:assert/strict'; import { + isHyprlandGeometryEvent, parseHyprctlClients, + parseHyprctlMonitors, + resolveHyprlandWindowGeometry, selectHyprlandMpvWindow, type HyprlandClient, + type HyprlandMonitor, } from './hyprland-tracker'; function makeClient(overrides: Partial = {}): HyprlandClient { @@ -19,6 +23,17 @@ function makeClient(overrides: Partial = {}): HyprlandClient { }; } +function makeMonitor(overrides: Partial = {}): HyprlandMonitor { + return { + id: 0, + x: 0, + y: 0, + width: 1920, + height: 1080, + ...overrides, + }; +} + test('selectHyprlandMpvWindow ignores hidden and unmapped mpv clients', () => { const selected = selectHyprlandMpvWindow( [ @@ -106,3 +121,59 @@ test('parseHyprctlClients tolerates 
non-json prefix output', () => { }, ]); }); + +test('parseHyprctlMonitors returns null for malformed JSON output', () => { + assert.equal(parseHyprctlMonitors('not-json'), null); + assert.equal(parseHyprctlMonitors('[{"id":0,"x":0,"y":0,"width":1920'), null); +}); + +test('isHyprlandGeometryEvent treats geometry events as geometry-changing only', () => { + assert.equal(isHyprlandGeometryEvent('fullscreenv2'), true); + assert.equal(isHyprlandGeometryEvent('workspacev2'), true); + assert.equal(isHyprlandGeometryEvent('windowtitle'), false); + assert.equal(isHyprlandGeometryEvent('windowtitlev2'), false); + assert.equal(isHyprlandGeometryEvent('activewindowv2'), false); +}); + +test('resolveHyprlandWindowGeometry uses monitor bounds for fullscreen clients', () => { + const geometry = resolveHyprlandWindowGeometry( + makeClient({ + at: [60, 80], + size: [1280, 720], + monitor: 1, + fullscreen: 2, + fullscreenClient: 2, + }), + [ + makeMonitor({ id: 0, x: 0, y: 0, width: 1920, height: 1080 }), + makeMonitor({ id: 1, x: 1920, y: 0, width: 2560, height: 1440 }), + ], + ); + + assert.deepEqual(geometry, { + x: 1920, + y: 0, + width: 2560, + height: 1440, + }); +}); + +test('resolveHyprlandWindowGeometry uses monitor bounds for client-requested fullscreen', () => { + const geometry = resolveHyprlandWindowGeometry( + makeClient({ + at: [0, 28], + size: [1920, 1052], + monitor: 0, + fullscreen: 0, + fullscreenClient: 2, + }), + [makeMonitor({ id: 0, x: 0, y: 0, width: 1920, height: 1080 })], + ); + + assert.deepEqual(geometry, { + x: 0, + y: 0, + width: 1920, + height: 1080, + }); +}); diff --git a/src/window-trackers/hyprland-tracker.ts b/src/window-trackers/hyprland-tracker.ts index 64d84397..867b757e 100644 --- a/src/window-trackers/hyprland-tracker.ts +++ b/src/window-trackers/hyprland-tracker.ts @@ -20,6 +20,7 @@ import * as net from 'net'; import { execSync } from 'child_process'; import { BaseWindowTracker } from './base-tracker'; import { createLogger } from 
'../logger'; +import type { WindowGeometry } from '../types'; const log = createLogger('tracker').child('hyprland'); @@ -29,11 +30,22 @@ export interface HyprlandClient { initialClass?: string; at: [number, number]; size: [number, number]; + monitor?: number; + fullscreen?: number; + fullscreenClient?: number; pid?: number; mapped?: boolean; hidden?: boolean; } +export interface HyprlandMonitor { + id: number; + x: number; + y: number; + width: number; + height: number; +} + interface SelectHyprlandMpvWindowOptions { targetMpvSocketPath: string | null; activeWindowAddress: string | null; @@ -124,7 +136,12 @@ export function parseHyprctlClients(output: string): HyprlandClient[] | null { return null; } - const parsed = JSON.parse(jsonPayload) as unknown; + let parsed: unknown; + try { + parsed = JSON.parse(jsonPayload) as unknown; + } catch { + return null; + } if (!Array.isArray(parsed)) { return null; } @@ -132,8 +149,76 @@ export function parseHyprctlClients(output: string): HyprlandClient[] | null { return parsed as HyprlandClient[]; } +export function parseHyprctlMonitors(output: string): HyprlandMonitor[] | null { + const jsonPayload = extractHyprctlJsonPayload(output); + if (!jsonPayload) { + return null; + } + + let parsed: unknown; + try { + parsed = JSON.parse(jsonPayload) as unknown; + } catch { + return null; + } + if (!Array.isArray(parsed)) { + return null; + } + + return parsed as HyprlandMonitor[]; +} + +function isHyprlandFullscreenClient(client: HyprlandClient): boolean { + return (client.fullscreen ?? 0) > 0 || (client.fullscreenClient ?? 
0) > 0; +} + +export function resolveHyprlandWindowGeometry( + client: HyprlandClient, + monitors: HyprlandMonitor[] | null, +): WindowGeometry { + if (isHyprlandFullscreenClient(client) && typeof client.monitor === 'number') { + const monitor = monitors?.find((candidate) => candidate.id === client.monitor); + if (monitor) { + return { + x: monitor.x, + y: monitor.y, + width: monitor.width, + height: monitor.height, + }; + } + } + + return { + x: client.at[0], + y: client.at[1], + width: client.size[0], + height: client.size[1], + }; +} + +export function isHyprlandGeometryEvent(name: string): boolean { + return ( + name === 'movewindow' || + name === 'movewindowv2' || + name === 'resizewindow' || + name === 'resizewindowv2' || + name === 'openwindow' || + name === 'closewindow' || + name === 'fullscreen' || + name === 'fullscreenv2' || + name === 'changefloatingmode' || + name === 'workspace' || + name === 'workspacev2' || + name === 'focusedmon' || + name === 'monitoradded' || + name === 'monitoraddedv2' || + name === 'monitorremoved' + ); +} + export class HyprlandWindowTracker extends BaseWindowTracker { private pollInterval: ReturnType | null = null; + private pollTimeouts: Array> = []; private eventSocket: net.Socket | null = null; private readonly targetMpvSocketPath: string | null; private activeWindowAddress: string | null = null; @@ -154,6 +239,10 @@ export class HyprlandWindowTracker extends BaseWindowTracker { clearInterval(this.pollInterval); this.pollInterval = null; } + for (const timeout of this.pollTimeouts) { + clearTimeout(timeout); + } + this.pollTimeouts = []; if (this.eventSocket) { this.eventSocket.destroy(); this.eventSocket = null; @@ -200,6 +289,9 @@ export class HyprlandWindowTracker extends BaseWindowTracker { } const [name, rawData = ''] = trimmedEvent.split('>>', 2); + if (!name) { + return; + } const data = rawData.trim(); if (name === 'activewindowv2') { @@ -212,17 +304,24 @@ export class HyprlandWindowTracker extends 
BaseWindowTracker { this.activeWindowAddress = null; } - if ( - name === 'movewindow' || - name === 'movewindowv2' || - name === 'windowtitle' || - name === 'windowtitlev2' || - name === 'openwindow' || - name === 'closewindow' || - name === 'fullscreen' || - name === 'changefloatingmode' - ) { - this.pollGeometry(); + if (isHyprlandGeometryEvent(name)) { + this.scheduleGeometryPollBurst(); + } + } + + private scheduleGeometryPollBurst(): void { + for (const timeout of this.pollTimeouts) { + clearTimeout(timeout); + } + this.pollTimeouts = [0, 50, 150, 300].map((delayMs) => { + const pollTimeout = setTimeout(() => { + this.pollTimeouts = this.pollTimeouts.filter((timeout) => timeout !== pollTimeout); + this.pollGeometry(); + }, delayMs); + return pollTimeout; + }); + for (const pollTimeout of this.pollTimeouts) { + pollTimeout.unref?.(); } } @@ -237,12 +336,9 @@ export class HyprlandWindowTracker extends BaseWindowTracker { const mpvWindow = this.findTargetWindow(clients); if (mpvWindow) { - this.updateGeometry({ - x: mpvWindow.at[0], - y: mpvWindow.at[1], - width: mpvWindow.size[0], - height: mpvWindow.size[1], - }); + this.updateGeometry( + resolveHyprlandWindowGeometry(mpvWindow, this.getHyprlandMonitors(mpvWindow)), + ); } else { this.updateGeometry(null); } @@ -259,6 +355,19 @@ export class HyprlandWindowTracker extends BaseWindowTracker { }); } + private getHyprlandMonitors(client: HyprlandClient): HyprlandMonitor[] | null { + if (!isHyprlandFullscreenClient(client)) { + return null; + } + + try { + const output = execSync('hyprctl -j monitors', { encoding: 'utf-8' }); + return parseHyprctlMonitors(output); + } catch { + return null; + } + } + private getWindowCommandLine(pid: number): string | null { const commandLine = execSync(`ps -p ${pid} -o args=`, { encoding: 'utf-8', diff --git a/stats/src/App.tsx b/stats/src/App.tsx index aa5a96b2..e70e0968 100644 --- a/stats/src/App.tsx +++ b/stats/src/App.tsx @@ -127,7 +127,7 @@ export function App() { ); return ( -
+
-
+
{mediaDetail ? ( }> { + assert.match(css, /html,\s*body,\s*#root\s*\{[^}]*height:\s*100%;/s); + assert.match(css, /body\.overlay-mode\s*\{[^}]*background-color:\s*var\(--color-ctp-base\);/s); + assert.doesNotMatch(css, /body\.overlay-mode\s*\{[^}]*rgba\(/s); + assert.match( + css, + /body\.overlay-mode #root\s*\{[^}]*background-color:\s*var\(--color-ctp-base\);/s, + ); +});