9 Commits

Author SHA1 Message Date
a34a7489db chore: prep v0.5.5 release 2026-03-09 18:07:01 -07:00
e59192bbe1 Improve startup dictionary progress and fix overlay/plugin input handling
- show a dedicated startup OSD "building" phase for character dictionary sync
- forward bare `Tab` from visible overlay to mpv so AniSkip works while focused
- fix Windows plugin env override resolution for `SUBMINER_BINARY_PATH`
2026-03-09 02:35:03 -07:00
e0f82d28f0 Improve startup dictionary sync UX and default playback keybindings
- Add default `f` fullscreen overlay binding and switch default AniSkip skip key to `Tab`
- Make character-dictionary auto-sync non-blocking at startup with tokenization gating for Yomitan mutations
- Add ordered startup OSD progress (checking/generating/updating/importing), refresh current subtitle on sync completion, and extend regression tests
2026-03-09 00:50:32 -07:00
a0521aeeaf fix(tracker): follow active hyprland and visible x11 windows 2026-03-08 23:03:55 -07:00
2127f759ca fix: correct guessit title parsing for character dictionary sync 2026-03-08 23:03:48 -07:00
5e787183d0 cleanup docs 2026-03-08 23:03:08 -07:00
81ca31b899 remove release notes 2026-03-08 22:15:21 -07:00
e2a7597b4f update README 2026-03-08 22:10:09 -07:00
2e59c21078 chore: prep v0.5.3 release 2026-03-08 22:08:46 -07:00
66 changed files with 3964 additions and 488 deletions

View File

@@ -10,7 +10,6 @@ concurrency:
cancel-in-progress: false
permissions:
actions: read
contents: write
jobs:
@@ -220,27 +219,6 @@ jobs:
restore-keys: |
${{ runner.os }}-bun-
- name: Validate Windows signing secrets
shell: bash
run: |
missing=0
for name in SIGNPATH_API_TOKEN SIGNPATH_ORGANIZATION_ID SIGNPATH_PROJECT_SLUG SIGNPATH_SIGNING_POLICY_SLUG SIGNPATH_ARTIFACT_CONFIGURATION_SLUG; do
if [ -z "${!name}" ]; then
echo "Missing required secret: $name"
missing=1
fi
done
if [ "$missing" -ne 0 ]; then
echo "Set the SignPath Windows signing secrets and rerun."
exit 1
fi
env:
SIGNPATH_API_TOKEN: ${{ secrets.SIGNPATH_API_TOKEN }}
SIGNPATH_ORGANIZATION_ID: ${{ secrets.SIGNPATH_ORGANIZATION_ID }}
SIGNPATH_PROJECT_SLUG: ${{ secrets.SIGNPATH_PROJECT_SLUG }}
SIGNPATH_SIGNING_POLICY_SLUG: ${{ secrets.SIGNPATH_SIGNING_POLICY_SLUG }}
SIGNPATH_ARTIFACT_CONFIGURATION_SLUG: ${{ secrets.SIGNPATH_ARTIFACT_CONFIGURATION_SLUG }}
- name: Install dependencies
run: bun install --frozen-lockfile
@@ -252,99 +230,17 @@ jobs:
bun run build
- name: Build unsigned Windows artifacts
run: bun run build:win
run: bun run build:win:unsigned
- name: Upload unsigned Windows artifact for SignPath
id: upload-unsigned-windows-artifact
- name: Upload Windows artifacts
uses: actions/upload-artifact@v4
with:
name: unsigned-windows
name: windows
path: |
release/*.exe
release/*.zip
if-no-files-found: error
- name: Submit Windows signing request (attempt 1)
id: signpath-sign-attempt-1
continue-on-error: true
uses: signpath/github-action-submit-signing-request@v2
with:
api-token: ${{ secrets.SIGNPATH_API_TOKEN }}
organization-id: ${{ secrets.SIGNPATH_ORGANIZATION_ID }}
project-slug: ${{ secrets.SIGNPATH_PROJECT_SLUG }}
signing-policy-slug: ${{ secrets.SIGNPATH_SIGNING_POLICY_SLUG }}
artifact-configuration-slug: ${{ secrets.SIGNPATH_ARTIFACT_CONFIGURATION_SLUG }}
github-artifact-id: ${{ steps.upload-unsigned-windows-artifact.outputs.artifact-id }}
wait-for-completion: true
output-artifact-directory: signed-windows-attempt-1
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Submit Windows signing request (attempt 2)
id: signpath-sign-attempt-2
if: steps.signpath-sign-attempt-1.outcome == 'failure'
continue-on-error: true
uses: signpath/github-action-submit-signing-request@v2
with:
api-token: ${{ secrets.SIGNPATH_API_TOKEN }}
organization-id: ${{ secrets.SIGNPATH_ORGANIZATION_ID }}
project-slug: ${{ secrets.SIGNPATH_PROJECT_SLUG }}
signing-policy-slug: ${{ secrets.SIGNPATH_SIGNING_POLICY_SLUG }}
artifact-configuration-slug: ${{ secrets.SIGNPATH_ARTIFACT_CONFIGURATION_SLUG }}
github-artifact-id: ${{ steps.upload-unsigned-windows-artifact.outputs.artifact-id }}
wait-for-completion: true
output-artifact-directory: signed-windows-attempt-2
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Submit Windows signing request (attempt 3)
id: signpath-sign-attempt-3
if: steps.signpath-sign-attempt-1.outcome == 'failure' && steps.signpath-sign-attempt-2.outcome == 'failure'
continue-on-error: true
uses: signpath/github-action-submit-signing-request@v2
with:
api-token: ${{ secrets.SIGNPATH_API_TOKEN }}
organization-id: ${{ secrets.SIGNPATH_ORGANIZATION_ID }}
project-slug: ${{ secrets.SIGNPATH_PROJECT_SLUG }}
signing-policy-slug: ${{ secrets.SIGNPATH_SIGNING_POLICY_SLUG }}
artifact-configuration-slug: ${{ secrets.SIGNPATH_ARTIFACT_CONFIGURATION_SLUG }}
github-artifact-id: ${{ steps.upload-unsigned-windows-artifact.outputs.artifact-id }}
wait-for-completion: true
output-artifact-directory: signed-windows-attempt-3
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Fail when all SignPath signing attempts fail
if: steps.signpath-sign-attempt-1.outcome == 'failure' && steps.signpath-sign-attempt-2.outcome == 'failure' && steps.signpath-sign-attempt-3.outcome == 'failure'
shell: bash
run: |
echo "All SignPath signing attempts failed; rerun the workflow when SignPath is healthy."
exit 1
- name: Upload signed Windows artifacts (attempt 1)
if: steps.signpath-sign-attempt-1.outcome == 'success'
uses: actions/upload-artifact@v4
with:
name: windows
path: |
signed-windows-attempt-1/*.exe
signed-windows-attempt-1/*.zip
- name: Upload signed Windows artifacts (attempt 2)
if: steps.signpath-sign-attempt-2.outcome == 'success'
uses: actions/upload-artifact@v4
with:
name: windows
path: |
signed-windows-attempt-2/*.exe
signed-windows-attempt-2/*.zip
- name: Upload signed Windows artifacts (attempt 3)
if: steps.signpath-sign-attempt-3.outcome == 'success'
uses: actions/upload-artifact@v4
with:
name: windows
path: |
signed-windows-attempt-3/*.exe
signed-windows-attempt-3/*.zip
release:
needs: [build-linux, build-macos, build-windows]
runs-on: ubuntu-latest

1
.gitignore vendored
View File

@@ -37,3 +37,4 @@ tests/*
.worktrees/
.codex/*
.agents/*
favicon.png

View File

@@ -1,16 +1,44 @@
# Changelog
## v0.5.5 (2026-03-09)
### Changed
- Overlay: Added `f` as the default overlay fullscreen toggle and changed the default AniSkip intro-jump key to `Tab`.
- Dictionary: Aligned AniList character dictionary generation more closely with the upstream reference by preserving duplicate shared names across characters, skipping characters without native Japanese names, restoring richer character info fields, and using upstream-style role mapping plus hint-aware kanji readings.
- Startup: Ordered startup OSD messages so tokenization loads first, annotation loading appears next if still pending, and character dictionary sync progress waits until annotation loading finishes.
- Dictionary: Added a visible startup OSD step for merged character-dictionary building so long rebuilds show progress before the later import/upload phase.
### Fixed
- Dictionary: Fixed AniList media guessing for character dictionary auto-sync by using filename-only `guessit` input and preserving multi-part guessit titles instead of truncating them to the first segment.
- Dictionary: Refresh the current subtitle after character dictionary auto-sync completes so newly imported character names highlight on the active line instead of waiting for the next subtitle change.
- Dictionary: Show character dictionary auto-sync progress on the mpv OSD without sending desktop notifications.
- Dictionary: Keep character dictionary auto-sync non-blocking during startup by letting snapshot/build work run in parallel and delaying only the Yomitan import/settings phase until current-media tokenization is already ready.
- Overlay: Fixed visible overlay keyboard handling so pressing `Tab` still reaches mpv and triggers the default AniSkip skip-intro binding while the overlay has focus.
- Plugin: Fix Windows mpv plugin binary override lookup so `SUBMINER_BINARY_PATH` still resolves to `SubMiner.exe` when no AppImage override is set.
## v0.5.3 (2026-03-09)
### Changed
- Release: Publish unsigned Windows `.exe` and `.zip` artifacts directly from release CI instead of routing them through SignPath.
- Release: Added `bun run build:win:unsigned` for explicit local unsigned Windows packaging.
## v0.5.2 (2026-03-09)
### Internal
- Release: Pinned the Windows SignPath submission workflow to an explicit artifact-configuration slug instead of relying on the SignPath project's default configuration.
## v0.5.1 (2026-03-09)
### Changed
- Launcher: Removed the YouTube subtitle generation mode switch so YouTube playback always preloads subtitles before mpv starts.
### Fixed
- Launcher: Hardened YouTube AI subtitle fixing so fenced SRT output and text-only one-cue-per-block responses can still be applied without losing original cue timing.
- Launcher: Skipped AniSkip lookup during URL playback and YouTube subtitle-preload playback, limiting AniSkip to local file targets where it can actually resolve anime metadata.
- Launcher: Keep the background SubMiner process running after a launcher-managed mpv session exits so the next mpv instance can reconnect without restarting the app.
@@ -18,6 +46,7 @@
- Windows: Acquire the app single-instance lock earlier so Windows overlay/video launches reuse the running background SubMiner process instead of booting a second full app and repeating startup warmups.
## v0.3.0 (2026-03-05)
- Added keyboard-driven Yomitan navigation and popup controls, including optional auto-pause.
- Added subtitle/jump keyboard handling fixes for smoother subtitle playback control.
- Improved Anki/Yomitan reliability with stronger Yomitan proxy syncing and safer extension refresh logic.
@@ -28,6 +57,7 @@
- Removed docs Plausible integration and cleaned associated tracker settings.
## v0.2.3 (2026-03-02)
- Added performance and tokenization optimizations (faster warmup, persistent MeCab usage, reduced enrichment lookups).
- Added subtitle controls for no-jump delay shifts.
- Improved subtitle highlight logic with priority and reliability fixes.
@@ -36,30 +66,36 @@
- Updated startup flow to load dictionaries asynchronously and unblock first tokenization sooner.
## v0.2.2 (2026-03-01)
- Improved subtitle highlighting reliability for frequency modes.
- Fixed Jellyfin misc info formatting cleanup.
- Version bump maintenance for 0.2.2.
## v0.2.1 (2026-03-01)
- Delivered Jellyfin and Subsync fixes from release patch cycle.
- Version bump maintenance for 0.2.1.
## v0.2.0 (2026-03-01)
- Added task-related release work for the overlay 2.0 cycle.
- Introduced Overlay 2.0.
- Improved release automation reliability.
## v0.1.2 (2026-02-24)
- Added encrypted AniList token handling and default GNOME keyring support.
- Added launcher passthrough for password-store flows (Jellyfin path).
- Updated docs for auth and integration behavior.
- Version bump maintenance for 0.1.2.
## v0.1.1 (2026-02-23)
- Fixed overlay modal focus handling (`grab input`) behavior.
- Version bump maintenance for 0.1.1.
## v0.1.0 (2026-02-23)
- Bootstrapped Electron runtime, services, and composition model.
- Added runtime asset packaging and dependency vendoring.
- Added project docs baseline, setup guides, architecture notes, and submodule/runtime assets.

View File

@@ -20,8 +20,6 @@
<br />
Initial packaged Windows support is now available alongside the existing Linux and macOS builds.
## What it does
SubMiner is an Electron overlay that sits on top of mpv. It turns your video player into a full sentence-mining workstation:
@@ -31,7 +29,7 @@ SubMiner is an Electron overlay that sits on top of mpv. It turns your video pla
- **One-key mining** — Creates Anki cards with sentence, audio, screenshot, and translation
- **Instant auto-enrichment** — Optional local AnkiConnect proxy enriches new Yomitan cards immediately
- **Reading annotations** — Combines N+1 targeting, frequency-dictionary highlighting, and JLPT underlining while you read
- **Hover-aware playback** — By default, hovering subtitle text pauses mpv and resumes on mouse leave (`subtitleStyle.autoPauseVideoOnHover`)
- **Hover-aware playback** — By default, hovering subtitle text pauses mpv and resumes on mouse leave
- **Subtitle tools** — Download from Jimaku, sync with alass/ffsubsync
- **Immersion tracking** — SQLite-powered stats on your watch time and mining activity
- **Custom texthooker page** — Built-in custom texthooker page and websocket, no extra setup
@@ -71,7 +69,7 @@ SubMiner.AppImage
On macOS, launch `SubMiner.app`. On Windows, launch `SubMiner.exe` from the Start menu or install directory.
On first launch, SubMiner now:
On first launch, SubMiner:
- starts in the tray/background
- creates the default config directory and `config.jsonc`
@@ -79,8 +77,6 @@ On first launch, SubMiner now:
- can install the mpv plugin to the default mpv scripts location for you
- links directly to Yomitan settings so you can install dictionaries before finishing setup
Existing installs that already have a valid config plus at least one Yomitan dictionary are auto-detected as complete and will not be re-prompted.
### 3. Finish setup
- click `Install mpv plugin` if you want the default plugin auto-start flow
@@ -114,20 +110,9 @@ Windows builds use native window tracking and do not require the Linux composito
For full guides on configuration, Anki, Jellyfin, and more, see [docs.subminer.moe](https://docs.subminer.moe).
## Testing
- Run `bun run test` or `bun run test:fast` for the default fast lane: config/core coverage plus representative entry/runtime, Anki integration, and main runtime checks.
- Run `bun run test:full` for the maintained test surface: Bun-compatible `src/**` coverage, Bun-compatible launcher unit coverage, and the maintained dist compatibility slice for `ipc`, `anki-jimaku-ipc`, `overlay-manager`, `config-validation`, `startup-config`, and runtime registry coverage.
- Run `bun run test:node:compat` directly when you only need that dist compatibility slice. The command name is legacy; it now runs under Bun.
- Run `bun run test:env` for environment-specific verification: launcher smoke/plugin checks plus the SQLite-backed immersion tracker lane.
- Run `bun run test:immersion:sqlite` when you specifically need the dist SQLite persistence coverage.
- Run `bun run test:subtitle` for the maintained `alass`/`ffsubsync` subtitle surface.
The Bun-managed discovery lanes intentionally exclude a small set of suites from the source-file discovery pass and keep them in the maintained dist compatibility slice instead: Electron-focused tests in `src/core/services/ipc.test.ts`, `src/core/services/anki-jimaku-ipc.test.ts`, and `src/core/services/overlay-manager.test.ts`, plus runtime/config tests in `src/main/config-validation.test.ts`, `src/main/runtime/startup-config.test.ts`, and `src/main/runtime/registry.test.ts`. `bun run test:node:compat` keeps those suites in the standard workflow instead of leaving them untracked.
## Acknowledgments
Built on the shoulders of [GameSentenceMiner](https://github.com/bpwhelan/GameSentenceMiner), [Renji's Texthooker Page](https://github.com/Renji-XD/texthooker-ui), [mpvacious](https://github.com/Ajatt-Tools/mpvacious), [Anacreon-Script](https://github.com/friedrich-de/Anacreon-Script), and [Bee's Character Dictionary](https://github.com/bee-san/Japanese_Character_Name_Dictionary). Subtitles powered by [Jimaku.cc](https://jimaku.cc). Dictionary lookups via [Yomitan](https://github.com/yomidevs/yomitan).
Built on the shoulders of [GameSentenceMiner](https://github.com/bpwhelan/GameSentenceMiner), [Renji's Texthooker Page](https://github.com/Renji-XD/texthooker-ui), [Anacreon-Script](https://github.com/friedrich-de/Anacreon-Script), and [Bee's Character Dictionary](https://github.com/bee-san/Japanese_Character_Name_Dictionary). Subtitles powered by [Jimaku.cc](https://jimaku.cc). Dictionary lookups via [Yomitan](https://github.com/yomidevs/yomitan), and JLPT tags from [yomitan-jlpt-vocab](https://github.com/stephenmk/yomitan-jlpt-vocab).
## License

View File

@@ -0,0 +1,80 @@
---
id: TASK-131
title: Make default overlay fullscreen and AniSkip end-jump keybindings easier to reach
status: Done
assignee:
- codex
created_date: '2026-03-09 00:00'
updated_date: '2026-03-09 00:30'
labels:
- enhancement
- overlay
- mpv
- aniskip
dependencies: []
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Make two default keyboard actions easier to hit during playback: add `f` as the built-in overlay fullscreen toggle, and make AniSkip's default intro-end jump use `Tab`.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Default overlay keybindings include `KeyF` mapped to mpv fullscreen toggle.
- [x] #2 Default AniSkip hint/button key defaults to `Tab` and the plugin registers that binding.
- [x] #3 Automated regression coverage exists for both default bindings.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Add a failing TypeScript regression proving default overlay keybindings include fullscreen on `KeyF`.
2. Add a failing Lua/plugin regression proving AniSkip defaults to `Tab`, updates the OSD hint text, and registers the expected keybinding.
3. Patch the default keybinding/config values with minimal behavior changes and keep fallback binding behavior intentional.
4. Run focused tests plus touched verification commands, then record results and a short changelog fragment.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Added `KeyF -> ['cycle', 'fullscreen']` to the built-in overlay keybindings in `src/config/definitions/shared.ts`.
Changed the mpv plugin AniSkip default button key from `y-k` to `TAB` in both the runtime default options and the shipped `plugin/subminer.conf`. The AniSkip OSD hint now also falls back to `TAB` when no explicit key is configured.
Adjusted `plugin/subminer/ui.lua` fallback registration so the legacy `y-k` binding is only added for custom non-default AniSkip bindings, instead of always shadowing the new default.
Extended regression coverage:
- `src/config/definitions/domain-registry.test.ts` now asserts the default fullscreen binding on `KeyF`.
- `scripts/test-plugin-start-gate.lua` now isolates plugin runs correctly, records keybinding/observer registration, and asserts the default AniSkip keybinding/prompt behavior for `TAB`.
Verification:
- `bun test src/config/definitions/domain-registry.test.ts`
- `bun run test:config:src`
- `lua scripts/test-plugin-start-gate.lua`
- `bun run changelog:lint`
- `bun run typecheck`
Known unrelated verification gap:
- `bun run test:plugin:src` still fails in `scripts/test-plugin-binary-windows.lua` on this Linux host (`windows env override should resolve .exe suffix`), outside the keybinding changes in this task.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Default overlay playback now has an easier fullscreen toggle on `f`, and AniSkip's default intro-end jump now uses `Tab`. The mpv plugin hint text and registration logic were updated to match the new default, while keeping legacy `y-k` fallback behavior limited to custom non-default bindings.
Regression coverage was added for both defaults, and the plugin test harness now resets plugin bootstrap state between scenarios so keybinding assertions can run reliably.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,62 @@
---
id: TASK-138
title: Publish unsigned Windows release artifacts and add local unsigned build script
status: Done
assignee:
- codex
created_date: '2026-03-09 00:00'
updated_date: '2026-03-09 00:00'
labels:
- release
- windows
dependencies: []
references:
- .github/workflows/release.yml
- package.json
- src/release-workflow.test.ts
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Stop the tag-driven release workflow from depending on SignPath and publish unsigned Windows `.exe` and `.zip` artifacts directly. Add an explicit local `build:win:unsigned` script without changing the existing `build:win` command.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Windows release CI builds unsigned artifacts without requiring SignPath secrets.
- [x] #2 The Windows release job uploads `release/*.exe` and `release/*.zip` directly as the `windows` artifact.
- [x] #3 The repo exposes a local `build:win:unsigned` script for explicit unsigned Windows packaging.
- [x] #4 Regression coverage fails if the workflow reintroduces SignPath submission or drops the unsigned script.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Update workflow regression tests to assert unsigned Windows release behavior and the new local script.
2. Patch `package.json` to add `build:win:unsigned`.
3. Patch `.github/workflows/release.yml` to build unsigned Windows artifacts and upload them directly.
4. Add the release changelog fragment and run focused verification.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Removed the Windows SignPath secret validation and submission steps from `.github/workflows/release.yml`. The Windows release job now runs `bun run build:win:unsigned` and uploads `release/*.exe` and `release/*.zip` directly as the `windows` artifact consumed by the release job.
Added `scripts/build-win-unsigned.mjs` plus the `build:win:unsigned` package script. The wrapper clears Windows code-signing environment variables and disables identity auto-discovery before invoking `electron-builder`, so release CI stays unsigned even if signing credentials are configured elsewhere.
Updated `src/release-workflow.test.ts` to assert the unsigned workflow contract and added the release changelog fragment in `changes/unsigned-windows-release-builds.md`.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Windows release CI now publishes unsigned artifacts directly and no longer depends on SignPath. Local developers also have an explicit `bun run build:win:unsigned` path for unsigned packaging without changing the existing `build:win` command.
Verification:
- `bun test src/release-workflow.test.ts`
- `bun run typecheck`
- `node --check scripts/build-win-unsigned.mjs`
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,57 @@
---
id: TASK-139
title: Cut patch release v0.5.3 for unsigned Windows release builds
status: Done
assignee:
- codex
created_date: '2026-03-09 00:00'
updated_date: '2026-03-09 00:00'
labels:
- release
- patch
dependencies:
- TASK-138
references:
- package.json
- CHANGELOG.md
- release/release-notes.md
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Publish a patch release from the unsigned Windows release-build change by bumping the app version, generating committed changelog artifacts for `v0.5.3`, and pushing the release-prep commit.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Repository version metadata is updated to `0.5.3`.
- [x] #2 `CHANGELOG.md` and `release/release-notes.md` contain the committed `v0.5.3` section and consumed fragments are removed.
- [x] #3 New `v0.5.3` release-prep commit is pushed to `origin/main`.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Bump `package.json` from `0.5.2` to `0.5.3`.
2. Run `bun run changelog:build` so committed changelog artifacts match the new patch version.
3. Run changelog/typecheck/test verification.
4. Commit the release-prep change set and push `main`.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Bumped `package.json` from `0.5.2` to `0.5.3`, ran `bun run changelog:build`, and committed the generated release artifacts. That prepended the `v0.5.3` section to `CHANGELOG.md`, regenerated `release/release-notes.md`, and removed the consumed `changes/unsigned-windows-release-builds.md` fragment.
Verification before push: `bun run changelog:lint`, `bun run changelog:check --version 0.5.3`, `bun run typecheck`, and `bun run test:fast`.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Prepared patch release `v0.5.3` so the unsigned Windows release-build change is captured in committed release metadata on `main`. Version metadata, changelog output, and release notes are aligned with the new patch version.
Validation: `bun run changelog:lint`, `bun run changelog:check --version 0.5.3`, `bun run typecheck`, `bun run test:fast`.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,38 @@
---
id: TASK-140
title: Fix guessit title parsing for character dictionary sync
status: Done
assignee: []
created_date: '2026-03-09 00:00'
updated_date: '2026-03-09 00:25'
labels:
- dictionary
- anilist
- bug
- guessit
dependencies: []
references:
- /home/sudacode/projects/japanese/SubMiner/src/core/services/anilist/anilist-updater.ts
- /home/sudacode/projects/japanese/SubMiner/src/core/services/anilist/anilist-updater.test.ts
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Fix AniList character dictionary auto-sync for filenames where `guessit` misparses the full path and our title extraction keeps only the first array segment, causing AniList resolution to match the wrong anime and abort merged dictionary refresh.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 AniList media guessing passes basename-only targets to `guessit` so parent folder names do not corrupt series title detection.
- [x] #2 Guessit title arrays are combined into one usable title instead of truncating to the first segment.
- [x] #3 Regression coverage includes the Bunny Girl Senpai filename shape that previously resolved to the wrong AniList entry.
- [x] #4 Verification confirms the targeted AniList guessing tests pass.
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Root repro: `guessit` parsed the Bunny Girl Senpai full path as `title: ["Rascal", "Does-not-Dream-of-Bunny-Girl-Senapi"]`, and our `firstString` helper kept only `Rascal`, which resolved to AniList 3490 (`rayca`) and produced zero character results. Fixed by sending basename-only input to `guessit` and joining multi-part guessit title arrays.
<!-- SECTION:NOTES:END -->

View File

@@ -0,0 +1,36 @@
---
id: TASK-141
title: Refresh current subtitle after character dictionary sync completes
status: Done
assignee: []
created_date: '2026-03-09 00:00'
updated_date: '2026-03-09 00:55'
labels:
- dictionary
- overlay
- bug
dependencies: []
references:
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/character-dictionary-auto-sync.ts
- /home/sudacode/projects/japanese/SubMiner/src/main.ts
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
When character dictionary auto-sync finishes after startup tokenization, invalidate cached subtitle tokenization and refresh the current subtitle so character-name highlighting catches up without waiting for the next subtitle line.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Successful character dictionary sync exposes a completion hook for main runtime follow-up.
- [x] #2 Main runtime clears Yomitan parser caches and refreshes the current subtitle after sync completion.
- [x] #3 Regression coverage verifies the sync completion callback fires on successful sync.
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Observed on Bunny Girl Senpai startup: autoplay/tokenization became ready around 8s, but snapshot/import/state write completed roughly 31s after launch, leaving the current subtitle tokenized without the newly imported character dictionary. Fixed by adding an auto-sync completion hook that clears parser caches and refreshes the current subtitle.
<!-- SECTION:NOTES:END -->

View File

@@ -0,0 +1,43 @@
---
id: TASK-142
title: Show character dictionary auto-sync progress on OSD
status: Done
assignee: []
created_date: '2026-03-09 01:10'
updated_date: '2026-03-09 01:10'
labels:
- dictionary
- overlay
- ux
dependencies: []
references:
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/character-dictionary-auto-sync.ts
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/character-dictionary-auto-sync-notifications.ts
- /home/sudacode/projects/japanese/SubMiner/src/main.ts
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
When character dictionary auto-sync runs for a newly opened anime, surface progress so users know why character-name lookup/highlighting is temporarily unavailable via the mpv OSD without desktop notification popups.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Character dictionary auto-sync emits progress events for syncing, importing, ready, and failure states.
- [x] #2 Main runtime routes those progress events through OSD notifications without desktop notifications.
- [x] #3 Regression coverage verifies progress events and notification routing behavior.
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
OSD now shows auto-sync phase changes while the dictionary updates. Desktop notifications were removed for this path to avoid startup popup spam.
<!-- SECTION:NOTES:END -->

View File

@@ -0,0 +1,43 @@
---
id: TASK-143
title: Keep character dictionary auto-sync non-blocking during startup
status: Done
assignee: []
created_date: '2026-03-09 01:45'
updated_date: '2026-03-09 01:45'
labels:
- dictionary
- startup
- performance
dependencies: []
references:
- /home/sudacode/projects/japanese/SubMiner/src/main.ts
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/character-dictionary-auto-sync.ts
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/current-media-tokenization-gate.ts
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Keep character dictionary auto-sync running in parallel during startup without delaying playback. Only tokenization readiness should gate playback; character dictionary import/settings updates should wait until tokenization is already ready and then refresh annotations afterward.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Character dictionary snapshot/build work can run immediately during startup.
- [x] #2 Yomitan dictionary mutation work waits until current-media tokenization is ready.
- [x] #3 Regression coverage verifies auto-sync builds before the gate and only mutates Yomitan after the gate resolves.
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Added a small current-media tokenization gate in main runtime. Media changes reset the gate, the first tokenization-ready event marks it ready, and auto-sync now waits on that gate only before Yomitan dictionary inspection/import/settings updates. Snapshot generation and merged ZIP build still run immediately in parallel.
<!-- SECTION:NOTES:END -->

View File

@@ -0,0 +1,44 @@
---
id: TASK-144
title: Sequence startup OSD notifications for tokenization, annotations, and character dictionary sync
status: Done
assignee: []
created_date: '2026-03-09 10:40'
updated_date: '2026-03-09 10:40'
labels:
- startup
- overlay
- ux
dependencies: []
references:
- /home/sudacode/projects/japanese/SubMiner/src/main.ts
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/startup-osd-sequencer.ts
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/subtitle-tokenization-main-deps.ts
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/character-dictionary-auto-sync-notifications.ts
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Keep startup OSD progress ordered. While tokenization is still pending, only show the tokenization loading message. After tokenization becomes ready, show annotation loading if annotation warmup still remains. Only surface character dictionary auto-sync progress after annotation loading clears, and only if the dictionary work is still active.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Character dictionary progress stays hidden while tokenization startup loading is still active.
- [x] #2 Annotation loading OSD appears after tokenization readiness and before any later character dictionary progress.
- [x] #3 Regression coverage verifies buffered dictionary progress/failure ordering during startup.
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Added a small startup OSD sequencer in main runtime. Annotation warmup OSD now flows through that sequencer, and character dictionary sync notifications buffer until tokenization plus annotation loading clear. Buffered `ready` updates are dropped if dictionary progress finished before it ever became visible, while buffered failures still surface after annotation loading completes.
<!-- SECTION:NOTES:END -->

View File

@@ -0,0 +1,43 @@
---
id: TASK-145
title: Show character dictionary build progress on startup OSD before import
status: Done
assignee: []
created_date: '2026-03-09 11:20'
updated_date: '2026-03-09 11:20'
labels:
- startup
- dictionary
- ux
dependencies: []
references:
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/character-dictionary-auto-sync.ts
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/startup-osd-sequencer.ts
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/character-dictionary-auto-sync.test.ts
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Surface an explicit character-dictionary build phase on startup OSD so there is visible progress between subtitle annotation loading and the later import/upload step when merged dictionary generation is still running.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Auto-sync emits a dedicated in-flight status while merged dictionary generation is running.
- [x] #2 Startup OSD sequencing treats that build phase as progress and can surface it after annotation loading clears.
- [x] #3 Regression coverage verifies the build phase is emitted before import begins.
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Added a `building` progress phase before `buildMergedDictionary(...)` and included it in the startup OSD sequencer's buffered progress set. This gives startup a visible dictionary-progress step even when snapshot checking/generation finishes too early to still be relevant by the time annotation loading completes.
<!-- SECTION:NOTES:END -->

View File

@@ -0,0 +1,46 @@
---
id: TASK-145
title: Show checking and generation OSD for character dictionary auto-sync
status: Done
assignee: []
created_date: '2026-03-09 11:20'
updated_date: '2026-03-09 11:20'
labels:
- dictionary
- overlay
- ux
dependencies: []
references:
- /home/sudacode/projects/japanese/SubMiner/src/main.ts
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/character-dictionary-auto-sync.ts
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/startup-osd-sequencer.ts
- /home/sudacode/projects/japanese/SubMiner/src/main/character-dictionary-runtime.ts
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Surface an immediate startup OSD that the character dictionary is being checked, and show a distinct generating message only when the current AniList media actually needs a fresh snapshot build instead of reusing a cached one.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Auto-sync emits a `checking` progress event before snapshot resolution completes.
- [x] #2 Auto-sync emits `generating` only for snapshot cache misses and keeps `updating`/`importing` as later phases.
- [x] #3 Startup OSD sequencing still prioritizes tokenization then annotation loading before buffered dictionary progress.
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Character dictionary auto-sync now emits `Checking character dictionary...` as soon as the AniList media is resolved, then emits `Generating character dictionary...` only when the snapshot layer misses and a real rebuild begins. Cached snapshots skip the generating phase and continue straight into the later update/import flow.
Wired those progress callbacks through the character-dictionary runtime boundary, updated the startup OSD sequencer to treat checking/generating as dictionary-progress phases with the same tokenization and annotation precedence, and added regression coverage for cache-hit vs cache-miss behavior plus buffered startup ordering.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,66 @@
---
id: TASK-146
title: Forward overlay Tab to mpv for AniSkip
status: Done
assignee:
- codex
created_date: '2026-03-09 00:00'
updated_date: '2026-03-09 00:00'
labels:
- bug
- overlay
- aniskip
- linux
dependencies: []
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Fix visible-overlay keyboard handling so bare `Tab` is forwarded to mpv instead of being consumed by Electron focus navigation. This restores the default AniSkip `TAB` binding while the overlay has focus, especially on Linux.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Visible overlay forwards bare `Tab` to mpv as `keypress TAB`.
- [x] #2 Modal overlays keep their existing local `Tab` behavior.
- [x] #3 Automated regression coverage exists for the input handler and overlay factory wiring.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Add a failing regression around visible-overlay `before-input-event` handling for bare `Tab`.
2. Add/extend overlay factory tests so the new mpv-forward callback is wired through runtime construction.
3. Patch overlay input handling to intercept visible-overlay `Tab` and send mpv `keypress TAB`.
4. Run focused overlay tests, typecheck, and changelog validation.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Extracted visible-overlay input handling into `src/core/services/overlay-window-input.ts` so the `Tab` forwarding decision can be unit tested without loading Electron window primitives.
Visible overlay `before-input-event` now intercepts bare `Tab`, prevents the browser default, and forwards mpv `keypress TAB` through the existing mpv runtime command path. Modal overlays remain unchanged.
Verification:
- `bun test src/core/services/overlay-window.test.ts src/main/runtime/overlay-window-factory.test.ts src/main/runtime/overlay-window-factory-main-deps.test.ts src/main/runtime/overlay-window-runtime-handlers.test.ts`
- `bun x tsc --noEmit`
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Visible overlay focus no longer blocks the default AniSkip `Tab` binding. Bare `Tab` is now forwarded straight to mpv while the visible overlay is active, and modal overlays still retain their own normal focus behavior.
Added regression coverage for both the input-routing decision and the runtime plumbing that carries the new mpv forwarder into overlay window creation.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,45 @@
---
id: TASK-148
title: Fix Windows plugin env binary override resolution
status: Done
assignee:
- codex
created_date: '2026-03-09 00:00'
updated_date: '2026-03-09 00:00'
labels:
- windows
- plugin
- regression
dependencies: []
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Fix the mpv plugin's Windows binary override lookup so `SUBMINER_BINARY_PATH` still resolves when `SUBMINER_APPIMAGE_PATH` is unset. The current Lua resolver builds an array with a leading `nil`, which causes `ipairs` iteration to stop before ever reaching the later Windows override candidate.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 `scripts/test-plugin-binary-windows.lua` passes the env override regression that expects `.exe` suffix resolution from `SUBMINER_BINARY_PATH`.
- [x] #2 Existing plugin start/binary test gate stays green after the fix.
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Updated `plugin/subminer/binary.lua` so env override lookup checks `SUBMINER_APPIMAGE_PATH` and `SUBMINER_BINARY_PATH` sequentially instead of via a Lua array literal that truncates at the first `nil`. This restores Windows `.exe` suffix resolution for `SUBMINER_BINARY_PATH` when the AppImage env var is unset.
Verification:
- `lua scripts/test-plugin-binary-windows.lua`
- `bun run test:plugin:src`
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,71 @@
---
id: TASK-149
title: Cut patch release v0.5.5 for character dictionary updates and release guarding
status: Done
assignee:
- codex
created_date: '2026-03-09 01:10'
updated_date: '2026-03-09 01:14'
labels:
- release
- patch
dependencies:
- TASK-140
- TASK-141
- TASK-142
- TASK-143
- TASK-144
- TASK-145
- TASK-146
- TASK-148
references:
- package.json
- CHANGELOG.md
- scripts/build-changelog.ts
- scripts/build-changelog.test.ts
- docs/RELEASING.md
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Prepare and publish patch release `v0.5.5` after the failed `v0.5.4` tag by aligning package version metadata, generating committed changelog output from the pending release fragments, and hardening release validation so a future tag cannot ship with a mismatched `package.json` version.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Repository version metadata is updated to `0.5.5`.
- [x] #2 `CHANGELOG.md` contains the committed `v0.5.5` section and the consumed fragments are removed.
- [x] #3 Release validation rejects a requested release version when it differs from `package.json`.
- [x] #4 Release docs capture the required version/changelog prep before tagging.
- [x] #5 New `v0.5.5` release-prep commit and tag are pushed to `origin/main`.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Add a regression test for tagged-release/package version mismatch.
2. Update changelog validation to reject mismatched explicit release versions.
3. Bump `package.json`, generate committed `v0.5.5` changelog output, and remove consumed fragments.
4. Add a short `docs/RELEASING.md` checklist for the prep flow.
5. Run release verification, commit, tag, and push.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Added a regression test in `scripts/build-changelog.test.ts` that proves `changelog:check --version ...` rejects tag/package mismatches. Updated `scripts/build-changelog.ts` so tagged release validation now compares the explicit requested version against `package.json` before looking for pending fragments or the committed changelog section.
Bumped `package.json` from `0.5.3` to `0.5.5`, ran `bun run changelog:build --version 0.5.5 --date 2026-03-09`, and committed the generated `CHANGELOG.md` output while removing the consumed task fragments. Added `docs/RELEASING.md` with the required release-prep checklist so version bump + changelog generation happen before tagging.
Verification: `bun run changelog:lint`, `bun run changelog:check --version 0.5.5`, `bun run typecheck`, `bun run test:fast`, and `bun test scripts/build-changelog.test.ts src/release-workflow.test.ts`. `bun run format:check` still reports many unrelated pre-existing repo-wide Prettier warnings, so touched files were checked/formatted separately with `bunx prettier`.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Prepared patch release `v0.5.5` after the failed `v0.5.4` release attempt. Release metadata now matches the upcoming tag, the pending character-dictionary/overlay/plugin fragments are committed into `CHANGELOG.md`, and release validation now blocks future tag/package mismatches before publish.
Docs now include a short release checklist in `docs/RELEASING.md`. Validation passed for changelog lint/check, typecheck, targeted workflow tests, and the full fast test suite. Repo-wide Prettier remains noisy from unrelated existing files, but touched release files were formatted and verified.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -2,9 +2,10 @@
* SubMiner Example Configuration File
*
* This file is auto-generated from src/config/definitions.ts.
* Copy to $XDG_CONFIG_HOME/SubMiner/config.jsonc (or ~/.config/SubMiner/config.jsonc) and edit as needed.
* Copy to %APPDATA%/SubMiner/config.jsonc on Windows, or $XDG_CONFIG_HOME/SubMiner/config.jsonc (or ~/.config/SubMiner/config.jsonc) on Linux/macOS.
*/
{
// ==========================================
// Overlay Auto-Start
// When overlay connects to mpv, automatically show overlay and hide mpv subtitles.
@@ -17,7 +18,7 @@
// ==========================================
"texthooker": {
"launchAtStartup": true, // Launch texthooker server automatically when SubMiner starts. Values: true | false
"openBrowser": true, // Open browser setting. Values: true | false
"openBrowser": true // Open browser setting. Values: true | false
}, // Configure texthooker startup launch and browser opening behavior.
// ==========================================
@@ -27,7 +28,7 @@
// ==========================================
"websocket": {
"enabled": "auto", // Built-in subtitle websocket server mode. Values: auto | true | false
"port": 6677, // Built-in subtitle websocket server port.
"port": 6677 // Built-in subtitle websocket server port.
}, // Built-in WebSocket server broadcasts subtitle text to connected clients.
// ==========================================
@@ -37,7 +38,7 @@
// ==========================================
"annotationWebsocket": {
"enabled": true, // Annotated subtitle websocket server enabled state. Values: true | false
"port": 6678, // Annotated subtitle websocket server port.
"port": 6678 // Annotated subtitle websocket server port.
}, // Dedicated annotated subtitle websocket for bundled texthooker and token-aware clients.
// ==========================================
@@ -46,7 +47,7 @@
// Set to debug for full runtime diagnostics.
// ==========================================
"logging": {
"level": "info", // Minimum log level for runtime logging. Values: debug | info | warn | error
"level": "info" // Minimum log level for runtime logging. Values: debug | info | warn | error
}, // Controls logging verbosity.
// ==========================================
@@ -60,7 +61,7 @@
"mecab": true, // Warm up MeCab tokenizer at startup. Values: true | false
"yomitanExtension": true, // Warm up Yomitan extension at startup. Values: true | false
"subtitleDictionaries": true, // Warm up subtitle dictionaries at startup. Values: true | false
"jellyfinRemoteSession": true, // Warm up Jellyfin remote session at startup. Values: true | false
"jellyfinRemoteSession": true // Warm up Jellyfin remote session at startup. Values: true | false
}, // Background warmup controls for MeCab, Yomitan, dictionaries, and Jellyfin session.
// ==========================================
@@ -81,7 +82,7 @@
"toggleSecondarySub": "CommandOrControl+Shift+V", // Toggle secondary sub setting.
"markAudioCard": "CommandOrControl+Shift+A", // Mark audio card setting.
"openRuntimeOptions": "CommandOrControl+Shift+O", // Open runtime options setting.
"openJimaku": "Ctrl+Shift+J", // Open jimaku setting.
"openJimaku": "Ctrl+Shift+J" // Open jimaku setting.
}, // Overlay keyboard shortcuts. Set a shortcut to null to disable.
// ==========================================
@@ -101,7 +102,7 @@
"secondarySub": {
"secondarySubLanguages": [], // Secondary sub languages setting.
"autoLoadSecondarySub": false, // Auto load secondary sub setting. Values: true | false
"defaultMode": "hover", // Default mode setting.
"defaultMode": "hover" // Default mode setting.
}, // Dual subtitle track options.
// ==========================================
@@ -113,7 +114,7 @@
"alass_path": "", // Alass path setting.
"ffsubsync_path": "", // Ffsubsync path setting.
"ffmpeg_path": "", // Ffmpeg path setting.
"replace": true, // Replace the active subtitle file when sync completes. Values: true | false
"replace": true // Replace the active subtitle file when sync completes. Values: true | false
}, // Subsync engine and executable paths.
// ==========================================
@@ -121,7 +122,7 @@
// Initial vertical subtitle position from the bottom.
// ==========================================
"subtitlePosition": {
"yPercent": 10, // Y percent setting.
"yPercent": 10 // Y percent setting.
}, // Initial vertical subtitle position from the bottom.
// ==========================================
@@ -158,7 +159,7 @@
"N2": "#f5a97f", // N2 setting.
"N3": "#f9e2af", // N3 setting.
"N4": "#a6e3a1", // N4 setting.
"N5": "#8aadf4", // N5 setting.
"N5": "#8aadf4" // N5 setting.
}, // Jlpt colors setting.
"frequencyDictionary": {
"enabled": false, // Enable frequency-dictionary-based highlighting based on token rank. Values: true | false
@@ -167,7 +168,13 @@
"mode": "single", // single: use one color for all matching tokens. banded: use color ramp by frequency band. Values: single | banded
"matchMode": "headword", // headword: frequency lookup uses dictionary form. surface: lookup uses subtitle-visible token text. Values: headword | surface
"singleColor": "#f5a97f", // Color used when frequencyDictionary.mode is `single`.
"bandedColors": ["#ed8796", "#f5a97f", "#f9e2af", "#8bd5ca", "#8aadf4"], // Five colors used for rank bands when mode is `banded` (from most common to least within topX).
"bandedColors": [
"#ed8796",
"#f5a97f",
"#f9e2af",
"#8bd5ca",
"#8aadf4"
] // Five colors used for rank bands when mode is `banded` (from most common to least within topX).
}, // Frequency dictionary setting.
"secondary": {
"fontFamily": "Inter, Noto Sans, Helvetica Neue, sans-serif", // Font family setting.
@@ -182,8 +189,8 @@
"backgroundColor": "rgba(20, 22, 34, 0.78)", // Background color setting.
"backdropFilter": "blur(6px)", // Backdrop filter setting.
"fontWeight": "600", // Font weight setting.
"fontStyle": "normal", // Font style setting.
}, // Secondary setting.
"fontStyle": "normal" // Font style setting.
} // Secondary setting.
}, // Primary and secondary subtitle styling.
// ==========================================
@@ -194,8 +201,10 @@
"enabled": false, // Enable shared OpenAI-compatible AI provider features. Values: true | false
"apiKey": "", // Static API key for the shared OpenAI-compatible AI provider.
"apiKeyCommand": "", // Shell command used to resolve the shared AI provider API key.
"model": "openai/gpt-4o-mini", // Model setting.
"baseUrl": "https://openrouter.ai/api", // Base URL for the shared OpenAI-compatible AI provider.
"requestTimeoutMs": 15000, // Timeout in milliseconds for shared AI provider requests.
"systemPrompt": "You are a translation engine. Return only the translated text with no explanations.", // System prompt setting.
"requestTimeoutMs": 15000 // Timeout in milliseconds for shared AI provider requests.
}, // Canonical OpenAI-compatible provider transport settings shared by Anki and YouTube subtitle fixing.
// ==========================================
@@ -213,20 +222,22 @@
"enabled": true, // Enable local AnkiConnect-compatible proxy for push-based auto-enrichment. Values: true | false
"host": "127.0.0.1", // Bind host for local AnkiConnect proxy.
"port": 8766, // Bind port for local AnkiConnect proxy.
"upstreamUrl": "http://127.0.0.1:8765", // Upstream AnkiConnect URL proxied by local AnkiConnect proxy.
"upstreamUrl": "http://127.0.0.1:8765" // Upstream AnkiConnect URL proxied by local AnkiConnect proxy.
}, // Proxy setting.
"tags": ["SubMiner"], // Tags to add to cards mined or updated by SubMiner. Provide an empty array to disable automatic tagging.
"tags": [
"SubMiner"
], // Tags to add to cards mined or updated by SubMiner. Provide an empty array to disable automatic tagging.
"fields": {
"audio": "ExpressionAudio", // Audio setting.
"image": "Picture", // Image setting.
"sentence": "Sentence", // Sentence setting.
"miscInfo": "MiscInfo", // Misc info setting.
"translation": "SelectionText", // Translation setting.
"translation": "SelectionText" // Translation setting.
}, // Fields setting.
"ai": {
"enabled": false, // Enable AI provider usage for Anki translation/enrichment flows. Values: true | false
"model": "", // Optional model override for Anki AI translation/enrichment flows.
"systemPrompt": "", // Optional system prompt override for Anki AI translation/enrichment flows.
"systemPrompt": "" // Optional system prompt override for Anki AI translation/enrichment flows.
}, // Ai setting.
"media": {
"generateAudio": true, // Generate audio setting. Values: true | false
@@ -239,7 +250,7 @@
"animatedCrf": 35, // Animated crf setting.
"audioPadding": 0.5, // Audio padding setting.
"fallbackDuration": 3, // Fallback duration setting.
"maxMediaDuration": 30, // Max media duration setting.
"maxMediaDuration": 30 // Max media duration setting.
}, // Media setting.
"behavior": {
"overwriteAudio": true, // Overwrite audio setting. Values: true | false
@@ -247,7 +258,7 @@
"mediaInsertMode": "append", // Media insert mode setting.
"highlightWord": true, // Highlight word setting. Values: true | false
"notificationType": "osd", // Notification type setting.
"autoUpdateNewCards": true, // Automatically update newly added cards. Values: true | false
"autoUpdateNewCards": true // Automatically update newly added cards. Values: true | false
}, // Behavior setting.
"nPlusOne": {
"highlightEnabled": false, // Enable fast local highlighting for words already known in Anki. Values: true | false
@@ -256,20 +267,20 @@
"decks": [], // Decks used for N+1 known-word cache scope. Supports one or more deck names.
"minSentenceWords": 3, // Minimum sentence word count required for N+1 targeting (default: 3).
"nPlusOne": "#c6a0f6", // Color used for the single N+1 target token highlight.
"knownWord": "#a6da95", // Color used for legacy known-word highlights.
"knownWord": "#a6da95" // Color used for legacy known-word highlights.
}, // N plus one setting.
"metadata": {
"pattern": "[SubMiner] %f (%t)", // Pattern setting.
"pattern": "[SubMiner] %f (%t)" // Pattern setting.
}, // Metadata setting.
"isLapis": {
"enabled": false, // Enabled setting. Values: true | false
"sentenceCardModel": "Japanese sentences", // Sentence card model setting.
"sentenceCardModel": "Japanese sentences" // Sentence card model setting.
}, // Is lapis setting.
"isKiku": {
"enabled": false, // Enabled setting. Values: true | false
"fieldGrouping": "disabled", // Kiku duplicate-card field grouping mode. Values: auto | manual | disabled
"deleteDuplicateInAuto": true, // Delete duplicate in auto setting. Values: true | false
}, // Is kiku setting.
"deleteDuplicateInAuto": true // Delete duplicate in auto setting. Values: true | false
} // Is kiku setting.
}, // Automatic Anki updates and media generation options.
// ==========================================
@@ -279,7 +290,7 @@
"jimaku": {
"apiBaseUrl": "https://jimaku.cc", // Api base url setting.
"languagePreference": "ja", // Preferred language used in Jimaku search. Values: ja | en | none
"maxEntryResults": 10, // Maximum Jimaku search results returned.
"maxEntryResults": 10 // Maximum Jimaku search results returned.
}, // Jimaku API configuration and defaults.
// ==========================================
@@ -294,9 +305,12 @@
"fixWithAi": false, // Use shared AI provider to post-process whisper-generated YouTube subtitles. Values: true | false
"ai": {
"model": "", // Optional model override for YouTube subtitle AI post-processing.
"systemPrompt": "", // Optional system prompt override for YouTube subtitle AI post-processing.
"systemPrompt": "" // Optional system prompt override for YouTube subtitle AI post-processing.
}, // Ai setting.
"primarySubLanguages": ["ja", "jpn"], // Comma-separated primary subtitle language priority used by the launcher.
"primarySubLanguages": [
"ja",
"jpn"
] // Comma-separated primary subtitle language priority used by the launcher.
}, // Defaults for SubMiner YouTube subtitle generation.
// ==========================================
@@ -317,9 +331,9 @@
"collapsibleSections": {
"description": false, // Open the Description section by default in character dictionary glossary entries. Values: true | false
"characterInformation": false, // Open the Character Information section by default in character dictionary glossary entries. Values: true | false
"voicedBy": false, // Open the Voiced by section by default in character dictionary glossary entries. Values: true | false
}, // Collapsible sections setting.
}, // Character dictionary setting.
"voicedBy": false // Open the Voiced by section by default in character dictionary glossary entries. Values: true | false
} // Collapsible sections setting.
} // Character dictionary setting.
}, // Anilist API credentials and update behavior.
// ==========================================
@@ -343,8 +357,16 @@
"pullPictures": false, // Enable Jellyfin poster/icon fetching for launcher menus. Values: true | false
"iconCacheDir": "/tmp/subminer-jellyfin-icons", // Directory used by launcher for cached Jellyfin poster icons.
"directPlayPreferred": true, // Try direct play before server-managed transcoding when possible. Values: true | false
"directPlayContainers": ["mkv", "mp4", "webm", "mov", "flac", "mp3", "aac"], // Container allowlist for direct play decisions.
"transcodeVideoCodec": "h264", // Preferred transcode video codec when direct play is unavailable.
"directPlayContainers": [
"mkv",
"mp4",
"webm",
"mov",
"flac",
"mp3",
"aac"
], // Container allowlist for direct play decisions.
"transcodeVideoCodec": "h264" // Preferred transcode video codec when direct play is unavailable.
}, // Optional Jellyfin integration for auth, browsing, and playback launch.
// ==========================================
@@ -355,7 +377,7 @@
"discordPresence": {
"enabled": false, // Enable optional Discord Rich Presence updates. Values: true | false
"updateIntervalMs": 3000, // Minimum interval between presence payload updates.
"debounceMs": 750, // Debounce delay used to collapse bursty presence updates.
"debounceMs": 750 // Debounce delay used to collapse bursty presence updates.
}, // Optional Discord Rich Presence activity card updates for current playback/study session.
// ==========================================
@@ -377,7 +399,7 @@
"telemetryDays": 30, // Telemetry retention window in days.
"dailyRollupsDays": 365, // Daily rollup retention window in days.
"monthlyRollupsDays": 1825, // Monthly rollup retention window in days.
"vacuumIntervalDays": 7, // Minimum days between VACUUM runs.
}, // Retention setting.
}, // Enable/disable immersion tracking.
"vacuumIntervalDays": 7 // Minimum days between VACUUM runs.
} // Retention setting.
} // Enable/disable immersion tracking.
}

21
docs/RELEASING.md Normal file
View File

@@ -0,0 +1,21 @@
<!-- read_when: cutting a tagged release or debugging release prep -->
# Releasing
1. Confirm `main` is green: `gh run list --workflow CI --limit 5`.
2. Bump `package.json` to the release version.
3. Build release metadata before tagging:
`bun run changelog:build --version <version>`
4. Review `CHANGELOG.md`.
5. Run release gate locally:
`bun run changelog:check --version <version>`
`bun run test:fast`
`bun run typecheck`
6. Commit release prep.
7. Tag the commit: `git tag v<version>`.
8. Push commit + tag.
Notes:
- `changelog:check` now rejects tag/package version mismatches.
- Do not tag while `changes/*.md` fragments still exist.

View File

@@ -1,6 +1,6 @@
{
"name": "subminer",
"version": "0.5.2",
"version": "0.5.5",
"description": "All-in-one sentence mining overlay with AnkiConnect and dictionary integration",
"packageManager": "bun@1.3.5",
"main": "dist/main-entry.js",
@@ -30,8 +30,8 @@
"test:plugin:src": "lua scripts/test-plugin-start-gate.lua && lua scripts/test-plugin-binary-windows.lua",
"test:launcher:smoke:src": "bun test launcher/smoke.e2e.test.ts",
"test:launcher:src": "bun test launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/mpv.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/smoke.e2e.test.ts && bun run test:plugin:src",
"test:core:src": "bun test src/cli/args.test.ts src/cli/help.test.ts src/shared/setup-state.test.ts src/core/services/cli-command.test.ts src/core/services/field-grouping-overlay.test.ts src/core/services/numeric-shortcut-session.test.ts src/core/services/secondary-subtitle.test.ts src/core/services/mpv-render-metrics.test.ts src/core/services/overlay-content-measurement.test.ts src/core/services/mpv-control.test.ts src/core/services/mpv.test.ts src/core/services/runtime-options-ipc.test.ts src/core/services/runtime-config.test.ts src/core/services/yomitan-extension-paths.test.ts src/core/services/config-hot-reload.test.ts src/core/services/discord-presence.test.ts src/core/services/tokenizer.test.ts src/core/services/tokenizer/annotation-stage.test.ts src/core/services/tokenizer/parser-selection-stage.test.ts src/core/services/tokenizer/parser-enrichment-stage.test.ts src/core/services/subsync.test.ts src/core/services/overlay-bridge.test.ts src/core/services/overlay-shortcut-handler.test.ts src/core/services/mining.test.ts src/core/services/anki-jimaku.test.ts src/core/services/jimaku-download-path.test.ts src/core/services/jellyfin.test.ts src/core/services/jellyfin-remote.test.ts src/core/services/immersion-tracker-service.test.ts src/core/services/overlay-runtime-init.test.ts src/core/services/app-ready.test.ts src/core/services/startup-bootstrap.test.ts src/core/services/subtitle-processing-controller.test.ts src/core/services/anilist/anilist-update-queue.test.ts src/core/utils/shortcut-config.test.ts src/main/runtime/first-run-setup-plugin.test.ts src/main/runtime/first-run-setup-service.test.ts src/main/runtime/first-run-setup-window.test.ts src/main/runtime/tray-runtime.test.ts src/main/runtime/tray-main-actions.test.ts src/main/runtime/tray-main-deps.test.ts src/main/runtime/tray-runtime-handlers.test.ts src/main/runtime/cli-command-context-main-deps.test.ts src/main/runtime/app-ready-main-deps.test.ts src/renderer/error-recovery.test.ts 
src/renderer/subtitle-render.test.ts src/renderer/handlers/mouse.test.ts src/renderer/handlers/keyboard.test.ts src/renderer/modals/jimaku.test.ts src/subsync/utils.test.ts src/main/anilist-url-guard.test.ts src/window-trackers/x11-tracker.test.ts src/window-trackers/windows-helper.test.ts src/window-trackers/windows-tracker.test.ts launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/setup-gate.test.ts",
"test:core:dist": "bun test dist/cli/args.test.js dist/cli/help.test.js dist/core/services/cli-command.test.js dist/core/services/ipc.test.js dist/core/services/anki-jimaku-ipc.test.js dist/core/services/field-grouping-overlay.test.js dist/core/services/numeric-shortcut-session.test.js dist/core/services/secondary-subtitle.test.js dist/core/services/mpv-render-metrics.test.js dist/core/services/overlay-content-measurement.test.js dist/core/services/mpv-control.test.js dist/core/services/mpv.test.js dist/core/services/runtime-options-ipc.test.js dist/core/services/runtime-config.test.js dist/core/services/yomitan-extension-paths.test.js dist/core/services/config-hot-reload.test.js dist/core/services/discord-presence.test.js dist/core/services/tokenizer.test.js dist/core/services/tokenizer/annotation-stage.test.js dist/core/services/tokenizer/parser-selection-stage.test.js dist/core/services/tokenizer/parser-enrichment-stage.test.js dist/core/services/subsync.test.js dist/core/services/overlay-bridge.test.js dist/core/services/overlay-manager.test.js dist/core/services/overlay-shortcut-handler.test.js dist/core/services/mining.test.js dist/core/services/anki-jimaku.test.js dist/core/services/jimaku-download-path.test.js dist/core/services/jellyfin.test.js dist/core/services/jellyfin-remote.test.js dist/core/services/immersion-tracker-service.test.js dist/core/services/overlay-runtime-init.test.js dist/core/services/app-ready.test.js dist/core/services/startup-bootstrap.test.js dist/core/services/subtitle-processing-controller.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/anilist/anilist-update-queue.test.js dist/renderer/error-recovery.test.js dist/renderer/subtitle-render.test.js dist/renderer/handlers/mouse.test.js dist/renderer/handlers/keyboard.test.js dist/renderer/modals/jimaku.test.js dist/subsync/utils.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/x11-tracker.test.js 
dist/window-trackers/windows-helper.test.js dist/window-trackers/windows-tracker.test.js",
"test:core:src": "bun test src/cli/args.test.ts src/cli/help.test.ts src/shared/setup-state.test.ts src/core/services/cli-command.test.ts src/core/services/field-grouping-overlay.test.ts src/core/services/numeric-shortcut-session.test.ts src/core/services/secondary-subtitle.test.ts src/core/services/mpv-render-metrics.test.ts src/core/services/overlay-content-measurement.test.ts src/core/services/mpv-control.test.ts src/core/services/mpv.test.ts src/core/services/runtime-options-ipc.test.ts src/core/services/runtime-config.test.ts src/core/services/yomitan-extension-paths.test.ts src/core/services/config-hot-reload.test.ts src/core/services/discord-presence.test.ts src/core/services/tokenizer.test.ts src/core/services/tokenizer/annotation-stage.test.ts src/core/services/tokenizer/parser-selection-stage.test.ts src/core/services/tokenizer/parser-enrichment-stage.test.ts src/core/services/subsync.test.ts src/core/services/overlay-bridge.test.ts src/core/services/overlay-shortcut-handler.test.ts src/core/services/mining.test.ts src/core/services/anki-jimaku.test.ts src/core/services/jimaku-download-path.test.ts src/core/services/jellyfin.test.ts src/core/services/jellyfin-remote.test.ts src/core/services/immersion-tracker-service.test.ts src/core/services/overlay-runtime-init.test.ts src/core/services/app-ready.test.ts src/core/services/startup-bootstrap.test.ts src/core/services/subtitle-processing-controller.test.ts src/core/services/anilist/anilist-update-queue.test.ts src/core/utils/shortcut-config.test.ts src/main/runtime/first-run-setup-plugin.test.ts src/main/runtime/first-run-setup-service.test.ts src/main/runtime/first-run-setup-window.test.ts src/main/runtime/tray-runtime.test.ts src/main/runtime/tray-main-actions.test.ts src/main/runtime/tray-main-deps.test.ts src/main/runtime/tray-runtime-handlers.test.ts src/main/runtime/cli-command-context-main-deps.test.ts src/main/runtime/app-ready-main-deps.test.ts src/renderer/error-recovery.test.ts 
src/renderer/subtitle-render.test.ts src/renderer/handlers/mouse.test.ts src/renderer/handlers/keyboard.test.ts src/renderer/modals/jimaku.test.ts src/subsync/utils.test.ts src/main/anilist-url-guard.test.ts src/window-trackers/hyprland-tracker.test.ts src/window-trackers/x11-tracker.test.ts src/window-trackers/windows-helper.test.ts src/window-trackers/windows-tracker.test.ts launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/setup-gate.test.ts",
"test:core:dist": "bun test dist/cli/args.test.js dist/cli/help.test.js dist/core/services/cli-command.test.js dist/core/services/ipc.test.js dist/core/services/anki-jimaku-ipc.test.js dist/core/services/field-grouping-overlay.test.js dist/core/services/numeric-shortcut-session.test.js dist/core/services/secondary-subtitle.test.js dist/core/services/mpv-render-metrics.test.js dist/core/services/overlay-content-measurement.test.js dist/core/services/mpv-control.test.js dist/core/services/mpv.test.js dist/core/services/runtime-options-ipc.test.js dist/core/services/runtime-config.test.js dist/core/services/yomitan-extension-paths.test.js dist/core/services/config-hot-reload.test.js dist/core/services/discord-presence.test.js dist/core/services/tokenizer.test.js dist/core/services/tokenizer/annotation-stage.test.js dist/core/services/tokenizer/parser-selection-stage.test.js dist/core/services/tokenizer/parser-enrichment-stage.test.js dist/core/services/subsync.test.js dist/core/services/overlay-bridge.test.js dist/core/services/overlay-manager.test.js dist/core/services/overlay-shortcut-handler.test.js dist/core/services/mining.test.js dist/core/services/anki-jimaku.test.js dist/core/services/jimaku-download-path.test.js dist/core/services/jellyfin.test.js dist/core/services/jellyfin-remote.test.js dist/core/services/immersion-tracker-service.test.js dist/core/services/overlay-runtime-init.test.js dist/core/services/app-ready.test.js dist/core/services/startup-bootstrap.test.js dist/core/services/subtitle-processing-controller.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/anilist/anilist-update-queue.test.js dist/renderer/error-recovery.test.js dist/renderer/subtitle-render.test.js dist/renderer/handlers/mouse.test.js dist/renderer/handlers/keyboard.test.js dist/renderer/modals/jimaku.test.js dist/subsync/utils.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/hyprland-tracker.test.js 
dist/window-trackers/x11-tracker.test.js dist/window-trackers/windows-helper.test.js dist/window-trackers/windows-tracker.test.js",
"test:core:smoke:dist": "bun test dist/cli/help.test.js dist/core/services/runtime-config.test.js dist/core/services/ipc.test.js dist/core/services/overlay-manager.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/startup-bootstrap.test.js dist/renderer/error-recovery.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/x11-tracker.test.js",
"test:smoke:dist": "bun run test:config:smoke:dist && bun run test:core:smoke:dist",
"test:subtitle:src": "bun test src/core/services/subsync.test.ts src/subsync/utils.test.ts",
@@ -60,7 +60,8 @@
"build:mac": "bun run build && electron-builder --mac dmg zip --publish never",
"build:mac:unsigned": "bun run build && env -u APPLE_ID -u APPLE_APP_SPECIFIC_PASSWORD -u APPLE_TEAM_ID -u CSC_LINK -u CSC_KEY_PASSWORD CSC_IDENTITY_AUTO_DISCOVERY=false electron-builder --mac dmg zip --publish never",
"build:mac:zip": "bun run build && electron-builder --mac zip --publish never",
"build:win": "bun run build && electron-builder --win nsis zip --publish never"
"build:win": "bun run build && electron-builder --win nsis zip --publish never",
"build:win:unsigned": "bun run build && node scripts/build-win-unsigned.mjs"
},
"keywords": [
"anki",

View File

@@ -66,7 +66,7 @@ aniskip_show_button=yes
aniskip_button_text=You can skip by pressing %s
# Keybinding to execute intro skip when button is visible.
aniskip_button_key=y-k
aniskip_button_key=TAB
# OSD hint duration in seconds (shown during first 3s of intro).
aniskip_button_duration=3

View File

@@ -1,5 +1,6 @@
local M = {}
local matcher = require("aniskip_match")
local DEFAULT_ANISKIP_BUTTON_KEY = "TAB"
function M.create(ctx)
local mp = ctx.mp
@@ -464,7 +465,7 @@ function M.create(ctx)
local intro_start = state.aniskip.intro_start or -1
local hint_window_end = intro_start + 3
if in_intro and not state.aniskip.prompt_shown and now >= intro_start and now < hint_window_end then
local key = opts.aniskip_button_key ~= "" and opts.aniskip_button_key or "y-k"
local key = opts.aniskip_button_key ~= "" and opts.aniskip_button_key or DEFAULT_ANISKIP_BUTTON_KEY
local message = string.format(opts.aniskip_button_text, key)
mp.osd_message(message, tonumber(opts.aniskip_button_duration) or 3)
state.aniskip.prompt_shown = true

View File

@@ -107,12 +107,8 @@ function M.create(ctx)
end
local function find_binary_override()
local candidates = {
resolve_binary_candidate(os.getenv("SUBMINER_APPIMAGE_PATH")),
resolve_binary_candidate(os.getenv("SUBMINER_BINARY_PATH")),
}
for _, path in ipairs(candidates) do
for _, env_name in ipairs({ "SUBMINER_APPIMAGE_PATH", "SUBMINER_BINARY_PATH" }) do
local path = resolve_binary_candidate(os.getenv(env_name))
if path and path ~= "" then
return path
end

View File

@@ -1,4 +1,5 @@
local M = {}
local DEFAULT_ANISKIP_BUTTON_KEY = "TAB"
local function normalize_socket_path_option(socket_path, default_socket_path)
if type(default_socket_path) ~= "string" then
@@ -42,7 +43,7 @@ function M.load(options_lib, default_socket_path)
aniskip_payload = "",
aniskip_show_button = true,
aniskip_button_text = "You can skip by pressing %s",
aniskip_button_key = "y-k",
aniskip_button_key = DEFAULT_ANISKIP_BUTTON_KEY,
aniskip_button_duration = 3,
}

View File

@@ -1,4 +1,6 @@
local M = {}
local DEFAULT_ANISKIP_BUTTON_KEY = "TAB"
local LEGACY_ANISKIP_BUTTON_KEY = "y-k"
function M.create(ctx)
local mp = ctx.mp
@@ -89,8 +91,11 @@ function M.create(ctx)
aniskip.skip_intro_now()
end)
end
if opts.aniskip_button_key ~= "y-k" then
mp.add_key_binding("y-k", "subminer-skip-intro-fallback", function()
if
opts.aniskip_button_key ~= LEGACY_ANISKIP_BUTTON_KEY
and opts.aniskip_button_key ~= DEFAULT_ANISKIP_BUTTON_KEY
then
mp.add_key_binding(LEGACY_ANISKIP_BUTTON_KEY, "subminer-skip-intro-fallback", function()
aniskip.skip_intro_now()
end)
end

View File

@@ -1,15 +0,0 @@
## Highlights
### Internal
- Release: Pinned the Windows SignPath submission workflow to an explicit artifact-configuration slug instead of relying on the SignPath project's default configuration.
## Installation
See the README and docs/installation guide for full setup steps.
## Assets
- Linux: `SubMiner.AppImage`
- macOS: `SubMiner-*.dmg` and `SubMiner-*.zip`
- Optional extras: `subminer-assets.tar.gz` and the `subminer` launcher
Note: the `subminer` wrapper script uses Bun (`#!/usr/bin/env bun`), so `bun` must be installed and on `PATH`.

View File

@@ -34,12 +34,22 @@ test('writeChangelogArtifacts ignores README, groups fragments by type, writes r
const { writeChangelogArtifacts } = await loadModule();
const workspace = createWorkspace('write-artifacts');
const projectRoot = path.join(workspace, 'SubMiner');
const existingChangelog = ['# Changelog', '', '## v0.4.0 (2026-03-01)', '- Existing fix', ''].join('\n');
const existingChangelog = [
'# Changelog',
'',
'## v0.4.0 (2026-03-01)',
'- Existing fix',
'',
].join('\n');
fs.mkdirSync(projectRoot, { recursive: true });
fs.mkdirSync(path.join(projectRoot, 'changes'), { recursive: true });
fs.writeFileSync(path.join(projectRoot, 'CHANGELOG.md'), existingChangelog, 'utf8');
fs.writeFileSync(path.join(projectRoot, 'changes', 'README.md'), '# Changelog Fragments\n\nIgnored helper text.\n', 'utf8');
fs.writeFileSync(
path.join(projectRoot, 'changes', 'README.md'),
'# Changelog Fragments\n\nIgnored helper text.\n',
'utf8',
);
fs.writeFileSync(
path.join(projectRoot, 'changes', '001.md'),
['type: added', 'area: overlay', '', '- Added release fragments.'].join('\n'),
@@ -59,13 +69,10 @@ test('writeChangelogArtifacts ignores README, groups fragments by type, writes r
});
assert.deepEqual(result.outputPaths, [path.join(projectRoot, 'CHANGELOG.md')]);
assert.deepEqual(
result.deletedFragmentPaths,
[
path.join(projectRoot, 'changes', '001.md'),
path.join(projectRoot, 'changes', '002.md'),
],
);
assert.deepEqual(result.deletedFragmentPaths, [
path.join(projectRoot, 'changes', '001.md'),
path.join(projectRoot, 'changes', '002.md'),
]);
assert.equal(fs.existsSync(path.join(projectRoot, 'changes', '001.md')), false);
assert.equal(fs.existsSync(path.join(projectRoot, 'changes', '002.md')), false);
assert.equal(fs.existsSync(path.join(projectRoot, 'changes', 'README.md')), true);
@@ -76,7 +83,10 @@ test('writeChangelogArtifacts ignores README, groups fragments by type, writes r
/^# Changelog\n\n## v0\.4\.1 \(2026-03-07\)\n\n### Added\n- Overlay: Added release fragments\.\n\n### Fixed\n- Release: Fixed release notes generation\.\n\n## v0\.4\.0 \(2026-03-01\)\n- Existing fix\n$/m,
);
const releaseNotes = fs.readFileSync(path.join(projectRoot, 'release', 'release-notes.md'), 'utf8');
const releaseNotes = fs.readFileSync(
path.join(projectRoot, 'release', 'release-notes.md'),
'utf8',
);
assert.match(releaseNotes, /## Highlights\n### Added\n- Overlay: Added release fragments\./);
assert.match(releaseNotes, /### Fixed\n- Release: Fixed release notes generation\./);
assert.match(releaseNotes, /## Installation\n\nSee the README and docs\/installation guide/);
@@ -92,7 +102,11 @@ test('verifyChangelogReadyForRelease ignores README but rejects pending fragment
fs.mkdirSync(path.join(projectRoot, 'changes'), { recursive: true });
fs.writeFileSync(path.join(projectRoot, 'CHANGELOG.md'), '# Changelog\n', 'utf8');
fs.writeFileSync(path.join(projectRoot, 'changes', 'README.md'), '# Changelog Fragments\n', 'utf8');
fs.writeFileSync(
path.join(projectRoot, 'changes', 'README.md'),
'# Changelog Fragments\n',
'utf8',
);
fs.writeFileSync(path.join(projectRoot, 'changes', '001.md'), '- Pending fragment.\n', 'utf8');
try {
@@ -112,6 +126,33 @@ test('verifyChangelogReadyForRelease ignores README but rejects pending fragment
}
});
test('verifyChangelogReadyForRelease rejects explicit release versions that do not match package.json', async () => {
const { verifyChangelogReadyForRelease } = await loadModule();
const workspace = createWorkspace('verify-release-version-match');
const projectRoot = path.join(workspace, 'SubMiner');
fs.mkdirSync(path.join(projectRoot, 'changes'), { recursive: true });
fs.writeFileSync(
path.join(projectRoot, 'package.json'),
JSON.stringify({ name: 'subminer', version: '0.4.0' }, null, 2),
'utf8',
);
fs.writeFileSync(
path.join(projectRoot, 'CHANGELOG.md'),
'# Changelog\n\n## v0.4.1 (2026-03-09)\n- Ready.\n',
'utf8',
);
try {
assert.throws(
() => verifyChangelogReadyForRelease({ cwd: projectRoot, version: '0.4.1' }),
/package\.json version \(0\.4\.0\) does not match requested release version \(0\.4\.1\)/,
);
} finally {
fs.rmSync(workspace, { recursive: true, force: true });
}
});
test('verifyChangelogFragments rejects invalid metadata', async () => {
const { verifyChangelogFragments } = await loadModule();
const workspace = createWorkspace('lint-invalid');

View File

@@ -56,7 +56,10 @@ function resolveDate(date?: string): string {
return date ?? new Date().toISOString().slice(0, 10);
}
function resolvePackageVersion(cwd: string, readFileSync: (candidate: string, encoding: BufferEncoding) => string): string {
function resolvePackageVersion(
cwd: string,
readFileSync: (candidate: string, encoding: BufferEncoding) => string,
): string {
const packageJsonPath = path.join(cwd, 'package.json');
const packageJson = JSON.parse(readFileSync(packageJsonPath, 'utf8')) as { version?: string };
if (!packageJson.version) {
@@ -65,22 +68,42 @@ function resolvePackageVersion(cwd: string, readFileSync: (candidate: string, en
return normalizeVersion(packageJson.version);
}
function resolveVersion(
options: Pick<ChangelogOptions, 'cwd' | 'version' | 'deps'>,
): string {
function resolveVersion(options: Pick<ChangelogOptions, 'cwd' | 'version' | 'deps'>): string {
const cwd = options.cwd ?? process.cwd();
const readFileSync = options.deps?.readFileSync ?? fs.readFileSync;
return normalizeVersion(options.version ?? resolvePackageVersion(cwd, readFileSync));
}
function verifyRequestedVersionMatchesPackageVersion(
options: Pick<ChangelogOptions, 'cwd' | 'version' | 'deps'>,
): void {
if (!options.version) {
return;
}
const cwd = options.cwd ?? process.cwd();
const existsSync = options.deps?.existsSync ?? fs.existsSync;
const readFileSync = options.deps?.readFileSync ?? fs.readFileSync;
const packageJsonPath = path.join(cwd, 'package.json');
if (!existsSync(packageJsonPath)) {
return;
}
const packageVersion = resolvePackageVersion(cwd, readFileSync);
const requestedVersion = normalizeVersion(options.version);
if (packageVersion !== requestedVersion) {
throw new Error(
`package.json version (${packageVersion}) does not match requested release version (${requestedVersion}).`,
);
}
}
function resolveChangesDir(cwd: string): string {
return path.join(cwd, 'changes');
}
function resolveFragmentPaths(
cwd: string,
deps?: ChangelogFsDeps,
): string[] {
function resolveFragmentPaths(cwd: string, deps?: ChangelogFsDeps): string[] {
const changesDir = resolveChangesDir(cwd);
const existsSync = deps?.existsSync ?? fs.existsSync;
const readdirSync = deps?.readdirSync ?? fs.readdirSync;
@@ -90,7 +113,10 @@ function resolveFragmentPaths(
}
return readdirSync(changesDir, { withFileTypes: true })
.filter((entry) => entry.isFile() && entry.name.endsWith('.md') && entry.name.toLowerCase() !== 'readme.md')
.filter(
(entry) =>
entry.isFile() && entry.name.endsWith('.md') && entry.name.toLowerCase() !== 'readme.md',
)
.map((entry) => path.join(changesDir, entry.name))
.sort();
}
@@ -112,7 +138,10 @@ function normalizeFragmentBullets(content: string): string[] {
return lines;
}
function parseFragmentMetadata(content: string, fragmentPath: string): {
function parseFragmentMetadata(
content: string,
fragmentPath: string,
): {
area: string;
body: string;
type: FragmentType;
@@ -144,9 +173,7 @@ function parseFragmentMetadata(content: string, fragmentPath: string): {
const type = metadata.get('type');
if (!type || !CHANGE_TYPES.includes(type as FragmentType)) {
throw new Error(
`${fragmentPath} must declare type as one of: ${CHANGE_TYPES.join(', ')}.`,
);
throw new Error(`${fragmentPath} must declare type as one of: ${CHANGE_TYPES.join(', ')}.`);
}
const area = metadata.get('area');
@@ -166,10 +193,7 @@ function parseFragmentMetadata(content: string, fragmentPath: string): {
};
}
function readChangeFragments(
cwd: string,
deps?: ChangelogFsDeps,
): ChangeFragment[] {
function readChangeFragments(cwd: string, deps?: ChangelogFsDeps): ChangeFragment[] {
const readFileSync = deps?.readFileSync ?? fs.readFileSync;
return resolveFragmentPaths(cwd, deps).map((fragmentPath) => {
const parsed = parseFragmentMetadata(readFileSync(fragmentPath, 'utf8'), fragmentPath);
@@ -202,7 +226,9 @@ function renderGroupedChanges(fragments: ChangeFragment[]): string {
}
const bullets = typeFragments
.flatMap((fragment) => fragment.bullets.map((bullet) => renderFragmentBullet(fragment, bullet)))
.flatMap((fragment) =>
fragment.bullets.map((bullet) => renderFragmentBullet(fragment, bullet)),
)
.join('\n');
return [`### ${CHANGE_TYPE_HEADINGS[type]}\n${bullets}`];
});
@@ -215,9 +241,7 @@ function buildReleaseSection(version: string, date: string, fragments: ChangeFra
throw new Error('No changelog fragments found in changes/.');
}
return [`## v${version} (${date})`, '', renderGroupedChanges(fragments), ''].join(
'\n',
);
return [`## v${version} (${date})`, '', renderGroupedChanges(fragments), ''].join('\n');
}
function ensureChangelogHeader(existingChangelog: string): string {
@@ -231,7 +255,11 @@ function ensureChangelogHeader(existingChangelog: string): string {
return `${CHANGELOG_HEADER}\n\n${trimmed}\n`;
}
function prependReleaseSection(existingChangelog: string, releaseSection: string, version: string): string {
function prependReleaseSection(
existingChangelog: string,
releaseSection: string,
version: string,
): string {
const normalizedExisting = ensureChangelogHeader(existingChangelog);
if (extractReleaseSectionBody(normalizedExisting, version) !== null) {
throw new Error(`CHANGELOG already contains a section for v${version}.`);
@@ -263,9 +291,7 @@ function extractReleaseSectionBody(changelog: string, version: string): string |
return body.trim();
}
export function resolveChangelogOutputPaths(options?: {
cwd?: string;
}): string[] {
export function resolveChangelogOutputPaths(options?: { cwd?: string }): string[] {
const cwd = options?.cwd ?? process.cwd();
return [path.join(cwd, 'CHANGELOG.md')];
}
@@ -290,11 +316,7 @@ function renderReleaseNotes(changes: string): string {
].join('\n');
}
function writeReleaseNotesFile(
cwd: string,
changes: string,
deps?: ChangelogFsDeps,
): string {
function writeReleaseNotesFile(cwd: string, changes: string, deps?: ChangelogFsDeps): string {
const mkdirSync = deps?.mkdirSync ?? fs.mkdirSync;
const writeFileSync = deps?.writeFileSync ?? fs.writeFileSync;
const releaseNotesPath = path.join(cwd, RELEASE_NOTES_PATH);
@@ -359,10 +381,13 @@ export function verifyChangelogFragments(options?: ChangelogOptions): void {
export function verifyChangelogReadyForRelease(options?: ChangelogOptions): void {
const cwd = options?.cwd ?? process.cwd();
const readFileSync = options?.deps?.readFileSync ?? fs.readFileSync;
verifyRequestedVersionMatchesPackageVersion(options ?? {});
const version = resolveVersion(options ?? {});
const pendingFragments = resolveFragmentPaths(cwd, options?.deps);
if (pendingFragments.length > 0) {
throw new Error(`Pending changelog fragments must be released first: ${pendingFragments.join(', ')}`);
throw new Error(
`Pending changelog fragments must be released first: ${pendingFragments.join(', ')}`,
);
}
const changelogPath = path.join(cwd, 'CHANGELOG.md');
@@ -382,14 +407,14 @@ function isFragmentPath(candidate: string): boolean {
function isIgnoredPullRequestPath(candidate: string): boolean {
return (
candidate === 'CHANGELOG.md'
|| candidate === 'release/release-notes.md'
|| candidate === 'AGENTS.md'
|| candidate === 'README.md'
|| candidate.startsWith('changes/')
|| candidate.startsWith('docs/')
|| candidate.startsWith('.github/')
|| candidate.startsWith('backlog/')
candidate === 'CHANGELOG.md' ||
candidate === 'release/release-notes.md' ||
candidate === 'AGENTS.md' ||
candidate === 'README.md' ||
candidate.startsWith('changes/') ||
candidate.startsWith('docs/') ||
candidate.startsWith('.github/') ||
candidate.startsWith('backlog/')
);
}
@@ -412,9 +437,7 @@ export function verifyPullRequestChangelog(options: PullRequestChangelogOptions)
const hasFragment = normalizedEntries.some(
(entry) => entry.status !== 'D' && isFragmentPath(entry.path),
);
const requiresFragment = normalizedEntries.some(
(entry) => !isIgnoredPullRequestPath(entry.path),
);
const requiresFragment = normalizedEntries.some((entry) => !isIgnoredPullRequestPath(entry.path));
if (requiresFragment && !hasFragment) {
throw new Error(

View File

@@ -0,0 +1,30 @@
// Build an unsigned Windows release: strip every code-signing variable from the
// child environment, then run electron-builder for the NSIS and zip targets.
import { spawnSync } from 'node:child_process';
import { fileURLToPath } from 'node:url';
// Copy the environment so the parent process env is never mutated.
const env = { ...process.env };
// Remove both the generic CSC_* and Windows-specific WIN_CSC_* signing
// variables so electron-builder cannot pick up a certificate.
for (const name of [
  'CSC_LINK',
  'CSC_KEY_PASSWORD',
  'WIN_CSC_LINK',
  'WIN_CSC_KEY_PASSWORD',
  'CSC_NAME',
  'WIN_CSC_NAME',
]) {
  delete env[name];
}
// Belt-and-braces: also disable electron-builder's identity auto-discovery.
env.CSC_IDENTITY_AUTO_DISCOVERY = 'false';
// Resolve the electron-builder CLI entry point relative to this script's location.
const electronBuilderCli = fileURLToPath(new URL('../node_modules/electron-builder/out/cli/cli.js', import.meta.url));
// Run the CLI with the current Node binary, streaming its output to our stdio.
const result = spawnSync(process.execPath, [electronBuilderCli, '--win', 'nsis', 'zip', '--publish', 'never'], {
  stdio: 'inherit',
  env,
});
// spawnSync surfaces launch failures (e.g. missing CLI file) via `error`.
if (result.error) {
  throw result.error;
}
// Propagate the child's exit code; a null status (signal-terminated child) is a failure.
process.exit(result.status ?? 1);

View File

@@ -6,6 +6,8 @@ local function run_plugin_scenario(config)
sync_calls = {},
script_messages = {},
events = {},
observers = {},
key_bindings = {},
osd = {},
logs = {},
property_sets = {},
@@ -37,10 +39,29 @@ local function run_plugin_scenario(config)
return ""
end
function mp.get_property_native(_name)
function mp.get_property_native(name)
if name == "osd-dimensions" then
return config.osd_dimensions or {
w = 1280,
h = config.osd_height or 720,
}
end
return config.chapter_list or {}
end
function mp.get_property_number(name)
if name == "time-pos" then
return config.time_pos
end
if name == "sub-pos" then
return config.sub_pos or 100
end
if name == "osd-height" then
return config.osd_height or 720
end
return nil
end
function mp.get_script_directory()
return "plugin/subminer"
end
@@ -123,7 +144,13 @@ local function run_plugin_scenario(config)
recorded.script_messages[name] = fn
end
function mp.add_key_binding(_keys, _name, _fn) end
function mp.add_key_binding(keys, name, fn)
recorded.key_bindings[#recorded.key_bindings + 1] = {
keys = keys,
name = name,
fn = fn,
}
end
function mp.register_event(name, fn)
if recorded.events[name] == nil then
recorded.events[name] = {}
@@ -131,7 +158,12 @@ local function run_plugin_scenario(config)
recorded.events[name][#recorded.events[name] + 1] = fn
end
function mp.add_hook(_name, _prio, _fn) end
function mp.observe_property(_name, _kind, _fn) end
function mp.observe_property(name, _kind, fn)
if recorded.observers[name] == nil then
recorded.observers[name] = {}
end
recorded.observers[name][#recorded.observers[name] + 1] = fn
end
function mp.osd_message(message, _duration)
recorded.osd[#recorded.osd + 1] = message
end
@@ -177,6 +209,12 @@ local function run_plugin_scenario(config)
end
function utils.parse_json(json)
if json == '{"enabled":true,"amount":125}' then
return {
enabled = true,
amount = 125,
}, nil
end
if json == "__MAL_FOUND__" then
return {
categories = {
@@ -213,6 +251,26 @@ local function run_plugin_scenario(config)
package.loaded["mp.msg"] = nil
package.loaded["mp.options"] = nil
package.loaded["mp.utils"] = nil
package.loaded["binary"] = nil
package.loaded["bootstrap"] = nil
package.loaded["environment"] = nil
package.loaded["hover"] = nil
package.loaded["init"] = nil
package.loaded["lifecycle"] = nil
package.loaded["log"] = nil
package.loaded["messages"] = nil
package.loaded["options"] = nil
package.loaded["process"] = nil
package.loaded["state"] = nil
package.loaded["ui"] = nil
package.loaded["aniskip"] = nil
_G.__subminer_plugin_bootstrapped = nil
local original_package_config = package.config
if config.platform == "windows" then
package.config = "\\\n;\n?\n!\n-\n"
else
package.config = "/\n;\n?\n!\n-\n"
end
package.preload["mp"] = function()
return mp
@@ -246,6 +304,7 @@ local function run_plugin_scenario(config)
end
local ok, err = pcall(dofile, "plugin/subminer/main.lua")
package.config = original_package_config
if not ok then
return nil, err, recorded
end
@@ -412,6 +471,22 @@ local function fire_event(recorded, name)
end
end
-- Dispatches a simulated mpv property-change notification: every callback the
-- plugin registered via observe_property for `name` is invoked with (name, value).
-- Silently does nothing when no observers were registered for the property.
local function fire_observer(recorded, name, value)
  for _, callback in ipairs(recorded.observers[name] or {}) do
    callback(name, value)
  end
end
-- Returns true when the scenario recorded a key binding whose key string and
-- binding name both match; false otherwise (including when nothing was recorded).
local function has_key_binding(recorded, keys, name)
  local bindings = recorded.key_bindings or {}
  for _, entry in ipairs(bindings) do
    local matches = entry.keys == keys and entry.name == name
    if matches then
      return true
    end
  end
  return false
end
local binary_path = "/tmp/subminer-binary"
do
@@ -516,6 +591,38 @@ do
)
end
do
local recorded, err = run_plugin_scenario({
process_list = "",
option_overrides = {
binary_path = binary_path,
auto_start = "no",
},
media_title = "Sample Show S01E01",
time_pos = 13,
mal_lookup_stdout = "__MAL_FOUND__",
aniskip_stdout = "__ANISKIP_FOUND__",
files = {
[binary_path] = true,
},
})
assert_true(recorded ~= nil, "plugin failed to load for default AniSkip keybinding scenario: " .. tostring(err))
assert_true(
has_key_binding(recorded, "TAB", "subminer-skip-intro"),
"default AniSkip keybinding should register TAB"
)
assert_true(
not has_key_binding(recorded, "y-k", "subminer-skip-intro-fallback"),
"default AniSkip keybinding should not also register legacy y-k fallback"
)
recorded.script_messages["subminer-aniskip-refresh"]()
fire_observer(recorded, "time-pos", 13)
assert_true(
has_osd_message(recorded.osd, "You can skip by pressing TAB"),
"default AniSkip prompt should mention TAB"
)
end
do
local recorded, err = run_plugin_scenario({
process_list = "",
@@ -552,7 +659,7 @@ do
not has_property_set(recorded.property_sets, "pause", true),
"auto-start visible overlay should not force pause without explicit pause-until-ready option"
)
end
end
do
local recorded, err = run_plugin_scenario({

View File

@@ -73,3 +73,10 @@ test('default keybindings include primary and secondary subtitle track cycling o
assert.deepEqual(keybindingMap.get('KeyJ'), ['cycle', 'sid']);
assert.deepEqual(keybindingMap.get('Shift+KeyJ'), ['cycle', 'secondary-sid']);
});
// Regression guard: the shipped default keybinding set must map KeyF to a
// fullscreen toggle command.
test('default keybindings include fullscreen on F', () => {
  const keybindingMap = new Map(
    DEFAULT_KEYBINDINGS.map((binding) => [binding.key, binding.command]),
  );
  assert.deepEqual(keybindingMap.get('KeyF'), ['cycle', 'fullscreen']);
});

View File

@@ -50,6 +50,7 @@ export const SPECIAL_COMMANDS = {
export const DEFAULT_KEYBINDINGS: NonNullable<ResolvedConfig['keybindings']> = [
{ key: 'Space', command: ['cycle', 'pause'] },
{ key: 'KeyF', command: ['cycle', 'fullscreen'] },
{ key: 'KeyJ', command: ['cycle', 'sid'] },
{ key: 'Shift+KeyJ', command: ['cycle', 'secondary-sid'] },
{ key: 'ArrowRight', command: ['seek', 5] },

View File

@@ -34,6 +34,44 @@ test('guessAnilistMediaInfo falls back to parser when guessit fails', async () =
});
});
test('guessAnilistMediaInfo uses basename for guessit input', async () => {
const mediaPath =
'/truenas/jellyfin/anime/Rascal-Does-not-Dream-of-Bunny-Girl-Senapi/Season-1/Rascal Does Not Dream of Bunny Girl Senpai (2018) - S01E01 - 001 - My Senpai Is a Bunny Girl [Bluray-1080p][10bit][x265][Opus 2.0][JA]-Subs.mkv';
const seenTargets: string[] = [];
const result = await guessAnilistMediaInfo(mediaPath, null, {
runGuessit: async (target) => {
seenTargets.push(target);
return JSON.stringify({
title: 'Rascal Does Not Dream of Bunny Girl Senpai',
episode: 1,
});
},
});
assert.deepEqual(seenTargets, [
'Rascal Does Not Dream of Bunny Girl Senpai (2018) - S01E01 - 001 - My Senpai Is a Bunny Girl [Bluray-1080p][10bit][x265][Opus 2.0][JA]-Subs.mkv',
]);
assert.deepEqual(result, {
title: 'Rascal Does Not Dream of Bunny Girl Senpai',
episode: 1,
source: 'guessit',
});
});
test('guessAnilistMediaInfo joins multi-part guessit titles', async () => {
const result = await guessAnilistMediaInfo('/tmp/demo.mkv', null, {
runGuessit: async () =>
JSON.stringify({
title: ['Rascal', 'Does-not-Dream-of-Bunny-Girl-Senpai'],
episode: 1,
}),
});
assert.deepEqual(result, {
title: 'Rascal Does not Dream of Bunny Girl Senpai',
episode: 1,
source: 'guessit',
});
});
test('updateAnilistPostWatchProgress updates progress when behind', async () => {
const originalFetch = globalThis.fetch;
let call = 0;

View File

@@ -1,4 +1,5 @@
import * as childProcess from 'child_process';
import * as path from 'path';
import { parseMediaInfo } from '../../../jimaku/utils';
@@ -90,6 +91,32 @@ function firstString(value: unknown): string | null {
return null;
}
/**
 * Normalizes one guessit title fragment: dots, underscores, and hyphens become
 * spaces, runs of whitespace collapse to a single space, and the result is trimmed.
 */
function normalizeGuessitTitlePart(value: string): string {
  const dehyphenated = value.replace(/[._]+/g, ' ').replace(/-/g, ' ');
  return dehyphenated.replace(/\s+/g, ' ').trim();
}

/**
 * Extracts a display title from guessit's `title` field, which may be a string
 * or an array of string parts. Non-string entries are ignored; returns null
 * when nothing usable remains after normalization.
 */
function readGuessitTitle(value: unknown): string | null {
  if (typeof value === 'string') {
    const cleaned = normalizeGuessitTitlePart(value);
    return cleaned === '' ? null : cleaned;
  }
  if (!Array.isArray(value)) {
    return null;
  }
  const pieces: string[] = [];
  for (const entry of value) {
    if (typeof entry !== 'string') continue;
    const cleaned = normalizeGuessitTitlePart(entry);
    if (cleaned !== '') pieces.push(cleaned);
  }
  if (pieces.length === 0) {
    return null;
  }
  return pieces.join(' ').replace(/\s+/g, ' ').trim();
}
function firstPositiveInteger(value: unknown): number | null {
if (typeof value === 'number' && Number.isInteger(value) && value > 0) {
return value;
@@ -184,12 +211,13 @@ export async function guessAnilistMediaInfo(
deps: GuessAnilistMediaInfoDeps = { runGuessit },
): Promise<AnilistMediaGuess | null> {
const target = mediaPath ?? mediaTitle;
const guessitTarget = mediaPath ? path.basename(mediaPath) : mediaTitle;
if (target && target.trim().length > 0) {
if (guessitTarget && guessitTarget.trim().length > 0) {
try {
const stdout = await deps.runGuessit(target);
const stdout = await deps.runGuessit(guessitTarget);
const parsed = JSON.parse(stdout) as Record<string, unknown>;
const title = firstString(parsed.title);
const title = readGuessitTitle(parsed.title);
const episode = firstPositiveInteger(parsed.episode);
if (title) {
return { title, episode, source: 'guessit' };

View File

@@ -72,6 +72,10 @@ export {
syncOverlayWindowLayer,
updateOverlayWindowBounds,
} from './overlay-window';
export {
handleOverlayWindowBeforeInputEvent,
isTabInputForMpvForwarding,
} from './overlay-window-input';
export { initializeOverlayRuntime } from './overlay-runtime-init';
export { setVisibleOverlayVisible, updateVisibleOverlayVisibility } from './overlay-visibility';
export {

View File

@@ -77,7 +77,7 @@ test('macOS keeps visible overlay hidden while tracker is not ready and emits on
assert.ok(!calls.includes('show'));
});
test('non-macOS keeps fallback visible overlay behavior when tracker is not ready', () => {
test('tracked non-macOS overlay stays hidden while tracker is not ready', () => {
const { window, calls } = createMainWindowRecorder();
let trackerWarning = false;
const tracker: WindowTrackerStub = {
@@ -116,7 +116,48 @@ test('non-macOS keeps fallback visible overlay behavior when tracker is not read
} as never);
assert.equal(trackerWarning, true);
assert.ok(calls.includes('update-bounds'));
assert.ok(calls.includes('hide'));
assert.ok(!calls.includes('update-bounds'));
assert.ok(!calls.includes('show'));
assert.ok(!calls.includes('focus'));
assert.ok(!calls.includes('osd'));
});
test('untracked non-macOS overlay keeps fallback visible behavior when no tracker exists', () => {
const { window, calls } = createMainWindowRecorder();
let trackerWarning = false;
updateVisibleOverlayVisibility({
visibleOverlayVisible: true,
mainWindow: window as never,
windowTracker: null,
trackerNotReadyWarningShown: trackerWarning,
setTrackerNotReadyWarningShown: (shown: boolean) => {
trackerWarning = shown;
},
updateVisibleOverlayBounds: () => {
calls.push('update-bounds');
},
ensureOverlayWindowLevel: () => {
calls.push('ensure-level');
},
syncPrimaryOverlayWindowLayer: () => {
calls.push('sync-layer');
},
enforceOverlayLayerOrder: () => {
calls.push('enforce-order');
},
syncOverlayShortcuts: () => {
calls.push('sync-shortcuts');
},
isMacOSPlatform: false,
showOverlayLoadingOsd: () => {
calls.push('osd');
},
resolveFallbackBounds: () => ({ x: 12, y: 24, width: 640, height: 360 }),
} as never);
assert.equal(trackerWarning, false);
assert.ok(calls.includes('show'));
assert.ok(calls.includes('focus'));
assert.ok(!calls.includes('osd'));

View File

@@ -84,19 +84,7 @@ export function updateVisibleOverlayVisibility(args: {
}
}
if (args.isMacOSPlatform || args.isWindowsPlatform) {
mainWindow.hide();
args.syncOverlayShortcuts();
return;
}
const fallbackBounds = args.resolveFallbackBounds?.();
if (!fallbackBounds) return;
args.updateVisibleOverlayBounds(fallbackBounds);
args.syncPrimaryOverlayWindowLayer('visible');
showPassiveVisibleOverlay();
args.enforceOverlayLayerOrder();
mainWindow.hide();
args.syncOverlayShortcuts();
}

View File

@@ -0,0 +1,61 @@
export type OverlayWindowKind = 'visible' | 'modal';

/**
 * True for a bare Tab keydown (no modifiers, not an auto-repeat) — the only
 * Tab input the overlay forwards to mpv so bindings like AniSkip still fire
 * while the overlay window has focus.
 */
export function isTabInputForMpvForwarding(input: Electron.Input): boolean {
  if (input.type !== 'keyDown') return false;
  if (input.isAutoRepeat) return false;
  if (input.alt) return false;
  if (input.control) return false;
  if (input.meta) return false;
  if (input.shift) return false;
  return input.code === 'Tab' || input.key === 'Tab';
}

// Shared check for the physical/logical Y key used by both toggle shortcuts.
function matchesYKey(input: Electron.Input): boolean {
  const loweredKey = typeof input.key === 'string' ? input.key.toLowerCase() : '';
  return input.code === 'KeyY' || loweredKey === 'y';
}

// Ctrl/Cmd+Y (without Shift or Alt) toggles the lookup window.
function isLookupWindowToggleInput(input: Electron.Input): boolean {
  if (input.type !== 'keyDown') return false;
  if (input.alt) return false;
  if (!input.control && !input.meta) return false;
  if (input.shift) return false;
  return matchesYKey(input);
}

// Ctrl/Cmd+Shift+Y (without Alt) toggles keyboard mode.
function isKeyboardModeToggleInput(input: Electron.Input): boolean {
  if (input.type !== 'keyDown') return false;
  if (input.alt) return false;
  if (!input.control && !input.meta) return false;
  if (!input.shift) return false;
  return matchesYKey(input);
}

/**
 * Routes a before-input-event from an overlay window.
 *
 * Only the visible overlay while shown intercepts input; modal windows and
 * hidden windows always pass events through. Checks run in priority order:
 * keyboard-mode toggle, lookup-window toggle, bare-Tab forwarding to mpv,
 * then the local shortcut fallback. Returns true when the event was consumed
 * (preventDefault was called), false when it should propagate.
 */
export function handleOverlayWindowBeforeInputEvent(options: {
  kind: OverlayWindowKind;
  windowVisible: boolean;
  input: Electron.Input;
  preventDefault: () => void;
  sendKeyboardModeToggleRequested: () => void;
  sendLookupWindowToggleRequested: () => void;
  tryHandleOverlayShortcutLocalFallback: (input: Electron.Input) => boolean;
  forwardTabToMpv: () => void;
}): boolean {
  const { kind, windowVisible, input } = options;
  if (kind === 'modal' || !windowVisible) {
    return false;
  }
  if (isKeyboardModeToggleInput(input)) {
    options.preventDefault();
    options.sendKeyboardModeToggleRequested();
    return true;
  }
  if (isLookupWindowToggleInput(input)) {
    options.preventDefault();
    options.sendLookupWindowToggleRequested();
    return true;
  }
  if (isTabInputForMpvForwarding(input)) {
    options.preventDefault();
    options.forwardTabToMpv();
    return true;
  }
  const handledLocally = options.tryHandleOverlayShortcutLocalFallback(input);
  if (!handledLocally) {
    return false;
  }
  options.preventDefault();
  return true;
}

View File

@@ -0,0 +1,84 @@
import assert from 'node:assert/strict';
import test from 'node:test';
import {
handleOverlayWindowBeforeInputEvent,
isTabInputForMpvForwarding,
} from './overlay-window-input';
test('isTabInputForMpvForwarding matches bare Tab keydown only', () => {
  // Table-driven: each case pairs an input shape with the expected verdict.
  const cases: Array<[Electron.Input, boolean]> = [
    [{ type: 'keyDown', key: 'Tab', code: 'Tab' } as Electron.Input, true],
    [{ type: 'keyDown', key: 'Tab', code: 'Tab', shift: true } as Electron.Input, false],
    [{ type: 'keyUp', key: 'Tab', code: 'Tab' } as Electron.Input, false],
  ];
  for (const [input, expected] of cases) {
    assert.equal(isTabInputForMpvForwarding(input), expected);
  }
});
test('handleOverlayWindowBeforeInputEvent forwards Tab to mpv for visible overlays', () => {
  const observed: string[] = [];
  // Small factory so every callback just records its label in order.
  const record = (label: string) => () => {
    observed.push(label);
  };
  const wasHandled = handleOverlayWindowBeforeInputEvent({
    kind: 'visible',
    windowVisible: true,
    input: { type: 'keyDown', key: 'Tab', code: 'Tab' } as Electron.Input,
    preventDefault: record('prevent-default'),
    sendKeyboardModeToggleRequested: record('keyboard-mode'),
    sendLookupWindowToggleRequested: record('lookup-toggle'),
    tryHandleOverlayShortcutLocalFallback: () => {
      observed.push('fallback');
      return false;
    },
    forwardTabToMpv: record('forward-tab'),
  });
  assert.equal(wasHandled, true);
  // Tab must be consumed and relayed before the fallback is ever consulted.
  assert.deepEqual(observed, ['prevent-default', 'forward-tab']);
});
test('handleOverlayWindowBeforeInputEvent leaves modal Tab handling alone', () => {
  const observed: string[] = [];
  // Small factory so every callback just records its label in order.
  const record = (label: string) => () => {
    observed.push(label);
  };
  const wasHandled = handleOverlayWindowBeforeInputEvent({
    kind: 'modal',
    windowVisible: true,
    input: { type: 'keyDown', key: 'Tab', code: 'Tab' } as Electron.Input,
    preventDefault: record('prevent-default'),
    sendKeyboardModeToggleRequested: record('keyboard-mode'),
    sendLookupWindowToggleRequested: record('lookup-toggle'),
    tryHandleOverlayShortcutLocalFallback: () => {
      observed.push('fallback');
      return false;
    },
    forwardTabToMpv: record('forward-tab'),
  });
  assert.equal(wasHandled, false);
  // Modal overlays keep native Tab behavior: nothing is intercepted.
  assert.deepEqual(observed, []);
});

View File

@@ -3,6 +3,10 @@ import * as path from 'path';
import { WindowGeometry } from '../../types';
import { createLogger } from '../../logger';
import { IPC_CHANNELS } from '../../shared/ipc/contracts';
import {
handleOverlayWindowBeforeInputEvent,
type OverlayWindowKind,
} from './overlay-window-input';
const logger = createLogger('main:overlay-window');
const overlayWindowLayerByInstance = new WeakMap<BrowserWindow, OverlayWindowKind>();
@@ -23,26 +27,6 @@ function loadOverlayWindowLayer(window: BrowserWindow, layer: OverlayWindowKind)
});
}
export type OverlayWindowKind = 'visible' | 'modal';
function isLookupWindowToggleInput(input: Electron.Input): boolean {
if (input.type !== 'keyDown') return false;
if (input.alt) return false;
if (!input.control && !input.meta) return false;
if (input.shift) return false;
const normalizedKey = typeof input.key === 'string' ? input.key.toLowerCase() : '';
return input.code === 'KeyY' || normalizedKey === 'y';
}
function isKeyboardModeToggleInput(input: Electron.Input): boolean {
if (input.type !== 'keyDown') return false;
if (input.alt) return false;
if (!input.control && !input.meta) return false;
if (!input.shift) return false;
const normalizedKey = typeof input.key === 'string' ? input.key.toLowerCase() : '';
return input.code === 'KeyY' || normalizedKey === 'y';
}
export function updateOverlayWindowBounds(
geometry: WindowGeometry,
window: BrowserWindow | null,
@@ -92,6 +76,7 @@ export function createOverlayWindow(
setOverlayDebugVisualizationEnabled: (enabled: boolean) => void;
isOverlayVisible: (kind: OverlayWindowKind) => boolean;
tryHandleOverlayShortcutLocalFallback: (input: Electron.Input) => boolean;
forwardTabToMpv: () => void;
onWindowClosed: (kind: OverlayWindowKind) => void;
},
): BrowserWindow {
@@ -142,20 +127,19 @@ export function createOverlayWindow(
}
window.webContents.on('before-input-event', (event, input) => {
if (kind === 'modal') return;
if (!window.isVisible()) return;
if (isKeyboardModeToggleInput(input)) {
event.preventDefault();
window.webContents.send(IPC_CHANNELS.event.keyboardModeToggleRequested);
return;
}
if (isLookupWindowToggleInput(input)) {
event.preventDefault();
window.webContents.send(IPC_CHANNELS.event.lookupWindowToggleRequested);
return;
}
if (!options.tryHandleOverlayShortcutLocalFallback(input)) return;
event.preventDefault();
handleOverlayWindowBeforeInputEvent({
kind,
windowVisible: window.isVisible(),
input,
preventDefault: () => event.preventDefault(),
sendKeyboardModeToggleRequested: () =>
window.webContents.send(IPC_CHANNELS.event.keyboardModeToggleRequested),
sendLookupWindowToggleRequested: () =>
window.webContents.send(IPC_CHANNELS.event.lookupWindowToggleRequested),
tryHandleOverlayShortcutLocalFallback: (nextInput) =>
options.tryHandleOverlayShortcutLocalFallback(nextInput),
forwardTabToMpv: () => options.forwardTabToMpv(),
});
});
window.hide();
@@ -185,3 +169,5 @@ export function syncOverlayWindowLayer(window: BrowserWindow, layer: 'visible'):
if (overlayWindowLayerByInstance.get(window) === layer) return;
loadOverlayWindowLayer(window, layer);
}
export type { OverlayWindowKind } from './overlay-window-input';

View File

@@ -372,6 +372,9 @@ import { createMediaRuntimeService } from './main/media-runtime';
import { createOverlayVisibilityRuntimeService } from './main/overlay-visibility-runtime';
import { createCharacterDictionaryRuntimeService } from './main/character-dictionary-runtime';
import { createCharacterDictionaryAutoSyncRuntimeService } from './main/runtime/character-dictionary-auto-sync';
import { notifyCharacterDictionaryAutoSyncStatus } from './main/runtime/character-dictionary-auto-sync-notifications';
import { createCurrentMediaTokenizationGate } from './main/runtime/current-media-tokenization-gate';
import { createStartupOsdSequencer } from './main/runtime/startup-osd-sequencer';
import {
getPreferredYomitanAnkiServerUrl as getPreferredYomitanAnkiServerUrlRuntime,
shouldForceOverrideYomitanAnkiServer,
@@ -913,6 +916,10 @@ const configDerivedRuntime = createConfigDerivedRuntime(buildConfigDerivedRuntim
const subsyncRuntime = createMainSubsyncRuntime(buildMainSubsyncRuntimeMainDepsHandler());
let autoPlayReadySignalMediaPath: string | null = null;
let autoPlayReadySignalGeneration = 0;
const currentMediaTokenizationGate = createCurrentMediaTokenizationGate();
const startupOsdSequencer = createStartupOsdSequencer({
showOsd: (message) => showMpvOsd(message),
});
function maybeSignalPluginAutoplayReady(
payload: SubtitleData,
@@ -1324,8 +1331,13 @@ const characterDictionaryAutoSyncRuntime = createCharacterDictionaryAutoSyncRunt
profileScope: config.profileScope,
};
},
getOrCreateCurrentSnapshot: () => characterDictionaryRuntime.getOrCreateCurrentSnapshot(),
getOrCreateCurrentSnapshot: (targetPath, progress) =>
characterDictionaryRuntime.getOrCreateCurrentSnapshot(targetPath, progress),
buildMergedDictionary: (mediaIds) => characterDictionaryRuntime.buildMergedDictionary(mediaIds),
waitForYomitanMutationReady: () =>
currentMediaTokenizationGate.waitUntilReady(
appState.currentMediaPath?.trim() || appState.mpvClient?.currentVideoPath?.trim() || null,
),
getYomitanDictionaryInfo: async () => {
await ensureYomitanExtensionLoaded();
return await getYomitanDictionaryInfo(getYomitanParserRuntimeDeps(), {
@@ -1364,6 +1376,24 @@ const characterDictionaryAutoSyncRuntime = createCharacterDictionaryAutoSyncRunt
clearSchedule: (timer) => clearTimeout(timer),
logInfo: (message) => logger.info(message),
logWarn: (message) => logger.warn(message),
onSyncStatus: (event) => {
notifyCharacterDictionaryAutoSyncStatus(event, {
getNotificationType: () => getResolvedConfig().ankiConnect.behavior.notificationType,
showOsd: (message) => showMpvOsd(message),
showDesktopNotification: (title, options) => showDesktopNotification(title, options),
startupOsdSequencer,
});
},
onSyncComplete: ({ mediaId, mediaTitle, changed }) => {
if (appState.yomitanParserWindow) {
clearYomitanParserCachesForWindow(appState.yomitanParserWindow);
}
subtitleProcessingController.invalidateTokenizationCache();
subtitleProcessingController.refreshCurrentSubtitle(appState.currentSubText);
logger.info(
`[dictionary:auto-sync] refreshed current subtitle after sync (AniList ${mediaId}, changed=${changed ? 'yes' : 'no'}, title=${mediaTitle})`,
);
},
});
const overlayVisibilityRuntime = createOverlayVisibilityRuntimeService(
@@ -2673,6 +2703,8 @@ const {
},
updateCurrentMediaPath: (path) => {
autoPlayReadySignalMediaPath = null;
currentMediaTokenizationGate.updateCurrentMediaPath(path);
startupOsdSequencer.reset();
if (path) {
ensureImmersionTrackerStarted();
}
@@ -2793,6 +2825,10 @@ const {
getYomitanGroupDebugEnabled: () => appState.overlayDebugVisualizationEnabled,
getMecabTokenizer: () => appState.mecabTokenizer,
onTokenizationReady: (text) => {
currentMediaTokenizationGate.markReady(
appState.currentMediaPath?.trim() || appState.mpvClient?.currentVideoPath?.trim() || null,
);
startupOsdSequencer.markTokenizationReady();
maybeSignalPluginAutoplayReady({ text, tokens: null }, { forceWhilePaused: true });
},
},
@@ -2812,6 +2848,9 @@ const {
ensureFrequencyDictionaryLookup: () =>
frequencyDictionaryRuntime.ensureFrequencyDictionaryLookup(),
showMpvOsd: (message: string) => showMpvOsd(message),
showLoadingOsd: (message: string) => startupOsdSequencer.showAnnotationLoading(message),
showLoadedOsd: (message: string) =>
startupOsdSequencer.markAnnotationLoadingComplete(message),
shouldShowOsdNotification: () => {
const type = getResolvedConfig().ankiConnect.behavior.notificationType;
return type === 'osd' || type === 'both';
@@ -3475,6 +3514,7 @@ const { createMainWindow: createMainWindowHandler, createModalWindow: createModa
windowKind === 'visible' ? overlayManager.getVisibleOverlayVisible() : false,
tryHandleOverlayShortcutLocalFallback: (input) =>
overlayShortcutsRuntime.tryHandleOverlayShortcutLocalFallback(input),
forwardTabToMpv: () => sendMpvCommandRuntime(appState.mpvClient, ['keypress', 'TAB']),
onWindowClosed: (windowKind) => {
if (windowKind === 'visible') {
overlayManager.setMainWindow(null);

View File

@@ -213,7 +213,7 @@ test('generateForCurrentMedia emits structured-content glossary so image stays w
assert.equal(roleBadgeDiv.tag, 'div');
const badge = roleBadgeDiv.content as { tag: string; content: string };
assert.equal(badge.tag, 'span');
assert.equal(badge.content, 'Side Character');
assert.equal(badge.content, 'Main Character');
const descSection = children.find(
(c) =>
@@ -695,6 +695,128 @@ test('generateForCurrentMedia adds kana aliases for romanized names when native
}
});
// Regression: AniList `first`/`last` name hints should yield separate
// kanji family-name and given-name dictionary terms with kana readings.
test('generateForCurrentMedia indexes kanji family and given names using AniList first and last hints', async () => {
  const userDataPath = makeTempDir();
  const originalFetch = globalThis.fetch;
  // Stub AniList GraphQL: serve the media search page first, then the
  // character page, keyed off markers in the query text.
  globalThis.fetch = (async (input: string | URL | Request, init?: RequestInit) => {
    const url = typeof input === 'string' ? input : input instanceof URL ? input.href : input.url;
    if (url === GRAPHQL_URL) {
      const body = JSON.parse(String(init?.body ?? '{}')) as {
        query?: string;
      };
      if (body.query?.includes('Page(perPage: 10)')) {
        return new Response(
          JSON.stringify({
            data: {
              Page: {
                media: [
                  {
                    id: 37450,
                    episodes: 13,
                    title: {
                      romaji: 'Seishun Buta Yarou wa Bunny Girl Senpai no Yume wo Minai',
                      english: 'Rascal Does Not Dream of Bunny Girl Senpai',
                      native: '青春ブタ野郎はバニーガール先輩の夢を見ない',
                    },
                  },
                ],
              },
            },
          }),
          {
            status: 200,
            headers: { 'content-type': 'application/json' },
          },
        );
      }
      if (body.query?.includes('characters(page: $page')) {
        return new Response(
          JSON.stringify({
            data: {
              Media: {
                title: {
                  romaji: 'Seishun Buta Yarou wa Bunny Girl Senpai no Yume wo Minai',
                  english: 'Rascal Does Not Dream of Bunny Girl Senpai',
                  native: '青春ブタ野郎はバニーガール先輩の夢を見ない',
                },
                characters: {
                  pageInfo: { hasNextPage: false },
                  edges: [
                    {
                      role: 'SUPPORTING',
                      node: {
                        id: 77,
                        description: 'Classmate.',
                        image: null,
                        name: {
                          first: 'Yuuma',
                          full: 'Yuuma Kunimi',
                          last: 'Kunimi',
                          native: '国見佑真',
                        },
                      },
                    },
                  ],
                },
              },
            },
          }),
          {
            status: 200,
            headers: { 'content-type': 'application/json' },
          },
        );
      }
    }
    throw new Error(`Unexpected fetch URL: ${url}`);
  }) as typeof globalThis.fetch;
  try {
    const runtime = createCharacterDictionaryRuntimeService({
      userDataPath,
      getCurrentMediaPath: () => '/tmp/bunny-girl-senpai-s01e01.mkv',
      getCurrentMediaTitle: () => 'Rascal Does Not Dream of Bunny Girl Senpai - S01E01',
      resolveMediaPathForJimaku: (mediaPath) => mediaPath,
      guessAnilistMediaInfo: async () => ({
        title: 'Rascal Does Not Dream of Bunny Girl Senpai',
        episode: 1,
        source: 'fallback',
      }),
      now: () => 1_700_000_000_000,
    });
    const result = await runtime.generateForCurrentMedia();
    // Term-bank rows are tuples; index 0 is the surface form and index 1
    // its kana reading.
    const termBank = JSON.parse(
      readStoredZipEntry(result.zipPath, 'term_bank_1.json').toString('utf8'),
    ) as Array<
      [
        string,
        string,
        string,
        string,
        number,
        Array<string | Record<string, unknown>>,
        number,
        string,
      ]
    >;
    const familyName = termBank.find(([term]) => term === '国見');
    assert.ok(familyName, 'expected kanji family-name term from AniList hints');
    assert.equal(familyName[1], 'くにみ');
    const givenName = termBank.find(([term]) => term === '佑真');
    assert.ok(givenName, 'expected kanji given-name term from AniList hints');
    assert.equal(givenName[1], 'ゆうま');
  } finally {
    // Always restore the real fetch so later tests are unaffected.
    globalThis.fetch = originalFetch;
  }
});
test('generateForCurrentMedia indexes AniList alternative character names for alias lookups', async () => {
const userDataPath = makeTempDir();
const originalFetch = globalThis.fetch;
@@ -812,6 +934,520 @@ test('generateForCurrentMedia indexes AniList alternative character names for al
}
});
// Regression: characters lacking a native (Japanese) name must be dropped
// from the dictionary while valid characters are still indexed.
test('generateForCurrentMedia skips AniList characters without a native name when other valid characters exist', async () => {
  const userDataPath = makeTempDir();
  const originalFetch = globalThis.fetch;
  // Stub AniList GraphQL: media search page first, then the character page.
  globalThis.fetch = (async (input: string | URL | Request, init?: RequestInit) => {
    const url = typeof input === 'string' ? input : input instanceof URL ? input.href : input.url;
    if (url === GRAPHQL_URL) {
      const body = JSON.parse(String(init?.body ?? '{}')) as {
        query?: string;
      };
      if (body.query?.includes('Page(perPage: 10)')) {
        return new Response(
          JSON.stringify({
            data: {
              Page: {
                media: [
                  {
                    id: 130298,
                    episodes: 20,
                    title: {
                      romaji: 'Kage no Jitsuryokusha ni Naritakute!',
                      english: 'The Eminence in Shadow',
                      native: '陰の実力者になりたくて!',
                    },
                  },
                ],
              },
            },
          }),
          { status: 200, headers: { 'content-type': 'application/json' } },
        );
      }
      if (body.query?.includes('characters(page: $page')) {
        return new Response(
          JSON.stringify({
            data: {
              Media: {
                title: {
                  english: 'The Eminence in Shadow',
                },
                characters: {
                  pageInfo: { hasNextPage: false },
                  edges: [
                    {
                      role: 'MAIN',
                      node: {
                        id: 111,
                        description: 'Valid native name.',
                        image: null,
                        name: {
                          full: 'Alpha',
                          native: 'アルファ',
                          first: 'Alpha',
                          last: null,
                        },
                      },
                    },
                    {
                      // Empty `native` — this character should be excluded.
                      role: 'SUPPORTING',
                      node: {
                        id: 222,
                        description: 'Missing native name.',
                        image: null,
                        name: {
                          full: 'John Smith',
                          native: '',
                          first: 'John',
                          last: 'Smith',
                        },
                      },
                    },
                  ],
                },
              },
            },
          }),
          { status: 200, headers: { 'content-type': 'application/json' } },
        );
      }
    }
    throw new Error(`Unexpected fetch URL: ${url}`);
  }) as typeof globalThis.fetch;
  try {
    const runtime = createCharacterDictionaryRuntimeService({
      userDataPath,
      getCurrentMediaPath: () => '/tmp/eminence-s01e05.mkv',
      getCurrentMediaTitle: () => 'The Eminence in Shadow - S01E05',
      resolveMediaPathForJimaku: (mediaPath) => mediaPath,
      guessAnilistMediaInfo: async () => ({
        title: 'The Eminence in Shadow',
        episode: 5,
        source: 'fallback',
      }),
      now: () => 1_700_000_000_000,
    });
    const result = await runtime.generateForCurrentMedia();
    // Term-bank rows are tuples; index 0 is the surface form.
    const termBank = JSON.parse(
      readStoredZipEntry(result.zipPath, 'term_bank_1.json').toString('utf8'),
    ) as Array<
      [
        string,
        string,
        string,
        string,
        number,
        Array<string | Record<string, unknown>>,
        number,
        string,
      ]
    >;
    assert.ok(termBank.find(([term]) => term === 'アルファ'));
    assert.equal(
      termBank.some(([term]) => term === 'John Smith'),
      false,
    );
  } finally {
    // Always restore the real fetch so later tests are unaffected.
    globalThis.fetch = originalFetch;
  }
});
// Regression: AniList `first`/`last` hints written in kanji should drive
// the full-name reading as well as the per-part family/given readings.
test('generateForCurrentMedia uses AniList first and last name hints to build kanji readings', async () => {
  const userDataPath = makeTempDir();
  const originalFetch = globalThis.fetch;
  // Stub AniList GraphQL: media search page first, then the character page.
  globalThis.fetch = (async (input: string | URL | Request, init?: RequestInit) => {
    const url = typeof input === 'string' ? input : input instanceof URL ? input.href : input.url;
    if (url === GRAPHQL_URL) {
      const body = JSON.parse(String(init?.body ?? '{}')) as {
        query?: string;
      };
      if (body.query?.includes('Page(perPage: 10)')) {
        return new Response(
          JSON.stringify({
            data: {
              Page: {
                media: [
                  {
                    id: 20594,
                    episodes: 10,
                    title: {
                      romaji: 'Kono Subarashii Sekai ni Shukufuku wo!',
                      english: 'KONOSUBA -Gods blessing on this wonderful world!',
                      native: 'この素晴らしい世界に祝福を!',
                    },
                  },
                ],
              },
            },
          }),
          {
            status: 200,
            headers: { 'content-type': 'application/json' },
          },
        );
      }
      if (body.query?.includes('characters(page: $page')) {
        return new Response(
          JSON.stringify({
            data: {
              Media: {
                title: {
                  romaji: 'Kono Subarashii Sekai ni Shukufuku wo!',
                  english: 'KONOSUBA -Gods blessing on this wonderful world!',
                  native: 'この素晴らしい世界に祝福を!',
                },
                characters: {
                  pageInfo: { hasNextPage: false },
                  edges: [
                    {
                      role: 'MAIN',
                      node: {
                        id: 1,
                        description: 'The protagonist.',
                        image: null,
                        name: {
                          // `first`/`last` are kanji here; the romaji lives in `full`.
                          full: 'Satou Kazuma',
                          native: '佐藤和真',
                          first: '和真',
                          last: '佐藤',
                        },
                      },
                    },
                  ],
                },
              },
            },
          }),
          {
            status: 200,
            headers: { 'content-type': 'application/json' },
          },
        );
      }
    }
    throw new Error(`Unexpected fetch URL: ${url}`);
  }) as typeof globalThis.fetch;
  try {
    const runtime = createCharacterDictionaryRuntimeService({
      userDataPath,
      getCurrentMediaPath: () => '/tmp/konosuba-s02e05.mkv',
      getCurrentMediaTitle: () => 'Konosuba S02E05',
      resolveMediaPathForJimaku: (mediaPath) => mediaPath,
      guessAnilistMediaInfo: async () => ({
        title: 'Konosuba',
        episode: 5,
        source: 'fallback',
      }),
      now: () => 1_700_000_000_000,
    });
    const result = await runtime.generateForCurrentMedia();
    // Term-bank rows are tuples; index 0 is the surface form and index 1
    // its kana reading.
    const termBank = JSON.parse(
      readStoredZipEntry(result.zipPath, 'term_bank_1.json').toString('utf8'),
    ) as Array<
      [
        string,
        string,
        string,
        string,
        number,
        Array<string | Record<string, unknown>>,
        number,
        string,
      ]
    >;
    assert.equal(termBank.find(([term]) => term === '佐藤和真')?.[1], 'さとうかずま');
    assert.equal(termBank.find(([term]) => term === '佐藤')?.[1], 'さとう');
    assert.equal(termBank.find(([term]) => term === '和真')?.[1], 'かずま');
  } finally {
    // Always restore the real fetch so later tests are unaffected.
    globalThis.fetch = originalFetch;
  }
});
// Regression: gender, age, birthday, and blood type from AniList must be
// rendered inside the "Character Information" structured-content section.
test('generateForCurrentMedia includes AniList gender age birthday and blood type in character information', async () => {
  const userDataPath = makeTempDir();
  const originalFetch = globalThis.fetch;
  // Stub AniList GraphQL: media search page first, then the character page.
  globalThis.fetch = (async (input: string | URL | Request, init?: RequestInit) => {
    const url = typeof input === 'string' ? input : input instanceof URL ? input.href : input.url;
    if (url === GRAPHQL_URL) {
      const body = JSON.parse(String(init?.body ?? '{}')) as {
        query?: string;
      };
      if (body.query?.includes('Page(perPage: 10)')) {
        return new Response(
          JSON.stringify({
            data: {
              Page: {
                media: [
                  {
                    id: 130298,
                    episodes: 20,
                    title: {
                      romaji: 'Kage no Jitsuryokusha ni Naritakute!',
                      english: 'The Eminence in Shadow',
                      native: '陰の実力者になりたくて!',
                    },
                  },
                ],
              },
            },
          }),
          { status: 200, headers: { 'content-type': 'application/json' } },
        );
      }
      if (body.query?.includes('characters(page: $page')) {
        return new Response(
          JSON.stringify({
            data: {
              Media: {
                title: {
                  english: 'The Eminence in Shadow',
                },
                characters: {
                  pageInfo: { hasNextPage: false },
                  edges: [
                    {
                      role: 'SUPPORTING',
                      node: {
                        id: 123,
                        description: 'Second princess of Midgar.',
                        image: null,
                        gender: 'Female',
                        age: '15',
                        dateOfBirth: {
                          month: 9,
                          day: 1,
                        },
                        bloodType: 'A',
                        name: {
                          full: 'Alexia Midgar',
                          native: 'アレクシア・ミドガル',
                          first: 'Alexia',
                          last: 'Midgar',
                        },
                      },
                    },
                  ],
                },
              },
            },
          }),
          { status: 200, headers: { 'content-type': 'application/json' } },
        );
      }
    }
    throw new Error(`Unexpected fetch URL: ${url}`);
  }) as typeof globalThis.fetch;
  try {
    const runtime = createCharacterDictionaryRuntimeService({
      userDataPath,
      getCurrentMediaPath: () => '/tmp/eminence-s01e05.mkv',
      getCurrentMediaTitle: () => 'The Eminence in Shadow - S01E05',
      resolveMediaPathForJimaku: (mediaPath) => mediaPath,
      guessAnilistMediaInfo: async () => ({
        title: 'The Eminence in Shadow',
        episode: 5,
        source: 'fallback',
      }),
      now: () => 1_700_000_000_000,
    });
    const result = await runtime.generateForCurrentMedia();
    // Term-bank rows are tuples; index 5 holds the structured glossary.
    const termBank = JSON.parse(
      readStoredZipEntry(result.zipPath, 'term_bank_1.json').toString('utf8'),
    ) as Array<
      [
        string,
        string,
        string,
        string,
        number,
        Array<string | Record<string, unknown>>,
        number,
        string,
      ]
    >;
    const alexia = termBank.find(([term]) => term === 'アレクシア');
    assert.ok(alexia);
    const children = (
      alexia[5][0] as {
        content: { content: Array<Record<string, unknown>> };
      }
    ).content.content;
    // Locate the collapsible <details> node titled "Character Information".
    const infoSection = children.find(
      (c) =>
        (c as { tag?: string }).tag === 'details' &&
        Array.isArray((c as { content?: unknown[] }).content) &&
        (c as { content: Array<{ content?: string }> }).content[0]?.content ===
          'Character Information',
    ) as { content: Array<Record<string, unknown>> } | undefined;
    assert.ok(infoSection);
    const body = infoSection.content[1] as { content: Array<{ content?: string }> };
    // Flatten to a string so we can regex-match the rendered field text.
    const flattened = JSON.stringify(body.content);
    assert.match(flattened, /Female|♂ Male|♀ Female/);
    assert.match(flattened, /15 years/);
    assert.match(flattened, /Blood Type A/);
    assert.match(flattened, /Birthday: September 1/);
  } finally {
    // Always restore the real fetch so later tests are unaffected.
    globalThis.fetch = originalFetch;
  }
});
// Regression: two characters sharing the same native surface form must
// each keep their own glossary entry (no dedupe by term).
test('generateForCurrentMedia preserves duplicate surface forms across different characters', async () => {
  const userDataPath = makeTempDir();
  const originalFetch = globalThis.fetch;
  // Stub AniList GraphQL: media search page first, then the character page.
  globalThis.fetch = (async (input: string | URL | Request, init?: RequestInit) => {
    const url = typeof input === 'string' ? input : input instanceof URL ? input.href : input.url;
    if (url === GRAPHQL_URL) {
      const body = JSON.parse(String(init?.body ?? '{}')) as {
        query?: string;
      };
      if (body.query?.includes('Page(perPage: 10)')) {
        return new Response(
          JSON.stringify({
            data: {
              Page: {
                media: [
                  {
                    id: 130298,
                    episodes: 20,
                    title: {
                      romaji: 'Kage no Jitsuryokusha ni Naritakute!',
                      english: 'The Eminence in Shadow',
                      native: '陰の実力者になりたくて!',
                    },
                  },
                ],
              },
            },
          }),
          { status: 200, headers: { 'content-type': 'application/json' } },
        );
      }
      if (body.query?.includes('characters(page: $page')) {
        return new Response(
          JSON.stringify({
            data: {
              Media: {
                title: {
                  english: 'The Eminence in Shadow',
                },
                characters: {
                  pageInfo: { hasNextPage: false },
                  edges: [
                    // Both characters share the native form アルファ.
                    {
                      role: 'MAIN',
                      node: {
                        id: 111,
                        description: 'First Alpha.',
                        image: null,
                        name: {
                          full: 'Alpha One',
                          native: 'アルファ',
                          first: 'Alpha',
                          last: 'One',
                        },
                      },
                    },
                    {
                      role: 'MAIN',
                      node: {
                        id: 222,
                        description: 'Second Alpha.',
                        image: null,
                        name: {
                          full: 'Alpha Two',
                          native: 'アルファ',
                          first: 'Alpha',
                          last: 'Two',
                        },
                      },
                    },
                  ],
                },
              },
            },
          }),
          { status: 200, headers: { 'content-type': 'application/json' } },
        );
      }
    }
    throw new Error(`Unexpected fetch URL: ${url}`);
  }) as typeof globalThis.fetch;
  try {
    const runtime = createCharacterDictionaryRuntimeService({
      userDataPath,
      getCurrentMediaPath: () => '/tmp/eminence-s01e05.mkv',
      getCurrentMediaTitle: () => 'The Eminence in Shadow - S01E05',
      resolveMediaPathForJimaku: (mediaPath) => mediaPath,
      guessAnilistMediaInfo: async () => ({
        title: 'The Eminence in Shadow',
        episode: 5,
        source: 'fallback',
      }),
      now: () => 1_700_000_000_000,
    });
    const result = await runtime.generateForCurrentMedia();
    // Term-bank rows are tuples; index 5 holds the structured glossary.
    const termBank = JSON.parse(
      readStoredZipEntry(result.zipPath, 'term_bank_1.json').toString('utf8'),
    ) as Array<
      [
        string,
        string,
        string,
        string,
        number,
        Array<string | Record<string, unknown>>,
        number,
        string,
      ]
    >;
    const alphaEntries = termBank.filter(([term]) => term === 'アルファ');
    assert.equal(alphaEntries.length, 2);
    // Serialize each glossary so we can look for the per-character blurbs.
    const glossaries = alphaEntries.map((entry) =>
      JSON.stringify(
        (
          entry[5][0] as {
            content: { content: Array<Record<string, unknown>> };
          }
        ).content.content,
      ),
    );
    assert.ok(glossaries.some((value) => value.includes('First Alpha.')));
    assert.ok(glossaries.some((value) => value.includes('Second Alpha.')));
  } finally {
    // Always restore the real fetch so later tests are unaffected.
    globalThis.fetch = originalFetch;
  }
});
test('getOrCreateCurrentSnapshot persists and reuses normalized snapshot data', async () => {
const userDataPath = makeTempDir();
const originalFetch = globalThis.fetch;

View File

@@ -10,21 +10,21 @@ const ANILIST_GRAPHQL_URL = 'https://graphql.anilist.co';
const ANILIST_REQUEST_DELAY_MS = 2000;
const CHARACTER_IMAGE_DOWNLOAD_DELAY_MS = 250;
const HONORIFIC_SUFFIXES = [
'さん',
'様',
'先生',
'先輩',
'後輩',
'氏',
'君',
'くん',
'ちゃん',
'たん',
'坊',
'殿',
'博士',
'社長',
'部長',
{ term: 'さん', reading: 'さん' },
{ term: '様', reading: 'さま' },
{ term: '先生', reading: 'せんせい' },
{ term: '先輩', reading: 'せんぱい' },
{ term: '後輩', reading: 'こうはい' },
{ term: '氏', reading: 'し' },
{ term: '君', reading: 'くん' },
{ term: 'くん', reading: 'くん' },
{ term: 'ちゃん', reading: 'ちゃん' },
{ term: 'たん', reading: 'たん' },
{ term: '坊', reading: 'ぼう' },
{ term: '殿', reading: 'どの' },
{ term: '博士', reading: 'はかせ' },
{ term: '社長', reading: 'しゃちょう' },
{ term: '部長', reading: 'ぶちょう' },
] as const;
type CharacterDictionaryRole = 'main' | 'primary' | 'side' | 'appears';
@@ -45,6 +45,24 @@ type CharacterDictionarySnapshotImage = {
dataBase64: string;
};
type CharacterBirthday = [number, number];
// Result of splitting a native (Japanese) character name into parts.
// `hasSpace` is true when a confident family/given split was found
// (explicit space, middle-dot, or AniList hint match); `combined` is the
// name with any separator removed; `family`/`given` are null when no
// split was possible.
type JapaneseNameParts = {
  hasSpace: boolean;
  original: string;
  combined: string;
  family: string | null;
  given: string | null;
};
// Kana readings derived for a name. When no split was possible, `family`
// and `given` both carry the full-name reading.
type NameReadings = {
  hasSpace: boolean;
  original: string;
  full: string;
  family: string;
  given: string;
};
export type CharacterDictionarySnapshot = {
formatVersion: number;
mediaId: number;
@@ -55,7 +73,7 @@ export type CharacterDictionarySnapshot = {
images: CharacterDictionarySnapshotImage[];
};
const CHARACTER_DICTIONARY_FORMAT_VERSION = 14;
const CHARACTER_DICTIONARY_FORMAT_VERSION = 15;
const CHARACTER_DICTIONARY_MERGED_TITLE = 'SubMiner Character Dictionary';
type AniListSearchResponse = {
@@ -103,8 +121,17 @@ type AniListCharacterPageResponse = {
large?: string | null;
medium?: string | null;
} | null;
gender?: string | null;
age?: string | number | null;
dateOfBirth?: {
month?: number | null;
day?: number | null;
} | null;
bloodType?: string | null;
name?: {
first?: string | null;
full?: string | null;
last?: string | null;
native?: string | null;
alternative?: Array<string | null> | null;
} | null;
@@ -124,11 +151,17 @@ type VoiceActorRecord = {
type CharacterRecord = {
id: number;
role: CharacterDictionaryRole;
firstNameHint: string;
fullName: string;
lastNameHint: string;
nativeName: string;
alternativeNames: string[];
bloodType: string;
birthday: CharacterBirthday | null;
description: string;
imageUrl: string | null;
age: string;
sex: string;
voiceActors: VoiceActorRecord[];
};
@@ -161,6 +194,16 @@ export type CharacterDictionarySnapshotResult = {
updatedAt: number;
};
export type CharacterDictionarySnapshotProgress = {
mediaId: number;
mediaTitle: string;
};
export type CharacterDictionarySnapshotProgressCallbacks = {
onChecking?: (progress: CharacterDictionarySnapshotProgress) => void;
onGenerating?: (progress: CharacterDictionarySnapshotProgress) => void;
};
export type MergedCharacterDictionaryBuildResult = {
zipPath: string;
revision: string;
@@ -263,6 +306,16 @@ function buildReading(term: string): string {
return katakanaToHiragana(compact);
}
/**
 * Returns true when `value` contains at least one CJK ideograph.
 *
 * Covers the BMP unified block (U+4E00–U+9FFF), Extension A
 * (U+3400–U+4DBF), and the supplementary-plane extensions
 * (U+20000–U+2FA1F) so rare name kanji such as 𠮷 are detected.
 * `for...of` yields whole code points, so `codePointAt(0)` is used —
 * `charCodeAt(0)` would return only the high surrogate for non-BMP
 * characters and those kanji would never match.
 */
function containsKanji(value: string): boolean {
  for (const char of value) {
    const code = char.codePointAt(0) ?? 0;
    if (
      (code >= 0x4e00 && code <= 0x9fff) ||
      (code >= 0x3400 && code <= 0x4dbf) ||
      (code >= 0x20000 && code <= 0x2fa1f)
    ) {
      return true;
    }
  }
  return false;
}
/**
 * Heuristic: the value looks like a romanized (Latin-script) name —
 * ASCII letters, macron/circumflex vowels, apostrophes, dots, hyphens,
 * and whitespace only. Empty strings do not match.
 */
function isRomanizedName(value: string): boolean {
  const romanizedPattern = /^[A-Za-zĀĪŪĒŌÂÊÎÔÛāīūēōâêîôû'.\-\s]+$/;
  return romanizedPattern.test(value);
}
@@ -484,6 +537,67 @@ function romanizedTokenToKatakana(token: string): string | null {
return output.length > 0 ? output : null;
}
/** Converts a romanized name to a hiragana reading, or '' when no mapping exists. */
function buildReadingFromRomanized(value: string): string {
  const asKatakana = romanizedTokenToKatakana(value);
  if (!asKatakana) return '';
  return katakanaToHiragana(asKatakana);
}
/** Reads a name hint as kana directly, falling back to romaji-to-kana conversion. */
function buildReadingFromHint(value: string): string {
  const directReading = buildReading(value);
  if (directReading) return directReading;
  return buildReadingFromRomanized(value);
}
/**
 * Plausibility score for one half of a Japanese name by character count:
 * two-character parts score highest, then 1 or 3, then 4; everything else
 * scores zero.
 */
function scoreJapaneseNamePartLength(length: number): number {
  switch (length) {
    case 2:
      return 3;
    case 1:
    case 3:
      return 2;
    case 4:
      return 1;
    default:
      return 0;
  }
}
/**
 * Picks the most plausible family/given boundary inside an unspaced
 * Japanese name. Each candidate boundary is scored by typical part
 * lengths, by how well it matches the reading lengths of the AniList
 * first/last hints, and by a small bonus for balanced splits. Returns
 * the winning character index, or null for names shorter than two
 * characters. Ties keep the earliest index (comparison is strictly
 * greater-than).
 */
function inferJapaneseNameSplitIndex(
  nameOriginal: string,
  firstNameHint: string,
  lastNameHint: string,
): number | null {
  // Spread into code points so surrogate-pair kanji count as one character.
  const chars = [...nameOriginal];
  if (chars.length < 2) return null;
  // Kana lengths of the hinted readings steer the expected family length.
  const familyHintLength = [...buildReadingFromHint(lastNameHint)].length;
  const givenHintLength = [...buildReadingFromHint(firstNameHint)].length;
  const totalHintLength = familyHintLength + givenHintLength;
  const defaultBoundary = Math.round(chars.length / 2);
  let bestIndex: number | null = null;
  let bestScore = Number.NEGATIVE_INFINITY;
  for (let index = 1; index < chars.length; index += 1) {
    const familyLength = index;
    const givenLength = chars.length - index;
    let score =
      scoreJapaneseNamePartLength(familyLength) + scoreJapaneseNamePartLength(givenLength);
    // Bonus: splits leaving at least two characters on each side are the
    // common Japanese naming pattern for 4+ character names.
    if (chars.length >= 4 && familyLength >= 2 && givenLength >= 2) {
      score += 1;
    }
    if (totalHintLength > 0) {
      // Penalize deviation from the boundary implied by the hint readings.
      const expectedFamilyLength = (chars.length * familyHintLength) / totalHintLength;
      score -= Math.abs(familyLength - expectedFamilyLength) * 1.5;
    } else {
      // No hints available: mildly prefer a split near the middle.
      score -= Math.abs(familyLength - defaultBoundary) * 0.5;
    }
    if (familyLength === givenLength) {
      score += 0.25;
    }
    if (score > bestScore) {
      bestScore = score;
      bestIndex = index;
    }
  }
  return bestIndex;
}
function addRomanizedKanaAliases(values: Iterable<string>): string[] {
const aliases = new Set<string>();
for (const value of values) {
@@ -497,6 +611,166 @@ function addRomanizedKanaAliases(values: Iterable<string>): string[] {
return [...aliases];
}
/**
 * Splits a native character name into family/given parts.
 *
 * Strategies, tried in order:
 *  1. a single ASCII or ideographic space;
 *  2. a middle-dot separator (common for katakana foreign names);
 *  3. exact match against the concatenated AniList hints, in
 *     last+first then first+last order;
 *  4. heuristic boundary inference for unspaced kanji names via
 *     `inferJapaneseNameSplitIndex`.
 * When none applies the name is returned unsplit (`hasSpace: false`,
 * `family`/`given` null).
 */
function splitJapaneseName(
  nameOriginal: string,
  firstNameHint?: string,
  lastNameHint?: string,
): JapaneseNameParts {
  const trimmed = nameOriginal.trim();
  if (!trimmed) {
    return {
      hasSpace: false,
      original: '',
      combined: '',
      family: null,
      given: null,
    };
  }
  // Strategy 1: collapse runs of whitespace (incl. U+3000) and split on space.
  const normalizedSpace = trimmed.replace(/[\s\u3000]+/g, ' ').trim();
  const spaceParts = normalizedSpace.split(' ').filter((part) => part.length > 0);
  if (spaceParts.length === 2) {
    const family = spaceParts[0]!;
    const given = spaceParts[1]!;
    return {
      hasSpace: true,
      original: normalizedSpace,
      combined: `${family}${given}`,
      family,
      given,
    };
  }
  // Strategy 2: middle-dot separators.
  const middleDotParts = trimmed
    .split(/[・・·•]/)
    .map((part) => part.trim())
    .filter((part) => part.length > 0);
  if (middleDotParts.length === 2) {
    const family = middleDotParts[0]!;
    const given = middleDotParts[1]!;
    return {
      hasSpace: true,
      original: trimmed,
      combined: `${family}${given}`,
      family,
      given,
    };
  }
  // Strategy 3: the unspaced name equals the AniList hints concatenated in
  // either order; reuse the hints as the split.
  const hintedFirst = firstNameHint?.trim() || '';
  const hintedLast = lastNameHint?.trim() || '';
  if (hintedFirst && hintedLast) {
    const familyGiven = `${hintedLast}${hintedFirst}`;
    if (trimmed === familyGiven) {
      return {
        hasSpace: true,
        original: trimmed,
        combined: familyGiven,
        family: hintedLast,
        given: hintedFirst,
      };
    }
    const givenFamily = `${hintedFirst}${hintedLast}`;
    if (trimmed === givenFamily) {
      return {
        hasSpace: true,
        original: trimmed,
        combined: givenFamily,
        family: hintedFirst,
        given: hintedLast,
      };
    }
  }
  // Strategy 4: infer the boundary heuristically for unspaced kanji names.
  if (hintedFirst && hintedLast && containsKanji(trimmed)) {
    const splitIndex = inferJapaneseNameSplitIndex(trimmed, hintedFirst, hintedLast);
    if (splitIndex != null) {
      const chars = [...trimmed];
      const family = chars.slice(0, splitIndex).join('');
      const given = chars.slice(splitIndex).join('');
      if (family && given) {
        return {
          hasSpace: true,
          original: trimmed,
          combined: trimmed,
          family,
          given,
        };
      }
    }
  }
  // No confident split: treat the whole string as the name.
  return {
    hasSpace: false,
    original: trimmed,
    combined: trimmed,
    family: null,
    given: null,
  };
}
/**
 * Produces kana readings for a character name: the full name plus separate
 * family/given readings when the name can be split.
 *
 * Per-part reading sources, in priority order: the matching name hint, then
 * (for kanji parts) the corresponding token of the romanized name, otherwise
 * a direct kana conversion of the native part itself.
 *
 * @param nameOriginal Native name as stored on the character record.
 * @param romanizedName Romanized full name used for fallback conversion.
 * @param firstNameHint Optional given-name hint.
 * @param lastNameHint Optional family-name hint.
 */
function generateNameReadings(
  nameOriginal: string,
  romanizedName: string,
  firstNameHint?: string,
  lastNameHint?: string,
): NameReadings {
  const trimmed = nameOriginal.trim();
  if (!trimmed) {
    // Blank input: all readings empty.
    return {
      hasSpace: false,
      original: '',
      full: '',
      family: '',
      given: '',
    };
  }
  const nameParts = splitJapaneseName(trimmed, firstNameHint, lastNameHint);
  if (!nameParts.hasSpace || !nameParts.family || !nameParts.given) {
    // Unsplittable name: one reading serves as full, family, and given.
    const full = containsKanji(trimmed)
      ? buildReadingFromRomanized(romanizedName)
      : buildReading(trimmed);
    return {
      hasSpace: false,
      original: trimmed,
      full,
      family: full,
      given: full,
    };
  }
  // Tokenize the romanized name: token 0 is used as the family fallback,
  // the remaining tokens (joined) as the given fallback.
  const romanizedParts = romanizedName
    .trim()
    .split(/\s+/)
    .filter((part) => part.length > 0);
  const familyFromHints = buildReadingFromHint(lastNameHint || '');
  const givenFromHints = buildReadingFromHint(firstNameHint || '');
  const familyRomajiFallback = romanizedParts[0] || '';
  const givenRomajiFallback = romanizedParts.slice(1).join(' ');
  const family =
    familyFromHints ||
    (containsKanji(nameParts.family)
      ? buildReadingFromRomanized(familyRomajiFallback)
      : buildReading(nameParts.family));
  const given =
    givenFromHints ||
    (containsKanji(nameParts.given)
      ? buildReadingFromRomanized(givenRomajiFallback)
      : buildReading(nameParts.given));
  // Concatenated part readings; whole-name conversion is only reached when
  // both parts came back empty (the template literal is then '').
  const full =
    `${family}${given}` || buildReading(trimmed) || buildReadingFromRomanized(romanizedName);
  return {
    hasSpace: true,
    original: nameParts.original,
    full,
    family,
    given,
  };
}
function expandRawNameVariants(rawName: string): string[] {
const trimmed = rawName.trim();
if (!trimmed) return [];
@@ -555,24 +829,125 @@ function buildNameTerms(character: CharacterRecord): string[] {
}
}
const nativeParts = splitJapaneseName(
character.nativeName,
character.firstNameHint,
character.lastNameHint,
);
if (nativeParts.family) {
base.add(nativeParts.family);
}
if (nativeParts.given) {
base.add(nativeParts.given);
}
const withHonorifics = new Set<string>();
for (const entry of base) {
withHonorifics.add(entry);
for (const suffix of HONORIFIC_SUFFIXES) {
withHonorifics.add(`${entry}${suffix}`);
withHonorifics.add(`${entry}${suffix.term}`);
}
}
for (const alias of addRomanizedKanaAliases(withHonorifics)) {
withHonorifics.add(alias);
for (const suffix of HONORIFIC_SUFFIXES) {
withHonorifics.add(`${alias}${suffix}`);
withHonorifics.add(`${alias}${suffix.term}`);
}
}
return [...withHonorifics].filter((entry) => entry.trim().length > 0);
}
// Month-number → display-name lookup pairs (1-based, calendar order).
const MONTH_NAMES: ReadonlyArray<[number, string]> = [
  'January',
  'February',
  'March',
  'April',
  'May',
  'June',
  'July',
  'August',
  'September',
  'October',
  'November',
  'December',
].map((monthName, index): [number, string] => [index + 1, monthName]);
// Normalized (lower-cased) AniList gender value → display label with symbol.
// Both the single-letter and full-word forms map to the same label.
const SEX_DISPLAY: ReadonlyArray<[string, string]> = (
  ['m', 'f', 'male', 'female'] as const
).map((key): [string, string] => [key, key.startsWith('m') ? '♂ Male' : '♀ Female']);
/**
 * Formats a [month, day] birthday tuple as e.g. "April 10".
 * Returns '' when no birthday is set; an out-of-range month number renders
 * its name as "Unknown".
 */
function formatBirthday(birthday: CharacterBirthday | null): string {
  if (!birthday) {
    return '';
  }
  const [month, day] = birthday;
  const match = MONTH_NAMES.find((entry) => entry[0] === month);
  const monthName = match ? match[1] : 'Unknown';
  return `${monthName} ${day}`;
}
/**
 * Builds the one-line stats summary shown in a character glossary entry
 * (sex, age, blood type, birthday), joined with " • ". Fields whose source
 * value is blank are omitted; returns '' when nothing is known.
 */
function formatCharacterStats(character: CharacterRecord): string {
  const normalizedSex = character.sex.trim().toLowerCase();
  const sexDisplay = SEX_DISPLAY.find(([key]) => key === normalizedSex)?.[1];
  const age = character.age.trim();
  const bloodType = character.bloodType.trim();
  const birthday = formatBirthday(character.birthday);
  const segments = [
    sexDisplay ?? '',
    age ? `${age} years` : '',
    bloodType ? `Blood Type ${bloodType}` : '',
    birthday ? `Birthday: ${birthday}` : '',
  ];
  return segments.filter((segment) => segment.length > 0).join(' • ');
}
/**
 * Resolves the kana reading for one generated dictionary term.
 *
 * Honorific suffixes are stripped recursively and their readings re-appended.
 * The remaining base term is matched against the full/family/given name forms
 * (including whitespace- and middle-dot-stripped variants) so the precomputed
 * readings can be reused; kana-only terms convert directly and romanized
 * terms go through romaji conversion. Returns '' when no reading is known.
 */
function buildReadingForTerm(
  term: string,
  character: CharacterRecord,
  readings: NameReadings,
  nameParts: JapaneseNameParts,
): string {
  for (const suffix of HONORIFIC_SUFFIXES) {
    if (term.endsWith(suffix.term) && term.length > suffix.term.length) {
      // Strip the honorific, resolve the base term's reading, then re-append
      // the honorific's reading. An empty base reading propagates as ''.
      const baseTerm = term.slice(0, -suffix.term.length);
      const baseReading = buildReadingForTerm(baseTerm, character, readings, nameParts);
      return baseReading ? `${baseReading}${suffix.reading}` : '';
    }
  }
  // Variants of the full native name: raw, whitespace-stripped, dot-stripped.
  const compactNative = character.nativeName.replace(/[\s\u3000]+/g, '');
  const noMiddleDotsNative = compactNative.replace(/[・・·•]/g, '');
  if (
    term === character.nativeName ||
    term === compactNative ||
    term === noMiddleDotsNative ||
    term === nameParts.original ||
    term === nameParts.combined
  ) {
    return readings.full;
  }
  const familyCompact = nameParts.family?.replace(/[・・·•]/g, '') || '';
  if (nameParts.family && (term === nameParts.family || term === familyCompact)) {
    return readings.family;
  }
  const givenCompact = nameParts.given?.replace(/[・・·•]/g, '') || '';
  if (nameParts.given && (term === nameParts.given || term === givenCompact)) {
    return readings.given;
  }
  const compact = term.replace(/[\s\u3000]+/g, '');
  if (hasKanaOnly(compact)) {
    // Kana-only terms: derive the reading directly from the term itself.
    return buildReading(compact);
  }
  if (isRomanizedName(term)) {
    // Romanized alias: convert, falling back to the full-name reading.
    return buildReadingFromRomanized(term) || readings.full;
  }
  // Unknown script or shape: no reading available.
  return '';
}
function parseCharacterDescription(raw: string): {
fields: Array<{ key: string; value: string }>;
text: string;
@@ -623,16 +998,16 @@ function roleInfo(role: CharacterDictionaryRole): { tag: string; score: number }
/**
 * Maps an AniList character-role string to the internal dictionary role.
 *
 * Fix: the previous body contained two contradictory branch sets with an
 * unconditional `return` between them, leaving the second set unreachable
 * (an artifact of merged old/new revisions). This keeps the intended mapping
 * — MAIN→'main', SUPPORTING→'primary', BACKGROUND→'side' — which matches the
 * labels/badges used elsewhere in this file, and defaults unknowns to 'side'.
 *
 * @param input Raw role string from the AniList edge (case-insensitive).
 * @returns The normalized CharacterDictionaryRole.
 */
function mapRole(input: string | null | undefined): CharacterDictionaryRole {
  const value = (input || '').trim().toUpperCase();
  if (value === 'MAIN') return 'main';
  if (value === 'SUPPORTING') return 'primary';
  if (value === 'BACKGROUND') return 'side';
  // Unknown or missing role: treat as a side character.
  return 'side';
}
/**
 * Returns the user-facing label for a dictionary role badge.
 *
 * Fix: the previous body had two label sets with an unconditional `return`
 * between them, making the second set unreachable (merged-revision artifact).
 * The newer labels are kept — they are the ones the glossary renders directly
 * (`content: roleLabel(character.role)` without a trailing " Character").
 *
 * @param role Normalized dictionary role.
 * @returns Display label, defaulting to 'Minor Role' for the remaining case.
 */
function roleLabel(role: CharacterDictionaryRole): string {
  if (role === 'main') return 'Protagonist';
  if (role === 'primary') return 'Main Character';
  if (role === 'side') return 'Side Character';
  return 'Minor Role';
}
function inferImageExt(contentType: string | null): string {
@@ -780,10 +1155,10 @@ function roleBadgeStyle(role: CharacterDictionaryRole): Record<string, string> {
fontWeight: 'bold',
color: '#fff',
};
if (role === 'main') return { ...base, backgroundColor: '#4a8c3f' };
if (role === 'primary') return { ...base, backgroundColor: '#5c82b0' };
if (role === 'side') return { ...base, backgroundColor: '#7889a0' };
return { ...base, backgroundColor: '#777' };
if (role === 'main') return { ...base, backgroundColor: '#4CAF50' };
if (role === 'primary') return { ...base, backgroundColor: '#2196F3' };
if (role === 'side') return { ...base, backgroundColor: '#FF9800' };
return { ...base, backgroundColor: '#9E9E9E' };
}
function buildCollapsibleSection(
@@ -939,10 +1314,11 @@ function createDefinitionGlossary(
content: {
tag: 'span',
style: roleBadgeStyle(character.role),
content: `${roleLabel(character.role)} Character`,
content: roleLabel(character.role),
},
});
const statsLine = formatCharacterStats(character);
if (descriptionText) {
content.push(
buildCollapsibleSection(
@@ -953,11 +1329,21 @@ function createDefinitionGlossary(
);
}
if (fields.length > 0) {
const fieldItems: Array<Record<string, unknown>> = fields.map((f) => ({
const fieldItems: Array<Record<string, unknown>> = [];
if (statsLine) {
fieldItems.push({
tag: 'li',
style: { fontWeight: 'bold' },
content: statsLine,
});
}
fieldItems.push(
...fields.map((f) => ({
tag: 'li',
content: `${f.key}: ${f.value}`,
}));
})),
);
if (fieldItems.length > 0) {
content.push(
buildCollapsibleSection(
'Character Information',
@@ -1248,12 +1634,21 @@ async function fetchCharactersForMedia(
node {
id
description(asHtml: false)
gender
age
dateOfBirth {
month
day
}
bloodType
image {
large
medium
}
name {
first
full
last
native
alternative
}
@@ -1287,7 +1682,9 @@ async function fetchCharactersForMedia(
for (const edge of edges) {
const node = edge?.node;
if (!node || typeof node.id !== 'number') continue;
const firstNameHint = node.name?.first?.trim() || '';
const fullName = node.name?.full?.trim() || '';
const lastNameHint = node.name?.last?.trim() || '';
const nativeName = node.name?.native?.trim() || '';
const alternativeNames = [
...new Set(
@@ -1297,7 +1694,7 @@ async function fetchCharactersForMedia(
.filter((value) => value.length > 0),
),
];
if (!fullName && !nativeName && alternativeNames.length === 0) continue;
if (!nativeName) continue;
const voiceActors: VoiceActorRecord[] = [];
for (const va of edge?.voiceActors ?? []) {
if (!va || typeof va.id !== 'number') continue;
@@ -1314,11 +1711,25 @@ async function fetchCharactersForMedia(
characters.push({
id: node.id,
role: mapRole(edge?.role),
firstNameHint,
fullName,
lastNameHint,
nativeName,
alternativeNames,
bloodType: node.bloodType?.trim() || '',
birthday:
typeof node.dateOfBirth?.month === 'number' && typeof node.dateOfBirth?.day === 'number'
? [node.dateOfBirth.month, node.dateOfBirth.day]
: null,
description: node.description || '',
imageUrl: node.image?.large || node.image?.medium || null,
age:
typeof node.age === 'string'
? node.age.trim()
: typeof node.age === 'number'
? String(node.age)
: '',
sex: node.gender?.trim() || '',
voiceActors,
});
}
@@ -1400,9 +1811,9 @@ function buildSnapshotFromCharacters(
) => boolean,
): CharacterDictionarySnapshot {
const termEntries: CharacterDictionaryTermEntry[] = [];
const seen = new Set<string>();
for (const character of characters) {
const seenTerms = new Set<string>();
const imagePath = imagesByCharacterId.get(character.id)?.path ?? null;
const vaImagePaths = new Map<number, string>();
for (const va of character.voiceActors) {
@@ -1417,11 +1828,21 @@ function buildSnapshotFromCharacters(
getCollapsibleSectionOpenState,
);
const candidateTerms = buildNameTerms(character);
const nameParts = splitJapaneseName(
character.nativeName,
character.firstNameHint,
character.lastNameHint,
);
const readings = generateNameReadings(
character.nativeName,
character.fullName,
character.firstNameHint,
character.lastNameHint,
);
for (const term of candidateTerms) {
const reading = buildReading(term);
const dedupeKey = `${term}|${reading}|${character.role}`;
if (seen.has(dedupeKey)) continue;
seen.add(dedupeKey);
if (seenTerms.has(term)) continue;
seenTerms.add(term);
const reading = buildReadingForTerm(term, character, readings, nameParts);
termEntries.push(buildTermEntry(term, reading, character.role, glossary));
}
}
@@ -1560,7 +1981,10 @@ function buildMergedRevision(mediaIds: number[], snapshots: CharacterDictionaryS
}
export function createCharacterDictionaryRuntimeService(deps: CharacterDictionaryRuntimeDeps): {
getOrCreateCurrentSnapshot: (targetPath?: string) => Promise<CharacterDictionarySnapshotResult>;
getOrCreateCurrentSnapshot: (
targetPath?: string,
progress?: CharacterDictionarySnapshotProgressCallbacks,
) => Promise<CharacterDictionarySnapshotResult>;
buildMergedDictionary: (mediaIds: number[]) => Promise<MergedCharacterDictionaryBuildResult>;
generateForCurrentMedia: (
targetPath?: string,
@@ -1606,6 +2030,7 @@ export function createCharacterDictionaryRuntimeService(deps: CharacterDictionar
mediaId: number,
mediaTitleHint?: string,
beforeRequest?: () => Promise<void>,
progress?: CharacterDictionarySnapshotProgressCallbacks,
): Promise<CharacterDictionarySnapshotResult> => {
const snapshotPath = getSnapshotPath(outputDir, mediaId);
const cachedSnapshot = readSnapshot(snapshotPath);
@@ -1620,6 +2045,10 @@ export function createCharacterDictionaryRuntimeService(deps: CharacterDictionar
};
}
progress?.onGenerating?.({
mediaId,
mediaTitle: mediaTitleHint || `AniList ${mediaId}`,
});
deps.logInfo?.(`[dictionary] snapshot miss for AniList ${mediaId}, fetching characters`);
const { mediaTitle: fetchedMediaTitle, characters } = await fetchCharactersForMedia(
@@ -1700,7 +2129,10 @@ export function createCharacterDictionaryRuntimeService(deps: CharacterDictionar
};
return {
getOrCreateCurrentSnapshot: async (targetPath?: string) => {
getOrCreateCurrentSnapshot: async (
targetPath?: string,
progress?: CharacterDictionarySnapshotProgressCallbacks,
) => {
let hasAniListRequest = false;
const waitForAniListRequestSlot = async (): Promise<void> => {
if (!hasAniListRequest) {
@@ -1710,7 +2142,16 @@ export function createCharacterDictionaryRuntimeService(deps: CharacterDictionar
await sleepMs(ANILIST_REQUEST_DELAY_MS);
};
const resolvedMedia = await resolveCurrentMedia(targetPath, waitForAniListRequestSlot);
return getOrCreateSnapshot(resolvedMedia.id, resolvedMedia.title, waitForAniListRequestSlot);
progress?.onChecking?.({
mediaId: resolvedMedia.id,
mediaTitle: resolvedMedia.title,
});
return getOrCreateSnapshot(
resolvedMedia.id,
resolvedMedia.title,
waitForAniListRequestSlot,
progress,
);
},
buildMergedDictionary: async (mediaIds: number[]) => {
const normalizedMediaIds = mediaIds

View File

@@ -0,0 +1,92 @@
import assert from 'node:assert/strict';
import test from 'node:test';
import {
notifyCharacterDictionaryAutoSyncStatus,
type CharacterDictionaryAutoSyncNotificationEvent,
} from './character-dictionary-auto-sync-notifications';
// Builds a notification-event fixture with fixed media metadata so each test
// only has to vary the phase and message.
function makeEvent(
  phase: CharacterDictionaryAutoSyncNotificationEvent['phase'],
  message: string,
): CharacterDictionaryAutoSyncNotificationEvent {
  const event: CharacterDictionaryAutoSyncNotificationEvent = {
    phase,
    mediaId: 101291,
    mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
    message,
  };
  return event;
}
// Every progress phase must be forwarded to the OSD channel, in call order,
// and never to the desktop-notification channel.
test('auto sync notifications send osd updates for progress phases', () => {
  const calls: string[] = [];
  const makeDeps = () => ({
    getNotificationType: () => 'osd' as const,
    showOsd: (message: string) => calls.push(`osd:${message}`),
    showDesktopNotification: (title: string, options: { body?: string }) =>
      calls.push(`desktop:${title}:${options.body ?? ''}`),
  });
  for (const phase of ['checking', 'generating', 'syncing', 'importing', 'ready'] as const) {
    notifyCharacterDictionaryAutoSyncStatus(makeEvent(phase, phase), makeDeps());
  }
  assert.deepEqual(calls, [
    'osd:checking',
    'osd:generating',
    'osd:syncing',
    'osd:importing',
    'osd:ready',
  ]);
});
// Even with the 'both' preference, only the OSD channel is ever used —
// desktop notifications must never fire for any phase, including 'failed'.
test('auto sync notifications never send desktop notifications', () => {
  const calls: string[] = [];
  const makeDeps = () => ({
    getNotificationType: () => 'both' as const,
    showOsd: (message: string) => calls.push(`osd:${message}`),
    showDesktopNotification: (title: string, options: { body?: string }) =>
      calls.push(`desktop:${title}:${options.body ?? ''}`),
  });
  for (const phase of ['syncing', 'importing', 'ready', 'failed'] as const) {
    notifyCharacterDictionaryAutoSyncStatus(makeEvent(phase, phase), makeDeps());
  }
  assert.deepEqual(calls, ['osd:syncing', 'osd:importing', 'osd:ready', 'osd:failed']);
});

View File

@@ -0,0 +1,34 @@
import type { CharacterDictionaryAutoSyncStatusEvent } from './character-dictionary-auto-sync';
import type { StartupOsdSequencerCharacterDictionaryEvent } from './startup-osd-sequencer';
/** Event payload forwarded from the auto-sync runtime's status callback. */
export type CharacterDictionaryAutoSyncNotificationEvent = CharacterDictionaryAutoSyncStatusEvent;
/** Injected side-effect surface used when routing a sync status event to the user. */
export interface CharacterDictionaryAutoSyncNotificationDeps {
  // Current user notification preference; only 'none' suppresses output here.
  getNotificationType: () => 'osd' | 'system' | 'both' | 'none' | undefined;
  // Shows a transient on-screen (player OSD) message.
  showOsd: (message: string) => void;
  // Desktop notification hook; not invoked by this module's notify function.
  showDesktopNotification: (title: string, options: { body?: string }) => void;
  // Optional startup sequencer; when present it takes over OSD presentation.
  startupOsdSequencer?: {
    notifyCharacterDictionaryStatus: (event: StartupOsdSequencerCharacterDictionaryEvent) => void;
  };
}
// Only the explicit 'none' preference suppresses on-screen display; an
// unset preference defaults to showing OSD messages.
function shouldShowOsd(type: 'osd' | 'system' | 'both' | 'none' | undefined): boolean {
  switch (type) {
    case 'none':
      return false;
    default:
      return true;
  }
}
/**
 * Routes an auto-sync status event to the user-visible notification layer.
 * Suppressed entirely when notifications are disabled; otherwise the startup
 * OSD sequencer (when provided) takes precedence over the direct OSD call.
 * Desktop notifications are intentionally never emitted from here.
 */
export function notifyCharacterDictionaryAutoSyncStatus(
  event: CharacterDictionaryAutoSyncNotificationEvent,
  deps: CharacterDictionaryAutoSyncNotificationDeps,
): void {
  if (!shouldShowOsd(deps.getNotificationType())) {
    // Notifications disabled — drop the event.
    return;
  }
  const sequencer = deps.startupOsdSequencer;
  if (sequencer) {
    // During startup the sequencer owns OSD ordering; hand the event off.
    sequencer.notifyCharacterDictionaryStatus({
      phase: event.phase,
      message: event.message,
    });
    return;
  }
  deps.showOsd(event.message);
}

View File

@@ -9,6 +9,14 @@ function makeTempDir(): string {
return fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-char-dict-auto-sync-'));
}
// Exposes a promise together with its resolver so a test can settle it at a
// moment of its choosing (the executor runs synchronously, so the resolver
// is always assigned before this function returns).
function createDeferred<T>(): { promise: Promise<T>; resolve: (value: T) => void } {
  let settle!: (value: T) => void;
  const promise = new Promise<T>((promiseResolve) => {
    settle = promiseResolve;
  });
  return { promise, resolve: settle };
}
test('auto sync imports merged dictionary and persists MRU state', async () => {
const userDataPath = makeTempDir();
const imported: string[] = [];
@@ -267,3 +275,373 @@ test('auto sync evicts least recently used media from merged set', async () => {
};
assert.deepEqual(state.activeMediaIds, [4, 3, 2]);
});
// After a successful first-time sync the runtime must report exactly one
// completion with changed=true for the current media.
test('auto sync invokes completion callback after successful sync', async () => {
  const userDataPath = makeTempDir();
  const completed: Array<{ mediaId: number; mediaTitle: string; changed: boolean }> = [];
  let installedRevision: string | null = null;
  const runtime = createCharacterDictionaryAutoSyncRuntimeService({
    userDataPath,
    getConfig: () => ({ enabled: true, maxLoaded: 3, profileScope: 'all' }),
    getOrCreateCurrentSnapshot: async () => ({
      mediaId: 101291,
      mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
      entryCount: 2560,
      fromCache: false,
      updatedAt: 1000,
    }),
    buildMergedDictionary: async () => ({
      zipPath: '/tmp/merged.zip',
      revision: 'rev-101291',
      dictionaryTitle: 'SubMiner Character Dictionary',
      entryCount: 2560,
    }),
    // Reports the dictionary as absent until an import has happened.
    getYomitanDictionaryInfo: async () => {
      if (!installedRevision) return [];
      return [{ title: 'SubMiner Character Dictionary', revision: installedRevision }];
    },
    importYomitanDictionary: async () => {
      installedRevision = 'rev-101291';
      return true;
    },
    deleteYomitanDictionary: async () => true,
    upsertYomitanDictionarySettings: async () => true,
    now: () => 1000,
    onSyncComplete: (completion) => {
      completed.push(completion);
    },
  });
  await runtime.runSyncNow();
  assert.deepEqual(completed, [
    {
      mediaId: 101291,
      mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
      changed: true,
    },
  ]);
});
// Verifies the full ordered status-event sequence for a cache-miss sync:
// checking -> generating -> syncing -> building -> importing -> ready.
test('auto sync emits progress events for start import and completion', async () => {
  const userDataPath = makeTempDir();
  const events: Array<{
    phase: 'checking' | 'generating' | 'syncing' | 'building' | 'importing' | 'ready' | 'failed';
    mediaId?: number;
    mediaTitle?: string;
    message: string;
    changed?: boolean;
  }> = [];
  let importedRevision: string | null = null;
  const runtime = createCharacterDictionaryAutoSyncRuntimeService({
    userDataPath,
    getConfig: () => ({
      enabled: true,
      maxLoaded: 3,
      profileScope: 'all',
    }),
    getOrCreateCurrentSnapshot: async (_targetPath, progress) => {
      // Simulate the snapshot service reporting both early progress phases.
      progress?.onChecking?.({
        mediaId: 101291,
        mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
      });
      progress?.onGenerating?.({
        mediaId: 101291,
        mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
      });
      return {
        mediaId: 101291,
        mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
        entryCount: 2560,
        fromCache: false,
        updatedAt: 1000,
      };
    },
    buildMergedDictionary: async () => ({
      zipPath: '/tmp/merged.zip',
      revision: 'rev-101291',
      dictionaryTitle: 'SubMiner Character Dictionary',
      entryCount: 2560,
    }),
    getYomitanDictionaryInfo: async () =>
      importedRevision
        ? [{ title: 'SubMiner Character Dictionary', revision: importedRevision }]
        : [],
    importYomitanDictionary: async () => {
      importedRevision = 'rev-101291';
      return true;
    },
    deleteYomitanDictionary: async () => true,
    upsertYomitanDictionarySettings: async () => true,
    now: () => 1000,
    onSyncStatus: (event) => {
      events.push(event);
    },
  });
  await runtime.runSyncNow();
  // Every phase must appear exactly once, in order, with its formatted message.
  assert.deepEqual(events, [
    {
      phase: 'checking',
      mediaId: 101291,
      mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
      message: 'Checking character dictionary for Rascal Does Not Dream of Bunny Girl Senpai...',
    },
    {
      phase: 'generating',
      mediaId: 101291,
      mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
      message: 'Generating character dictionary for Rascal Does Not Dream of Bunny Girl Senpai...',
    },
    {
      phase: 'syncing',
      mediaId: 101291,
      mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
      message: 'Updating character dictionary for Rascal Does Not Dream of Bunny Girl Senpai...',
    },
    {
      phase: 'building',
      mediaId: 101291,
      mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
      message: 'Building character dictionary for Rascal Does Not Dream of Bunny Girl Senpai...',
    },
    {
      phase: 'importing',
      mediaId: 101291,
      mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
      message: 'Importing character dictionary for Rascal Does Not Dream of Bunny Girl Senpai...',
    },
    {
      phase: 'ready',
      mediaId: 101291,
      mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
      message: 'Character dictionary ready for Rascal Does Not Dream of Bunny Girl Senpai',
      changed: true,
    },
  ]);
});
// 'checking' must be emitted before the snapshot promise settles, and a
// cache hit must never produce a 'generating' phase.
test('auto sync emits checking before snapshot resolves and skips generating on cache hit', async () => {
  const userDataPath = makeTempDir();
  const events: Array<{
    phase: 'checking' | 'generating' | 'syncing' | 'building' | 'importing' | 'ready' | 'failed';
    mediaId?: number;
    mediaTitle?: string;
    message: string;
    changed?: boolean;
  }> = [];
  const snapshotDeferred = createDeferred<{
    mediaId: number;
    mediaTitle: string;
    entryCount: number;
    fromCache: boolean;
    updatedAt: number;
  }>();
  let importedRevision: string | null = null;
  const runtime = createCharacterDictionaryAutoSyncRuntimeService({
    userDataPath,
    getConfig: () => ({
      enabled: true,
      maxLoaded: 3,
      profileScope: 'all',
    }),
    getOrCreateCurrentSnapshot: async (_targetPath, progress) => {
      progress?.onChecking?.({
        mediaId: 101291,
        mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
      });
      // Keep the snapshot pending so the test can observe intermediate state.
      return await snapshotDeferred.promise;
    },
    buildMergedDictionary: async () => ({
      zipPath: '/tmp/merged.zip',
      revision: 'rev-101291',
      dictionaryTitle: 'SubMiner Character Dictionary',
      entryCount: 2560,
    }),
    getYomitanDictionaryInfo: async () =>
      importedRevision
        ? [{ title: 'SubMiner Character Dictionary', revision: importedRevision }]
        : [],
    importYomitanDictionary: async () => {
      importedRevision = 'rev-101291';
      return true;
    },
    deleteYomitanDictionary: async () => true,
    upsertYomitanDictionarySettings: async () => true,
    now: () => 1000,
    onSyncStatus: (event) => {
      events.push(event);
    },
  });
  const syncPromise = runtime.runSyncNow();
  // One microtask turn: enough for onChecking, but not for the snapshot.
  await Promise.resolve();
  assert.deepEqual(events, [
    {
      phase: 'checking',
      mediaId: 101291,
      mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
      message: 'Checking character dictionary for Rascal Does Not Dream of Bunny Girl Senpai...',
    },
  ]);
  snapshotDeferred.resolve({
    mediaId: 101291,
    mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
    entryCount: 2560,
    fromCache: true,
    updatedAt: 1000,
  });
  await syncPromise;
  // fromCache=true → the snapshot service skipped generation entirely.
  assert.equal(
    events.some((event) => event.phase === 'generating'),
    false,
  );
});
// While buildMergedDictionary is still pending, a 'building' status event
// must already have been emitted.
test('auto sync emits building while merged dictionary generation is in flight', async () => {
  const userDataPath = makeTempDir();
  const events: Array<{
    phase: 'checking' | 'generating' | 'building' | 'syncing' | 'importing' | 'ready' | 'failed';
    mediaId?: number;
    mediaTitle?: string;
    message: string;
    changed?: boolean;
  }> = [];
  const buildDeferred = createDeferred<{
    zipPath: string;
    revision: string;
    dictionaryTitle: string;
    entryCount: number;
  }>();
  let importedRevision: string | null = null;
  const runtime = createCharacterDictionaryAutoSyncRuntimeService({
    userDataPath,
    getConfig: () => ({
      enabled: true,
      maxLoaded: 3,
      profileScope: 'all',
    }),
    getOrCreateCurrentSnapshot: async (_targetPath, progress) => {
      progress?.onChecking?.({
        mediaId: 101291,
        mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
      });
      return {
        mediaId: 101291,
        mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
        entryCount: 2560,
        fromCache: true,
        updatedAt: 1000,
      };
    },
    // The merged build stays pending until the test resolves it below.
    buildMergedDictionary: async () => await buildDeferred.promise,
    getYomitanDictionaryInfo: async () =>
      importedRevision
        ? [{ title: 'SubMiner Character Dictionary', revision: importedRevision }]
        : [],
    importYomitanDictionary: async () => {
      importedRevision = 'rev-101291';
      return true;
    },
    deleteYomitanDictionary: async () => true,
    upsertYomitanDictionarySettings: async () => true,
    now: () => 1000,
    onSyncStatus: (event) => {
      events.push(event);
    },
  });
  const syncPromise = runtime.runSyncNow();
  // One microtask turn: the runtime reaches the build step and reports it.
  await Promise.resolve();
  assert.equal(
    events.some((event) => event.phase === 'building'),
    true,
  );
  buildDeferred.resolve({
    zipPath: '/tmp/merged.zip',
    revision: 'rev-101291',
    dictionaryTitle: 'SubMiner Character Dictionary',
    entryCount: 2560,
  });
  await syncPromise;
});
// Yomitan mutations (info/import/settings) must not start until the optional
// waitForYomitanMutationReady gate resolves; the merged build may run first.
test('auto sync waits for tokenization-ready gate before Yomitan mutations', async () => {
  const userDataPath = makeTempDir();
  // Inline deferred acting as the tokenization-ready gate.
  const gate = (() => {
    let resolve!: () => void;
    const promise = new Promise<void>((nextResolve) => {
      resolve = nextResolve;
    });
    return { promise, resolve };
  })();
  const calls: string[] = [];
  const runtime = createCharacterDictionaryAutoSyncRuntimeService({
    userDataPath,
    getConfig: () => ({
      enabled: true,
      maxLoaded: 3,
      profileScope: 'all',
    }),
    getOrCreateCurrentSnapshot: async () => ({
      mediaId: 101291,
      mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
      entryCount: 2560,
      fromCache: false,
      updatedAt: 1000,
    }),
    buildMergedDictionary: async () => {
      calls.push('build');
      return {
        zipPath: '/tmp/merged.zip',
        revision: 'rev-101291',
        dictionaryTitle: 'SubMiner Character Dictionary',
        entryCount: 2560,
      };
    },
    waitForYomitanMutationReady: async () => {
      calls.push('wait');
      await gate.promise;
    },
    getYomitanDictionaryInfo: async () => {
      calls.push('info');
      return [];
    },
    importYomitanDictionary: async () => {
      calls.push('import');
      return true;
    },
    deleteYomitanDictionary: async () => true,
    upsertYomitanDictionarySettings: async () => {
      calls.push('settings');
      return true;
    },
    now: () => 1000,
  });
  const syncPromise = runtime.runSyncNow();
  // Two microtask turns: the build runs and the gate wait begins, but nothing
  // after the gate may have executed yet.
  await Promise.resolve();
  await Promise.resolve();
  assert.deepEqual(calls, ['build', 'wait']);
  gate.resolve();
  await syncPromise;
  // Once the gate opens, the mutations proceed in order.
  assert.deepEqual(calls, ['build', 'wait', 'info', 'import', 'settings']);
});

View File

@@ -2,6 +2,7 @@ import * as fs from 'fs';
import * as path from 'path';
import type { AnilistCharacterDictionaryProfileScope } from '../../types';
import type {
CharacterDictionarySnapshotProgressCallbacks,
CharacterDictionarySnapshotResult,
MergedCharacterDictionaryBuildResult,
} from '../character-dictionary-runtime';
@@ -23,11 +24,23 @@ export interface CharacterDictionaryAutoSyncConfig {
profileScope: AnilistCharacterDictionaryProfileScope;
}
/** Status event emitted by the auto-sync runtime as it moves through phases. */
export interface CharacterDictionaryAutoSyncStatusEvent {
  // Lifecycle phase of the sync pipeline, in rough emission order.
  phase: 'checking' | 'generating' | 'syncing' | 'building' | 'importing' | 'ready' | 'failed';
  // AniList media id, present once the current media has been resolved.
  mediaId?: number;
  // Human-readable media title, present once resolved.
  mediaTitle?: string;
  // Pre-formatted, user-facing status message (used directly for OSD).
  message: string;
  // Whether the imported dictionary actually changed — set on 'ready' events.
  changed?: boolean;
}
export interface CharacterDictionaryAutoSyncRuntimeDeps {
userDataPath: string;
getConfig: () => CharacterDictionaryAutoSyncConfig;
getOrCreateCurrentSnapshot: (targetPath?: string) => Promise<CharacterDictionarySnapshotResult>;
getOrCreateCurrentSnapshot: (
targetPath?: string,
progress?: CharacterDictionarySnapshotProgressCallbacks,
) => Promise<CharacterDictionarySnapshotResult>;
buildMergedDictionary: (mediaIds: number[]) => Promise<MergedCharacterDictionaryBuildResult>;
waitForYomitanMutationReady?: () => Promise<void>;
getYomitanDictionaryInfo: () => Promise<AutoSyncDictionaryInfo[]>;
importYomitanDictionary: (zipPath: string) => Promise<boolean>;
deleteYomitanDictionary: (dictionaryTitle: string) => Promise<boolean>;
@@ -41,6 +54,8 @@ export interface CharacterDictionaryAutoSyncRuntimeDeps {
operationTimeoutMs?: number;
logInfo?: (message: string) => void;
logWarn?: (message: string) => void;
onSyncStatus?: (event: CharacterDictionaryAutoSyncStatusEvent) => void;
onSyncComplete?: (result: { mediaId: number; mediaTitle: string; changed: boolean }) => void;
}
function ensureDir(dirPath: string): void {
@@ -92,6 +107,37 @@ function arraysEqual(left: number[], right: number[]): boolean {
return true;
}
// OSD text for the phase where the resolved snapshot is being applied.
function buildSyncingMessage(mediaTitle: string): string {
  return 'Updating character dictionary for ' + mediaTitle + '...';
}
// OSD text while the cached snapshot for the current media is being checked.
function buildCheckingMessage(mediaTitle: string): string {
  return 'Checking character dictionary for ' + mediaTitle + '...';
}
// OSD text while a new snapshot is being generated (cache miss).
function buildGeneratingMessage(mediaTitle: string): string {
  return 'Generating character dictionary for ' + mediaTitle + '...';
}
// OSD text while the merged dictionary zip is being imported into Yomitan.
function buildImportingMessage(mediaTitle: string): string {
  return 'Importing character dictionary for ' + mediaTitle + '...';
}
// OSD text while the merged dictionary is being built from active snapshots.
function buildBuildingMessage(mediaTitle: string): string {
  return 'Building character dictionary for ' + mediaTitle + '...';
}
// OSD text for successful completion (no trailing ellipsis — terminal state).
function buildReadyMessage(mediaTitle: string): string {
  return 'Character dictionary ready for ' + mediaTitle;
}
// Failure message; names the media when it was resolved before the error.
function buildFailedMessage(mediaTitle: string | null, errorMessage: string): string {
  const scope = mediaTitle ? ` for ${mediaTitle}` : '';
  return `Character dictionary sync failed${scope}: ${errorMessage}`;
}
export function createCharacterDictionaryAutoSyncRuntimeService(
deps: CharacterDictionaryAutoSyncRuntimeDeps,
): {
@@ -133,84 +179,156 @@ export function createCharacterDictionaryAutoSyncRuntimeService(
return;
}
deps.logInfo?.('[dictionary:auto-sync] syncing current anime snapshot');
const snapshot = await deps.getOrCreateCurrentSnapshot();
const state = readAutoSyncState(statePath);
const nextActiveMediaIds = [
snapshot.mediaId,
...state.activeMediaIds.filter((mediaId) => mediaId !== snapshot.mediaId),
].slice(0, Math.max(1, Math.floor(config.maxLoaded)));
deps.logInfo?.(
`[dictionary:auto-sync] active AniList media set: ${nextActiveMediaIds.join(', ')}`,
);
let currentMediaId: number | undefined;
let currentMediaTitle: string | null = null;
const retainedChanged = !arraysEqual(nextActiveMediaIds, state.activeMediaIds);
let merged: MergedCharacterDictionaryBuildResult | null = null;
if (
retainedChanged ||
!state.mergedRevision ||
!state.mergedDictionaryTitle ||
!snapshot.fromCache
) {
deps.logInfo?.('[dictionary:auto-sync] rebuilding merged dictionary for active anime set');
merged = await deps.buildMergedDictionary(nextActiveMediaIds);
}
try {
deps.logInfo?.('[dictionary:auto-sync] syncing current anime snapshot');
const snapshot = await deps.getOrCreateCurrentSnapshot(undefined, {
onChecking: ({ mediaId, mediaTitle }) => {
currentMediaId = mediaId;
currentMediaTitle = mediaTitle;
deps.onSyncStatus?.({
phase: 'checking',
mediaId,
mediaTitle,
message: buildCheckingMessage(mediaTitle),
});
},
onGenerating: ({ mediaId, mediaTitle }) => {
currentMediaId = mediaId;
currentMediaTitle = mediaTitle;
deps.onSyncStatus?.({
phase: 'generating',
mediaId,
mediaTitle,
message: buildGeneratingMessage(mediaTitle),
});
},
});
currentMediaId = snapshot.mediaId;
currentMediaTitle = snapshot.mediaTitle;
deps.onSyncStatus?.({
phase: 'syncing',
mediaId: snapshot.mediaId,
mediaTitle: snapshot.mediaTitle,
message: buildSyncingMessage(snapshot.mediaTitle),
});
const state = readAutoSyncState(statePath);
const nextActiveMediaIds = [
snapshot.mediaId,
...state.activeMediaIds.filter((mediaId) => mediaId !== snapshot.mediaId),
].slice(0, Math.max(1, Math.floor(config.maxLoaded)));
deps.logInfo?.(
`[dictionary:auto-sync] active AniList media set: ${nextActiveMediaIds.join(', ')}`,
);
const dictionaryTitle = merged?.dictionaryTitle ?? state.mergedDictionaryTitle;
const revision = merged?.revision ?? state.mergedRevision;
if (!dictionaryTitle || !revision) {
throw new Error('Merged character dictionary state is incomplete.');
}
const dictionaryInfo = await withOperationTimeout(
'getYomitanDictionaryInfo',
deps.getYomitanDictionaryInfo(),
);
const existing = dictionaryInfo.find((entry) => entry.title === dictionaryTitle) ?? null;
const existingRevision =
existing && (typeof existing.revision === 'string' || typeof existing.revision === 'number')
? String(existing.revision)
: null;
const shouldImport =
merged !== null ||
existing === null ||
existingRevision === null ||
existingRevision !== revision;
if (shouldImport) {
if (existing !== null) {
await withOperationTimeout(
`deleteYomitanDictionary(${dictionaryTitle})`,
deps.deleteYomitanDictionary(dictionaryTitle),
);
}
if (merged === null) {
const retainedChanged = !arraysEqual(nextActiveMediaIds, state.activeMediaIds);
let merged: MergedCharacterDictionaryBuildResult | null = null;
if (
retainedChanged ||
!state.mergedRevision ||
!state.mergedDictionaryTitle ||
!snapshot.fromCache
) {
deps.onSyncStatus?.({
phase: 'building',
mediaId: snapshot.mediaId,
mediaTitle: snapshot.mediaTitle,
message: buildBuildingMessage(snapshot.mediaTitle),
});
deps.logInfo?.('[dictionary:auto-sync] rebuilding merged dictionary for active anime set');
merged = await deps.buildMergedDictionary(nextActiveMediaIds);
}
deps.logInfo?.(`[dictionary:auto-sync] importing merged dictionary: ${merged.zipPath}`);
const imported = await withOperationTimeout(
`importYomitanDictionary(${path.basename(merged.zipPath)})`,
deps.importYomitanDictionary(merged.zipPath),
);
if (!imported) {
throw new Error(`Failed to import dictionary ZIP: ${merged.zipPath}`);
const dictionaryTitle = merged?.dictionaryTitle ?? state.mergedDictionaryTitle;
const revision = merged?.revision ?? state.mergedRevision;
if (!dictionaryTitle || !revision) {
throw new Error('Merged character dictionary state is incomplete.');
}
await deps.waitForYomitanMutationReady?.();
const dictionaryInfo = await withOperationTimeout(
'getYomitanDictionaryInfo',
deps.getYomitanDictionaryInfo(),
);
const existing = dictionaryInfo.find((entry) => entry.title === dictionaryTitle) ?? null;
const existingRevision =
existing && (typeof existing.revision === 'string' || typeof existing.revision === 'number')
? String(existing.revision)
: null;
const shouldImport =
merged !== null ||
existing === null ||
existingRevision === null ||
existingRevision !== revision;
let changed = merged !== null;
if (shouldImport) {
deps.onSyncStatus?.({
phase: 'importing',
mediaId: snapshot.mediaId,
mediaTitle: snapshot.mediaTitle,
message: buildImportingMessage(snapshot.mediaTitle),
});
if (existing !== null) {
await withOperationTimeout(
`deleteYomitanDictionary(${dictionaryTitle})`,
deps.deleteYomitanDictionary(dictionaryTitle),
);
}
if (merged === null) {
merged = await deps.buildMergedDictionary(nextActiveMediaIds);
}
deps.logInfo?.(`[dictionary:auto-sync] importing merged dictionary: ${merged.zipPath}`);
const imported = await withOperationTimeout(
`importYomitanDictionary(${path.basename(merged.zipPath)})`,
deps.importYomitanDictionary(merged.zipPath),
);
if (!imported) {
throw new Error(`Failed to import dictionary ZIP: ${merged.zipPath}`);
}
changed = true;
}
deps.logInfo?.(`[dictionary:auto-sync] applying Yomitan settings for ${dictionaryTitle}`);
const settingsUpdated = await withOperationTimeout(
`upsertYomitanDictionarySettings(${dictionaryTitle})`,
deps.upsertYomitanDictionarySettings(dictionaryTitle, config.profileScope),
);
changed = changed || settingsUpdated === true;
writeAutoSyncState(statePath, {
activeMediaIds: nextActiveMediaIds,
mergedRevision: merged?.revision ?? revision,
mergedDictionaryTitle: merged?.dictionaryTitle ?? dictionaryTitle,
});
deps.logInfo?.(
`[dictionary:auto-sync] synced AniList ${snapshot.mediaId}: ${dictionaryTitle} (${snapshot.entryCount} entries)`,
);
deps.onSyncStatus?.({
phase: 'ready',
mediaId: snapshot.mediaId,
mediaTitle: snapshot.mediaTitle,
message: buildReadyMessage(snapshot.mediaTitle),
changed,
});
deps.onSyncComplete?.({
mediaId: snapshot.mediaId,
mediaTitle: snapshot.mediaTitle,
changed,
});
} catch (error) {
const errorMessage = (error as Error)?.message ?? String(error);
deps.onSyncStatus?.({
phase: 'failed',
mediaId: currentMediaId,
mediaTitle: currentMediaTitle ?? undefined,
message: buildFailedMessage(currentMediaTitle, errorMessage),
});
throw error;
}
deps.logInfo?.(`[dictionary:auto-sync] applying Yomitan settings for ${dictionaryTitle}`);
await withOperationTimeout(
`upsertYomitanDictionarySettings(${dictionaryTitle})`,
deps.upsertYomitanDictionarySettings(dictionaryTitle, config.profileScope),
);
writeAutoSyncState(statePath, {
activeMediaIds: nextActiveMediaIds,
mergedRevision: merged?.revision ?? revision,
mergedDictionaryTitle: merged?.dictionaryTitle ?? dictionaryTitle,
});
deps.logInfo?.(
`[dictionary:auto-sync] synced AniList ${snapshot.mediaId}: ${dictionaryTitle} (${snapshot.entryCount} entries)`,
);
};
const enqueueSync = (): void => {

View File

@@ -25,7 +25,12 @@ test('createConfigHotReloadAppliedHandler runs all hot-reload effects', () => {
applyHotReload(
{
hotReloadFields: ['shortcuts', 'secondarySub.defaultMode', 'ankiConnect.ai'],
hotReloadFields: [
'shortcuts',
'secondarySub.defaultMode',
'ankiConnect.ai',
'subtitleStyle.autoPauseVideoOnHover',
],
restartRequiredFields: [],
},
config,

View File

@@ -0,0 +1,42 @@
import assert from 'node:assert/strict';
import test from 'node:test';
import { createCurrentMediaTokenizationGate } from './current-media-tokenization-gate';
// The gate's waitUntilReady promise must stay pending until markReady is
// called for the same (current) media path.
test('current media tokenization gate waits until current path is marked ready', async () => {
  const gate = createCurrentMediaTokenizationGate();
  gate.updateCurrentMediaPath('/tmp/video-1.mkv');
  let resolved = false;
  const waitPromise = gate.waitUntilReady('/tmp/video-1.mkv').then(() => {
    resolved = true;
  });
  // Yield one microtask tick: the waiter must still be pending before markReady.
  await Promise.resolve();
  assert.equal(resolved, false);
  gate.markReady('/tmp/video-1.mkv');
  await waitPromise;
  assert.equal(resolved, true);
});
// Switching to a different media path must not leave waiters for the
// previous file hanging forever — they are released immediately.
test('current media tokenization gate resolves old waiters when media changes', async () => {
  const gate = createCurrentMediaTokenizationGate();
  gate.updateCurrentMediaPath('/tmp/video-1.mkv');
  let resolved = false;
  const waitPromise = gate.waitUntilReady('/tmp/video-1.mkv').then(() => {
    resolved = true;
  });
  gate.updateCurrentMediaPath('/tmp/video-2.mkv');
  await waitPromise;
  assert.equal(resolved, true);
});
// A path already marked ready resolves without another markReady call.
test('current media tokenization gate returns immediately for ready media', async () => {
  const gate = createCurrentMediaTokenizationGate();
  gate.updateCurrentMediaPath('/tmp/video-1.mkv');
  gate.markReady('/tmp/video-1.mkv');
  await gate.waitUntilReady('/tmp/video-1.mkv');
});

View File

@@ -0,0 +1,70 @@
/**
 * Trims a candidate media path and collapses non-string or blank values
 * to null so callers can compare paths with plain equality.
 */
function normalizeMediaPath(mediaPath: string | null | undefined): string | null {
  if (typeof mediaPath !== 'string') {
    return null;
  }
  const cleaned = mediaPath.trim();
  if (cleaned === '') {
    return null;
  }
  return cleaned;
}
export function createCurrentMediaTokenizationGate(): {
updateCurrentMediaPath: (mediaPath: string | null | undefined) => void;
markReady: (mediaPath: string | null | undefined) => void;
waitUntilReady: (mediaPath: string | null | undefined) => Promise<void>;
} {
let currentMediaPath: string | null = null;
let readyMediaPath: string | null = null;
let pendingMediaPath: string | null = null;
let pendingPromise: Promise<void> | null = null;
let resolvePending: (() => void) | null = null;
const resolvePendingWaiter = (): void => {
resolvePending?.();
resolvePending = null;
pendingPromise = null;
pendingMediaPath = null;
};
const ensurePendingPromise = (mediaPath: string): Promise<void> => {
if (pendingMediaPath === mediaPath && pendingPromise) {
return pendingPromise;
}
resolvePendingWaiter();
pendingMediaPath = mediaPath;
pendingPromise = new Promise<void>((resolve) => {
resolvePending = resolve;
});
return pendingPromise;
};
return {
updateCurrentMediaPath: (mediaPath) => {
const normalizedPath = normalizeMediaPath(mediaPath);
if (normalizedPath === currentMediaPath) {
return;
}
currentMediaPath = normalizedPath;
readyMediaPath = null;
resolvePendingWaiter();
if (normalizedPath) {
ensurePendingPromise(normalizedPath);
}
},
markReady: (mediaPath) => {
const normalizedPath = normalizeMediaPath(mediaPath);
if (!normalizedPath) {
return;
}
readyMediaPath = normalizedPath;
if (pendingMediaPath === normalizedPath) {
resolvePendingWaiter();
}
},
waitUntilReady: async (mediaPath) => {
const normalizedPath = normalizeMediaPath(mediaPath) ?? currentMediaPath;
if (!normalizedPath || readyMediaPath === normalizedPath) {
return;
}
await ensurePendingPromise(normalizedPath);
},
};
}

View File

@@ -16,12 +16,14 @@ test('overlay window factory main deps builders return mapped handlers', () => {
setOverlayDebugVisualizationEnabled: (enabled) => calls.push(`debug:${enabled}`),
isOverlayVisible: (kind) => kind === 'visible',
tryHandleOverlayShortcutLocalFallback: () => false,
forwardTabToMpv: () => calls.push('forward-tab'),
onWindowClosed: (kind) => calls.push(`closed:${kind}`),
});
const overlayDeps = buildOverlayDeps();
assert.equal(overlayDeps.isDev, true);
assert.equal(overlayDeps.isOverlayVisible('visible'), true);
overlayDeps.forwardTabToMpv();
const buildMainDeps = createBuildCreateMainWindowMainDepsHandler({
createOverlayWindow: () => ({ id: 'visible' }),
@@ -37,5 +39,5 @@ test('overlay window factory main deps builders return mapped handlers', () => {
const modalDeps = buildModalDeps();
modalDeps.setModalWindow(null);
assert.deepEqual(calls, ['set-main', 'set-modal']);
assert.deepEqual(calls, ['forward-tab', 'set-main', 'set-modal']);
});

View File

@@ -8,6 +8,7 @@ export function createBuildCreateOverlayWindowMainDepsHandler<TWindow>(deps: {
setOverlayDebugVisualizationEnabled: (enabled: boolean) => void;
isOverlayVisible: (windowKind: 'visible' | 'modal') => boolean;
tryHandleOverlayShortcutLocalFallback: (input: Electron.Input) => boolean;
forwardTabToMpv: () => void;
onWindowClosed: (windowKind: 'visible' | 'modal') => void;
},
) => TWindow;
@@ -17,6 +18,7 @@ export function createBuildCreateOverlayWindowMainDepsHandler<TWindow>(deps: {
setOverlayDebugVisualizationEnabled: (enabled: boolean) => void;
isOverlayVisible: (windowKind: 'visible' | 'modal') => boolean;
tryHandleOverlayShortcutLocalFallback: (input: Electron.Input) => boolean;
forwardTabToMpv: () => void;
onWindowClosed: (windowKind: 'visible' | 'modal') => void;
}) {
return () => ({
@@ -27,6 +29,7 @@ export function createBuildCreateOverlayWindowMainDepsHandler<TWindow>(deps: {
setOverlayDebugVisualizationEnabled: deps.setOverlayDebugVisualizationEnabled,
isOverlayVisible: deps.isOverlayVisible,
tryHandleOverlayShortcutLocalFallback: deps.tryHandleOverlayShortcutLocalFallback,
forwardTabToMpv: deps.forwardTabToMpv,
onWindowClosed: deps.onWindowClosed,
});
}

View File

@@ -15,6 +15,7 @@ test('create overlay window handler forwards options and kind', () => {
assert.equal(options.isDev, true);
assert.equal(options.isOverlayVisible('visible'), true);
assert.equal(options.isOverlayVisible('modal'), false);
options.forwardTabToMpv();
options.onRuntimeOptionsChanged();
options.setOverlayDebugVisualizationEnabled(true);
options.onWindowClosed(kind);
@@ -26,11 +27,18 @@ test('create overlay window handler forwards options and kind', () => {
setOverlayDebugVisualizationEnabled: (enabled) => calls.push(`debug:${enabled}`),
isOverlayVisible: (kind) => kind === 'visible',
tryHandleOverlayShortcutLocalFallback: () => false,
forwardTabToMpv: () => calls.push('forward-tab'),
onWindowClosed: (kind) => calls.push(`closed:${kind}`),
});
assert.equal(createOverlayWindow('visible'), window);
assert.deepEqual(calls, ['kind:visible', 'runtime-options', 'debug:true', 'closed:visible']);
assert.deepEqual(calls, [
'kind:visible',
'forward-tab',
'runtime-options',
'debug:true',
'closed:visible',
]);
});
test('create main window handler stores visible window', () => {

View File

@@ -10,6 +10,7 @@ export function createCreateOverlayWindowHandler<TWindow>(deps: {
setOverlayDebugVisualizationEnabled: (enabled: boolean) => void;
isOverlayVisible: (windowKind: OverlayWindowKind) => boolean;
tryHandleOverlayShortcutLocalFallback: (input: Electron.Input) => boolean;
forwardTabToMpv: () => void;
onWindowClosed: (windowKind: OverlayWindowKind) => void;
},
) => TWindow;
@@ -19,6 +20,7 @@ export function createCreateOverlayWindowHandler<TWindow>(deps: {
setOverlayDebugVisualizationEnabled: (enabled: boolean) => void;
isOverlayVisible: (windowKind: OverlayWindowKind) => boolean;
tryHandleOverlayShortcutLocalFallback: (input: Electron.Input) => boolean;
forwardTabToMpv: () => void;
onWindowClosed: (windowKind: OverlayWindowKind) => void;
}) {
return (kind: OverlayWindowKind): TWindow => {
@@ -29,6 +31,7 @@ export function createCreateOverlayWindowHandler<TWindow>(deps: {
setOverlayDebugVisualizationEnabled: deps.setOverlayDebugVisualizationEnabled,
isOverlayVisible: deps.isOverlayVisible,
tryHandleOverlayShortcutLocalFallback: deps.tryHandleOverlayShortcutLocalFallback,
forwardTabToMpv: deps.forwardTabToMpv,
onWindowClosed: deps.onWindowClosed,
});
};

View File

@@ -19,6 +19,7 @@ test('overlay window runtime handlers compose create/main/modal handlers', () =>
},
isOverlayVisible: (kind) => kind === 'visible',
tryHandleOverlayShortcutLocalFallback: () => false,
forwardTabToMpv: () => calls.push('forward-tab'),
onWindowClosed: (kind) => calls.push(`closed:${kind}`),
},
setMainWindow: (window) => {

View File

@@ -0,0 +1,159 @@
import assert from 'node:assert/strict';
import test from 'node:test';
import {
createStartupOsdSequencer,
type StartupOsdSequencerCharacterDictionaryEvent,
} from './startup-osd-sequencer';
/** Builds a minimal dictionary status event fixture for the sequencer tests. */
function makeDictionaryEvent(
  phase: StartupOsdSequencerCharacterDictionaryEvent['phase'],
  message: string,
): StartupOsdSequencerCharacterDictionaryEvent {
  const event: StartupOsdSequencerCharacterDictionaryEvent = { phase, message };
  return event;
}
// Dictionary progress buffered before tokenization readiness must only
// surface after annotation loading has finished.
test('startup OSD keeps dictionary progress hidden until tokenization and annotation loading finish', () => {
  const osdMessages: string[] = [];
  const sequencer = createStartupOsdSequencer({
    showOsd: (message) => {
      osdMessages.push(message);
    },
  });
  sequencer.notifyCharacterDictionaryStatus(
    makeDictionaryEvent('syncing', 'Updating character dictionary for Frieren...'),
  );
  sequencer.showAnnotationLoading('Loading subtitle annotations |');
  sequencer.markTokenizationReady();
  assert.deepEqual(osdMessages, ['Loading subtitle annotations |']);
  sequencer.showAnnotationLoading('Loading subtitle annotations /');
  assert.deepEqual(osdMessages, [
    'Loading subtitle annotations |',
    'Loading subtitle annotations /',
  ]);
  sequencer.markAnnotationLoadingComplete('Subtitle annotations loaded');
  assert.deepEqual(osdMessages, [
    'Loading subtitle annotations |',
    'Loading subtitle annotations /',
    'Updating character dictionary for Frieren...',
  ]);
});
// A newer buffered progress event replaces an older one, so only the latest
// phase is shown once the OSD becomes available.
test('startup OSD buffers checking behind annotations and replaces it with later generating progress', () => {
  const osdMessages: string[] = [];
  const sequencer = createStartupOsdSequencer({
    showOsd: (message) => {
      osdMessages.push(message);
    },
  });
  sequencer.notifyCharacterDictionaryStatus(
    makeDictionaryEvent('checking', 'Checking character dictionary for Frieren...'),
  );
  sequencer.showAnnotationLoading('Loading subtitle annotations |');
  sequencer.markTokenizationReady();
  sequencer.notifyCharacterDictionaryStatus(
    makeDictionaryEvent('generating', 'Generating character dictionary for Frieren...'),
  );
  assert.deepEqual(osdMessages, ['Loading subtitle annotations |']);
  sequencer.markAnnotationLoadingComplete('Subtitle annotations loaded');
  assert.deepEqual(osdMessages, [
    'Loading subtitle annotations |',
    'Generating character dictionary for Frieren...',
  ]);
});
// Same replacement rule for the 'building' phase.
test('startup OSD replaces earlier dictionary progress with later building progress', () => {
  const osdMessages: string[] = [];
  const sequencer = createStartupOsdSequencer({
    showOsd: (message) => {
      osdMessages.push(message);
    },
  });
  sequencer.notifyCharacterDictionaryStatus(
    makeDictionaryEvent('syncing', 'Updating character dictionary for Frieren...'),
  );
  sequencer.showAnnotationLoading('Loading subtitle annotations |');
  sequencer.markTokenizationReady();
  sequencer.notifyCharacterDictionaryStatus(
    makeDictionaryEvent('building', 'Building character dictionary for Frieren...'),
  );
  sequencer.markAnnotationLoadingComplete('Subtitle annotations loaded');
  assert.deepEqual(osdMessages, [
    'Loading subtitle annotations |',
    'Building character dictionary for Frieren...',
  ]);
});
// 'ready' that arrives while the OSD was still blocked must not be replayed
// later — the user never saw any progress for it.
test('startup OSD skips buffered dictionary ready messages when progress completed before it became visible', () => {
  const osdMessages: string[] = [];
  const sequencer = createStartupOsdSequencer({
    showOsd: (message) => {
      osdMessages.push(message);
    },
  });
  sequencer.notifyCharacterDictionaryStatus(
    makeDictionaryEvent('syncing', 'Updating character dictionary for Frieren...'),
  );
  sequencer.notifyCharacterDictionaryStatus(
    makeDictionaryEvent('ready', 'Character dictionary ready for Frieren'),
  );
  sequencer.markTokenizationReady();
  sequencer.markAnnotationLoadingComplete('Subtitle annotations loaded');
  assert.deepEqual(osdMessages, ['Subtitle annotations loaded']);
});
// Failures, unlike 'ready', are always surfaced once the OSD frees up.
test('startup OSD shows dictionary failure after annotation loading completes', () => {
  const osdMessages: string[] = [];
  const sequencer = createStartupOsdSequencer({
    showOsd: (message) => {
      osdMessages.push(message);
    },
  });
  sequencer.showAnnotationLoading('Loading subtitle annotations |');
  sequencer.notifyCharacterDictionaryStatus(
    makeDictionaryEvent('failed', 'Character dictionary sync failed for Frieren: boom'),
  );
  sequencer.markTokenizationReady();
  sequencer.markAnnotationLoadingComplete('Subtitle annotations loaded');
  assert.deepEqual(osdMessages, [
    'Loading subtitle annotations |',
    'Character dictionary sync failed for Frieren: boom',
  ]);
});
// reset() clears the tokenization-ready flag so the next media buffers again.
test('startup OSD reset requires the next media to wait for tokenization again', () => {
  const osdMessages: string[] = [];
  const sequencer = createStartupOsdSequencer({
    showOsd: (message) => {
      osdMessages.push(message);
    },
  });
  sequencer.markTokenizationReady();
  sequencer.reset();
  sequencer.notifyCharacterDictionaryStatus(
    makeDictionaryEvent('syncing', 'Updating character dictionary for Frieren...'),
  );
  assert.deepEqual(osdMessages, []);
  sequencer.markTokenizationReady();
  assert.deepEqual(osdMessages, ['Updating character dictionary for Frieren...']);
});

View File

@@ -0,0 +1,107 @@
/**
 * Character-dictionary sync status event consumed by the startup OSD
 * sequencer. Progress phases (checking/generating/syncing/building/importing)
 * may be buffered and replaced by newer ones; 'ready' and 'failed' terminate
 * the current progress run.
 */
export interface StartupOsdSequencerCharacterDictionaryEvent {
  phase: 'checking' | 'generating' | 'syncing' | 'building' | 'importing' | 'ready' | 'failed';
  // Human-readable OSD line to display for this phase.
  message: string;
}
/**
 * Orders startup OSD output: annotation-loading messages own the OSD while
 * active, and character-dictionary status lines are buffered until both
 * tokenization has finished and no annotation loading is in flight.
 */
export function createStartupOsdSequencer(deps: { showOsd: (message: string) => void }): {
  reset: () => void;
  markTokenizationReady: () => void;
  showAnnotationLoading: (message: string) => void;
  markAnnotationLoadingComplete: (message: string) => void;
  notifyCharacterDictionaryStatus: (event: StartupOsdSequencerCharacterDictionaryEvent) => void;
} {
  let isTokenizationReady = false;
  let activeAnnotationMessage: string | null = null;
  let bufferedProgress: StartupOsdSequencerCharacterDictionaryEvent | null = null;
  let bufferedFailure: StartupOsdSequencerCharacterDictionaryEvent | null = null;
  let progressVisible = false;

  // Phases that represent in-flight work; later ones replace earlier ones.
  const progressPhases: ReadonlyArray<StartupOsdSequencerCharacterDictionaryEvent['phase']> = [
    'checking',
    'generating',
    'syncing',
    'building',
    'importing',
  ];

  // Dictionary status may only surface once tokenization finished and no
  // annotation-loading spinner currently owns the OSD.
  const dictionaryStatusAllowed = (): boolean =>
    isTokenizationReady && activeAnnotationMessage === null;

  // Emit whichever buffered dictionary status is pending; returns true when
  // something was shown. Buffered progress intentionally stays pending so a
  // later ready/failed event knows progress was on screen.
  const emitBufferedDictionaryStatus = (): boolean => {
    if (!dictionaryStatusAllowed()) {
      return false;
    }
    if (bufferedProgress !== null) {
      deps.showOsd(bufferedProgress.message);
      progressVisible = true;
      return true;
    }
    if (bufferedFailure !== null) {
      deps.showOsd(bufferedFailure.message);
      bufferedFailure = null;
      progressVisible = false;
      return true;
    }
    return false;
  };

  return {
    reset: () => {
      isTokenizationReady = false;
      activeAnnotationMessage = null;
      bufferedProgress = null;
      bufferedFailure = null;
      progressVisible = false;
    },
    markTokenizationReady: () => {
      isTokenizationReady = true;
      if (activeAnnotationMessage !== null) {
        // Annotation loading owns the OSD; replay its latest frame.
        deps.showOsd(activeAnnotationMessage);
        return;
      }
      emitBufferedDictionaryStatus();
    },
    showAnnotationLoading: (message) => {
      activeAnnotationMessage = message;
      if (isTokenizationReady) {
        deps.showOsd(message);
      }
    },
    markAnnotationLoadingComplete: (message) => {
      activeAnnotationMessage = null;
      if (!isTokenizationReady) {
        return;
      }
      // Prefer buffered dictionary status over the generic "loaded" line.
      if (emitBufferedDictionaryStatus()) {
        return;
      }
      deps.showOsd(message);
    },
    notifyCharacterDictionaryStatus: (event) => {
      if (progressPhases.includes(event.phase)) {
        bufferedProgress = event;
        bufferedFailure = null;
        if (dictionaryStatusAllowed()) {
          deps.showOsd(event.message);
          progressVisible = true;
        }
        return;
      }
      bufferedProgress = null;
      if (event.phase === 'failed') {
        // Failures are never dropped: show now or buffer until allowed.
        if (dictionaryStatusAllowed()) {
          deps.showOsd(event.message);
        } else {
          bufferedFailure = event;
        }
        progressVisible = false;
        return;
      }
      // phase === 'ready': only worth showing when progress was on screen.
      bufferedFailure = null;
      if (dictionaryStatusAllowed() && progressVisible) {
        deps.showOsd(event.message);
      }
      progressVisible = false;
    },
  };
}

View File

@@ -80,6 +80,8 @@ export function createPrewarmSubtitleDictionariesMainHandler(deps: {
ensureJlptDictionaryLookup: () => Promise<void>;
ensureFrequencyDictionaryLookup: () => Promise<void>;
showMpvOsd?: (message: string) => void;
showLoadingOsd?: (message: string) => void;
showLoadedOsd?: (message: string) => void;
shouldShowOsdNotification?: () => boolean;
setInterval?: (callback: () => void, delayMs: number) => unknown;
clearInterval?: (timer: unknown) => void;
@@ -90,6 +92,8 @@ export function createPrewarmSubtitleDictionariesMainHandler(deps: {
let loadingOsdFrame = 0;
let loadingOsdTimer: unknown = null;
const showMpvOsd = deps.showMpvOsd;
const showLoadingOsd = deps.showLoadingOsd ?? showMpvOsd;
const showLoadedOsd = deps.showLoadedOsd ?? showMpvOsd;
const setIntervalHandler =
deps.setInterval ??
((callback: () => void, delayMs: number): unknown => setInterval(callback, delayMs));
@@ -99,7 +103,7 @@ export function createPrewarmSubtitleDictionariesMainHandler(deps: {
const spinnerFrames = ['|', '/', '-', '\\'];
const beginLoadingOsd = (): boolean => {
if (!showMpvOsd) {
if (!showLoadingOsd) {
return false;
}
loadingOsdDepth += 1;
@@ -108,13 +112,13 @@ export function createPrewarmSubtitleDictionariesMainHandler(deps: {
}
loadingOsdFrame = 0;
showMpvOsd(`Loading subtitle annotations ${spinnerFrames[loadingOsdFrame]}`);
showLoadingOsd(`Loading subtitle annotations ${spinnerFrames[loadingOsdFrame]}`);
loadingOsdFrame += 1;
loadingOsdTimer = setIntervalHandler(() => {
if (!showMpvOsd) {
if (!showLoadingOsd) {
return;
}
showMpvOsd(
showLoadingOsd(
`Loading subtitle annotations ${spinnerFrames[loadingOsdFrame % spinnerFrames.length]}`,
);
loadingOsdFrame += 1;
@@ -123,7 +127,7 @@ export function createPrewarmSubtitleDictionariesMainHandler(deps: {
};
const endLoadingOsd = (): void => {
if (!showMpvOsd) {
if (!showLoadedOsd) {
return;
}
@@ -136,7 +140,7 @@ export function createPrewarmSubtitleDictionariesMainHandler(deps: {
clearIntervalHandler(loadingOsdTimer);
loadingOsdTimer = null;
}
showMpvOsd('Subtitle annotations loaded');
showLoadedOsd('Subtitle annotations loaded');
};
return async (options?: { showLoadingOsd?: boolean }): Promise<void> => {

View File

@@ -38,18 +38,16 @@ test('release package scripts disable implicit electron-builder publishing', ()
assert.match(packageJson.scripts['build:appimage'] ?? '', /--publish never/);
assert.match(packageJson.scripts['build:mac'] ?? '', /--publish never/);
assert.match(packageJson.scripts['build:win'] ?? '', /--publish never/);
assert.match(packageJson.scripts['build:win:unsigned'] ?? '', /build-win-unsigned\.mjs/);
});
test('windows release workflow retries SignPath submission and fails only after exhausting attempts', () => {
assert.match(releaseWorkflow, /Submit Windows signing request \(attempt 1\)/);
assert.match(releaseWorkflow, /Submit Windows signing request \(attempt 2\)/);
assert.match(releaseWorkflow, /Submit Windows signing request \(attempt 3\)/);
assert.match(releaseWorkflow, /All SignPath signing attempts failed; rerun the workflow when SignPath is healthy\./);
});
test('windows release workflow pins the SignPath artifact configuration slug explicitly', () => {
assert.match(releaseWorkflow, /SIGNPATH_ARTIFACT_CONFIGURATION_SLUG/);
assert.match(releaseWorkflow, /artifact-configuration-slug: \$\{\{ secrets\.SIGNPATH_ARTIFACT_CONFIGURATION_SLUG \}\}/);
test('windows release workflow publishes unsigned artifacts directly without SignPath', () => {
assert.match(releaseWorkflow, /Build unsigned Windows artifacts/);
assert.match(releaseWorkflow, /run: bun run build:win:unsigned/);
assert.match(releaseWorkflow, /name: windows/);
assert.match(releaseWorkflow, /path: \|\n\s+release\/\*\.exe\n\s+release\/\*\.zip/);
assert.ok(!releaseWorkflow.includes('signpath/github-action-submit-signing-request'));
assert.ok(!releaseWorkflow.includes('SIGNPATH_'));
});
test('Makefile routes Windows install-plugin setup through bun and documents Windows builds', () => {

View File

@@ -38,6 +38,7 @@ function createContext(subtitleHeight: number) {
state: {
currentYPercent: null,
persistedSubtitlePosition: { yPercent: 10 },
isOverSubtitle: false,
},
};
}

View File

@@ -84,6 +84,19 @@ function getNextPersistedPosition(
};
}
function applyMarginBottom(ctx: RendererContext, yPercent: number): void {
const clampedPercent = clampYPercent(ctx, yPercent);
ctx.state.currentYPercent = clampedPercent;
const marginBottom = (clampedPercent / 100) * getViewportHeight();
ctx.dom.subtitleContainer.style.position = '';
ctx.dom.subtitleContainer.style.left = '';
ctx.dom.subtitleContainer.style.top = '';
ctx.dom.subtitleContainer.style.right = '';
ctx.dom.subtitleContainer.style.transform = '';
ctx.dom.subtitleContainer.style.marginBottom = `${marginBottom}px`;
}
export function createInMemorySubtitlePositionController(
ctx: RendererContext,
): SubtitlePositionController {
@@ -98,16 +111,7 @@ export function createInMemorySubtitlePositionController(
}
function applyYPercent(yPercent: number): void {
const clampedPercent = clampYPercent(ctx, yPercent);
ctx.state.currentYPercent = clampedPercent;
const marginBottom = (clampedPercent / 100) * getViewportHeight();
ctx.dom.subtitleContainer.style.position = '';
ctx.dom.subtitleContainer.style.left = '';
ctx.dom.subtitleContainer.style.top = '';
ctx.dom.subtitleContainer.style.right = '';
ctx.dom.subtitleContainer.style.transform = '';
ctx.dom.subtitleContainer.style.marginBottom = `${marginBottom}px`;
applyMarginBottom(ctx, yPercent);
}
function persistSubtitlePositionPatch(patch: Partial<SubtitlePosition>): void {

View File

@@ -374,7 +374,8 @@ async function init(): Promise<void> {
await keyboardHandlers.setupMpvInputForwarding();
subtitleRenderer.applySubtitleStyle(await window.electronAPI.getSubtitleStyle());
const initialSubtitleStyle = await window.electronAPI.getSubtitleStyle();
subtitleRenderer.applySubtitleStyle(initialSubtitleStyle);
positioning.applyStoredSubtitlePosition(
await window.electronAPI.getSubtitlePosition(),

View File

@@ -0,0 +1,108 @@
import test from 'node:test';
import assert from 'node:assert/strict';
import {
parseHyprctlClients,
selectHyprlandMpvWindow,
type HyprlandClient,
} from './hyprland-tracker';
/** Builds a visible, mapped mpv HyprlandClient fixture; tests override only what they need. */
function makeClient(overrides: Partial<HyprlandClient> = {}): HyprlandClient {
  const defaults: HyprlandClient = {
    address: '0x1',
    class: 'mpv',
    initialClass: 'mpv',
    at: [0, 0],
    size: [1280, 720],
    mapped: true,
    hidden: false,
  };
  return { ...defaults, ...overrides };
}
// Hidden or unmapped mpv clients must never be selected as the tracked window.
test('selectHyprlandMpvWindow ignores hidden and unmapped mpv clients', () => {
  const selected = selectHyprlandMpvWindow(
    [
      makeClient({
        address: '0xhidden',
        hidden: true,
      }),
      makeClient({
        address: '0xunmapped',
        mapped: false,
      }),
      makeClient({
        address: '0xvisible',
        at: [100, 200],
        size: [1920, 1080],
      }),
    ],
    {
      targetMpvSocketPath: null,
      activeWindowAddress: null,
      getWindowCommandLine: () => null,
    },
  );
  assert.equal(selected?.address, '0xvisible');
});
// When several mpv windows share the target IPC socket, the compositor's
// active window wins the tie.
test('selectHyprlandMpvWindow prefers active visible window among socket matches', () => {
  const commandLines = new Map<string, string>([
    ['10', 'mpv --input-ipc-server=/tmp/subminer.sock first.mkv'],
    ['20', 'mpv --input-ipc-server=/tmp/subminer.sock second.mkv'],
  ]);
  const selected = selectHyprlandMpvWindow(
    [
      makeClient({
        address: '0xfirst',
        pid: 10,
      }),
      makeClient({
        address: '0xsecond',
        pid: 20,
      }),
    ],
    {
      targetMpvSocketPath: '/tmp/subminer.sock',
      activeWindowAddress: '0xsecond',
      getWindowCommandLine: (pid) => commandLines.get(String(pid)) ?? null,
    },
  );
  assert.equal(selected?.address, '0xsecond');
});
// Some clients report an empty `class`; initialClass is the fallback signal.
test('selectHyprlandMpvWindow matches mpv by initialClass when class is blank', () => {
  const selected = selectHyprlandMpvWindow(
    [
      makeClient({
        address: '0xinitial',
        class: '',
        initialClass: 'mpv',
      }),
    ],
    {
      targetMpvSocketPath: null,
      activeWindowAddress: null,
      getWindowCommandLine: () => null,
    },
  );
  assert.equal(selected?.address, '0xinitial');
});
// hyprctl can emit status noise before the JSON body; parsing must skip it.
test('parseHyprctlClients tolerates non-json prefix output', () => {
  const clients = parseHyprctlClients(`ok
[{"address":"0x1","class":"mpv","initialClass":"mpv","at":[1,2],"size":[3,4]}]`);
  assert.deepEqual(clients, [
    {
      address: '0x1',
      class: 'mpv',
      initialClass: 'mpv',
      at: [1, 2],
      size: [3, 4],
    },
  ]);
});

View File

@@ -23,17 +23,120 @@ import { createLogger } from '../logger';
const log = createLogger('tracker').child('hyprland');
interface HyprlandClient {
/**
 * Subset of a `hyprctl clients -j` entry consumed by the tracker.
 * NOTE(review): `at`/`size` presumably hold [x, y] position and
 * [width, height] per the hyprctl JSON schema — confirm against hyprctl docs.
 */
export interface HyprlandClient {
  address?: string;
  class: string;
  initialClass?: string;
  at: [number, number];
  size: [number, number];
  pid?: number;
  mapped?: boolean;
  hidden?: boolean;
}
/** Inputs used by selectHyprlandMpvWindow to pick the tracked mpv window. */
interface SelectHyprlandMpvWindowOptions {
  // IPC socket path of the mpv instance we own; null accepts any mpv window.
  targetMpvSocketPath: string | null;
  // Address of the compositor's currently focused window, when known.
  activeWindowAddress: string | null;
  // Resolves a pid to its full command line (null when unavailable).
  getWindowCommandLine: (pid: number) => string | null;
}
/**
 * Strips any non-JSON preamble from hyprctl output, returning the substring
 * that starts at the first '[' or '{' (whichever comes first), or null when
 * the output is blank or contains no JSON-like opener.
 */
function extractHyprctlJsonPayload(output: string): string | null {
  const trimmed = output.trim();
  if (trimmed.length === 0) {
    return null;
  }
  let startIndex = -1;
  for (const opener of ['[', '{']) {
    const index = trimmed.indexOf(opener);
    if (index >= 0 && (startIndex < 0 || index < startIndex)) {
      startIndex = index;
    }
  }
  if (startIndex < 0) {
    return null;
  }
  return trimmed.slice(startIndex);
}
/**
 * True when a process command line points mpv at the given IPC socket,
 * accepting both `--input-ipc-server=PATH` and `--input-ipc-server PATH`.
 */
function matchesTargetSocket(commandLine: string, targetMpvSocketPath: string): boolean {
  const flagForms = [
    `--input-ipc-server=${targetMpvSocketPath}`,
    `--input-ipc-server ${targetMpvSocketPath}`,
  ];
  return flagForms.some((form) => commandLine.includes(form));
}
/**
 * Picks the focused window out of `clients` when its address is known,
 * otherwise falls back to the first candidate (or null when there are none).
 */
function preferActiveHyprlandWindow(
  clients: HyprlandClient[],
  activeWindowAddress: string | null,
): HyprlandClient | null {
  const focused = activeWindowAddress
    ? clients.find((client) => client.address === activeWindowAddress)
    : undefined;
  return focused ?? clients[0] ?? null;
}
/** True when a (possibly absent) window class name contains "mpv", case-insensitively. */
function isMpvClassName(value: string | undefined): boolean {
  if (value === undefined || value === '') {
    return false;
  }
  return value.trim().toLowerCase().includes('mpv');
}
/**
 * Selects the mpv window the tracker should follow.
 *
 * Candidates are visible (mapped, not hidden) clients whose class or
 * initialClass looks like mpv. When a target IPC socket is configured,
 * candidates are further restricted to processes whose command line
 * references that socket; in either case the focused window wins over
 * the first match.
 */
export function selectHyprlandMpvWindow(
  clients: HyprlandClient[],
  options: SelectHyprlandMpvWindowOptions,
): HyprlandClient | null {
  const isVisibleMpv = (client: HyprlandClient): boolean =>
    client.mapped !== false &&
    client.hidden !== true &&
    (isMpvClassName(client.class) || isMpvClassName(client.initialClass));
  const visibleMpvWindows = clients.filter(isVisibleMpv);

  const socketPath = options.targetMpvSocketPath;
  if (!socketPath) {
    return preferActiveHyprlandWindow(visibleMpvWindows, options.activeWindowAddress);
  }

  const socketMatches = visibleMpvWindows.filter((client) => {
    if (!client.pid) {
      return false;
    }
    const commandLine = options.getWindowCommandLine(client.pid);
    if (!commandLine) {
      return false;
    }
    return matchesTargetSocket(commandLine, socketPath);
  });
  return preferActiveHyprlandWindow(socketMatches, options.activeWindowAddress);
}
/**
 * Parses `hyprctl clients` JSON output into client entries.
 *
 * hyprctl may prefix the JSON payload with status text, so the payload is
 * first located with extractHyprctlJsonPayload. Returns null when no payload
 * is found, when the payload is not valid JSON, or when it is not a JSON
 * array — callers treat null as "no usable window data".
 */
export function parseHyprctlClients(output: string): HyprlandClient[] | null {
  const jsonPayload = extractHyprctlJsonPayload(output);
  if (!jsonPayload) {
    return null;
  }
  let parsed: unknown;
  try {
    parsed = JSON.parse(jsonPayload);
  } catch {
    // Fix: the extracted payload can still be malformed JSON (e.g. truncated
    // hyprctl output). Previously this threw; signal "unparseable" instead,
    // matching the function's tolerate-and-return-null contract.
    return null;
  }
  if (!Array.isArray(parsed)) {
    return null;
  }
  return parsed as HyprlandClient[];
}
export class HyprlandWindowTracker extends BaseWindowTracker {
private pollInterval: ReturnType<typeof setInterval> | null = null;
private eventSocket: net.Socket | null = null;
private readonly targetMpvSocketPath: string | null;
private activeWindowAddress: string | null = null;
constructor(targetMpvSocketPath?: string) {
super();
@@ -75,15 +178,7 @@ export class HyprlandWindowTracker extends BaseWindowTracker {
this.eventSocket.on('data', (data: Buffer) => {
const events = data.toString().split('\n');
for (const event of events) {
if (
event.includes('movewindow') ||
event.includes('windowtitle') ||
event.includes('openwindow') ||
event.includes('closewindow') ||
event.includes('fullscreen')
) {
this.pollGeometry();
}
this.handleSocketEvent(event);
}
});
@@ -98,10 +193,47 @@ export class HyprlandWindowTracker extends BaseWindowTracker {
this.eventSocket.connect(socketPath);
}
/**
 * Dispatches one line from the Hyprland socket2 event stream.
 * Focus changes update the tracked active-window address; geometry-affecting
 * events trigger a fresh `hyprctl` poll.
 */
private handleSocketEvent(event: string): void {
  const line = event.trim();
  if (!line) {
    return;
  }
  // Events are formatted as "NAME>>DATA".
  const [name, rawData = ''] = line.split('>>', 2);
  const data = rawData.trim();

  if (name === 'activewindowv2') {
    // An empty payload means no window currently has focus.
    this.activeWindowAddress = data || null;
    this.pollGeometry();
    return;
  }
  if (name === 'closewindow' && data === this.activeWindowAddress) {
    // The focused window went away; forget its address.
    this.activeWindowAddress = null;
  }
  switch (name) {
    case 'movewindow':
    case 'movewindowv2':
    case 'windowtitle':
    case 'windowtitlev2':
    case 'openwindow':
    case 'closewindow':
    case 'fullscreen':
    case 'changefloatingmode':
      this.pollGeometry();
      break;
    default:
      break;
  }
}
private pollGeometry(): void {
try {
const output = execSync('hyprctl clients -j', { encoding: 'utf-8' });
const clients: HyprlandClient[] = JSON.parse(output);
const output = execSync('hyprctl -j clients', { encoding: 'utf-8' });
const clients = parseHyprctlClients(output);
if (!clients) {
this.updateGeometry(null);
return;
}
const mpvWindow = this.findTargetWindow(clients);
if (mpvWindow) {
@@ -120,30 +252,11 @@ export class HyprlandWindowTracker extends BaseWindowTracker {
}
private findTargetWindow(clients: HyprlandClient[]): HyprlandClient | null {
const mpvWindows = clients.filter((client) => client.class === 'mpv');
if (!this.targetMpvSocketPath) {
return mpvWindows[0] || null;
}
for (const mpvWindow of mpvWindows) {
if (!mpvWindow.pid) {
continue;
}
const commandLine = this.getWindowCommandLine(mpvWindow.pid);
if (!commandLine) {
continue;
}
if (
commandLine.includes(`--input-ipc-server=${this.targetMpvSocketPath}`) ||
commandLine.includes(`--input-ipc-server ${this.targetMpvSocketPath}`)
) {
return mpvWindow;
}
}
return null;
return selectHyprlandMpvWindow(clients, {
targetMpvSocketPath: this.targetMpvSocketPath,
activeWindowAddress: this.activeWindowAddress,
getWindowCommandLine: (pid) => this.getWindowCommandLine(pid),
});
}
private getWindowCommandLine(pid: number): string | null {

View File

@@ -18,11 +18,51 @@ Height: 720
});
});
test('parseX11WindowGeometry preserves negative coordinates', () => {
  // Multi-monitor setups report windows left of / above the primary display.
  const winInfo = `
Absolute upper-left X: -1920
Absolute upper-left Y: -24
Width: 1920
Height: 1080
`;
  const geometry = parseX11WindowGeometry(winInfo);
  assert.deepEqual(geometry, { x: -1920, y: -24, width: 1920, height: 1080 });
});
test('parseX11WindowPid parses xprop output', () => {
  const numericPid = parseX11WindowPid('_NET_WM_PID(CARDINAL) = 4242');
  const invalidPid = parseX11WindowPid('_NET_WM_PID(CARDINAL) = not-a-number');
  assert.equal(numericPid, 4242);
  assert.equal(invalidPid, null);
});
test('X11WindowTracker searches only visible mpv windows', async () => {
  // Record every spawned command so the xdotool invocation can be inspected.
  const commands: Array<{ command: string; args: string[] }> = [];
  const tracker = new X11WindowTracker(undefined, async (command, args) => {
    commands.push({ command, args });
    switch (command) {
      case 'xdotool':
        return '123';
      case 'xwininfo':
        return `Absolute upper-left X: 0
Absolute upper-left Y: 0
Width: 640
Height: 360`;
      default:
        return '';
    }
  });
  (tracker as unknown as { pollGeometry: () => void }).pollGeometry();
  // Yield one macrotask so the async poll can run to completion.
  await new Promise((resolve) => setTimeout(resolve, 0));
  assert.deepEqual(commands[0], {
    command: 'xdotool',
    args: ['search', '--onlyvisible', '--class', 'mpv'],
  });
});
test('X11WindowTracker skips overlapping polls while one command is in flight', async () => {
let commandCalls = 0;
let release: (() => void) | undefined;

View File

@@ -39,8 +39,8 @@ export function parseX11WindowGeometry(winInfo: string): {
width: number;
height: number;
} | null {
const xMatch = winInfo.match(/Absolute upper-left X:\s*(\d+)/);
const yMatch = winInfo.match(/Absolute upper-left Y:\s*(\d+)/);
const xMatch = winInfo.match(/Absolute upper-left X:\s*(-?\d+)/);
const yMatch = winInfo.match(/Absolute upper-left Y:\s*(-?\d+)/);
const widthMatch = winInfo.match(/Width:\s*(\d+)/);
const heightMatch = winInfo.match(/Height:\s*(\d+)/);
if (!xMatch || !yMatch || !widthMatch || !heightMatch) {
@@ -112,7 +112,12 @@ export class X11WindowTracker extends BaseWindowTracker {
}
private async pollGeometryAsync(): Promise<void> {
const windowIdsOutput = await this.runCommand('xdotool', ['search', '--class', 'mpv']);
const windowIdsOutput = await this.runCommand('xdotool', [
'search',
'--onlyvisible',
'--class',
'mpv',
]);
const windowIds = windowIdsOutput.trim();
if (!windowIds) {
this.updateGeometry(null);