Compare commits

..

1 Commits

Author SHA1 Message Date
e18985fb14 Enhance AniList character dictionary sync and subtitle features (#15) 2026-03-07 18:30:59 -08:00
584 changed files with 7995 additions and 167762 deletions

View File

@@ -31,7 +31,8 @@ jobs:
path: |
~/.bun/install/cache
node_modules
key: ${{ runner.os }}-bun-${{ hashFiles('bun.lock') }}
vendor/subminer-yomitan/node_modules
key: ${{ runner.os }}-bun-${{ hashFiles('bun.lock', 'vendor/subminer-yomitan/package-lock.json') }}
restore-keys: |
${{ runner.os }}-bun-

View File

@@ -31,22 +31,23 @@ jobs:
with:
node-version: 22.12.0
- name: Install dependencies
run: bun install --frozen-lockfile
- name: Cache dependencies
uses: actions/cache@v4
with:
path: |
~/.bun/install/cache
node_modules
key: ${{ runner.os }}-bun-${{ hashFiles('bun.lock') }}
vendor/subminer-yomitan/node_modules
key: ${{ runner.os }}-bun-${{ hashFiles('bun.lock', 'vendor/subminer-yomitan/package-lock.json') }}
restore-keys: |
${{ runner.os }}-bun-
- name: Install dependencies
run: bun install --frozen-lockfile
- name: Build (TypeScript check)
run: bun run typecheck
- name: Test suite (source)
run: bun run test:fast
@@ -84,6 +85,11 @@ jobs:
with:
bun-version: 1.3.5
- name: Setup Node
uses: actions/setup-node@v4
with:
node-version: 22.12.0
- name: Cache dependencies
uses: actions/cache@v4
with:
@@ -91,7 +97,8 @@ jobs:
~/.bun/install/cache
node_modules
vendor/texthooker-ui/node_modules
key: ${{ runner.os }}-bun-${{ hashFiles('bun.lock', 'vendor/texthooker-ui/package.json') }}
vendor/subminer-yomitan/node_modules
key: ${{ runner.os }}-bun-${{ hashFiles('bun.lock', 'vendor/texthooker-ui/package.json', 'vendor/subminer-yomitan/package-lock.json') }}
restore-keys: |
${{ runner.os }}-bun-
@@ -140,6 +147,11 @@ jobs:
with:
bun-version: 1.3.5
- name: Setup Node
uses: actions/setup-node@v4
with:
node-version: 22.12.0
- name: Cache dependencies
uses: actions/cache@v4
with:
@@ -147,7 +159,8 @@ jobs:
~/.bun/install/cache
node_modules
vendor/texthooker-ui/node_modules
key: ${{ runner.os }}-bun-${{ hashFiles('bun.lock', 'vendor/texthooker-ui/package.json') }}
vendor/subminer-yomitan/node_modules
key: ${{ runner.os }}-bun-${{ hashFiles('bun.lock', 'vendor/texthooker-ui/package.json', 'vendor/subminer-yomitan/package-lock.json') }}
restore-keys: |
${{ runner.os }}-bun-

2
.gitignore vendored
View File

@@ -5,6 +5,7 @@ node_modules/
out/
dist/
release/
build/yomitan/
# Launcher build artifact (produced by make build-launcher)
/subminer
@@ -36,3 +37,4 @@ tests/*
.worktrees/
.codex/*
.agents/*
docs/*

6
.gitmodules vendored
View File

@@ -5,6 +5,6 @@
[submodule "vendor/yomitan-jlpt-vocab"]
path = vendor/yomitan-jlpt-vocab
url = https://github.com/stephenmk/yomitan-jlpt-vocab
[submodule "yomitan-jlpt-vocab"]
path = vendor/yomitan-jlpt-vocab
url = https://github.com/stephenmk/yomitan-jlpt-vocab
[submodule "vendor/subminer-yomitan"]
path = vendor/subminer-yomitan
url = https://github.com/ksyasuda/subminer-yomitan

View File

@@ -1,3 +1,4 @@
<!-- BACKLOG.MD MCP GUIDELINES START -->
<CRITICAL_INSTRUCTION>
@@ -16,7 +17,6 @@ This project uses Backlog.md MCP for all task and project management activities.
- **When to read it**: BEFORE creating tasks, or when you're unsure whether to track work
These guides cover:
- Decision framework for when to create tasks
- Search-first workflow to avoid duplicates
- Links to detailed guides for task creation, execution, and finalization

View File

@@ -98,7 +98,7 @@ ensure-bun:
@command -v bun >/dev/null 2>&1 || { printf '%s\n' "[ERROR] bun not found"; exit 1; }
pretty: ensure-bun
@bun run format
@bun run format:src
build:
@printf '%s\n' "[INFO] Detected platform: $(PLATFORM)"

View File

@@ -33,6 +33,7 @@ SubMiner is an Electron overlay that sits on top of mpv. It turns your video pla
- **Subtitle tools** — Download from Jimaku, sync with alass/ffsubsync
- **Immersion tracking** — SQLite-powered stats on your watch time and mining activity
- **Custom texthooker page** — Built-in custom texthooker page and websocket, no extra setup
- **Annotated websocket API** — Dedicated annotation feed can serve bundled texthooker or external clients with rendered `sentence` HTML plus structured `tokens`
- **Jellyfin integration** — Remote playback setup, cast device mode, and direct playback launch
- **AniList progress** — Track episode completion and push watching progress automatically
@@ -53,30 +54,36 @@ chmod +x ~/.local/bin/subminer
> [!NOTE]
> The `subminer` wrapper uses a [Bun](https://bun.sh) shebang. Make sure `bun` is on your `PATH`.
**From source** or **macOS** — see the [installation guide](https://docs.subminer.moe/installation#from-source).
**From source** or **macOS** — initialize submodules first (`git submodule update --init --recursive`). Source builds now also require Node.js 22 + npm because bundled Yomitan is built from the `vendor/subminer-yomitan` submodule into `build/yomitan` during `bun run build`. Full install guide: [docs.subminer.moe/installation#from-source](https://docs.subminer.moe/installation#from-source).
### 2. Install the mpv plugin and configuration file
### 2. Launch the app once
```bash
wget https://github.com/ksyasuda/SubMiner/releases/latest/download/subminer-assets.tar.gz -O /tmp/subminer-assets.tar.gz
tar -xzf /tmp/subminer-assets.tar.gz -C /tmp
mkdir -p ~/.config/mpv/scripts/subminer
mkdir -p ~/.config/mpv/script-opts
cp -R /tmp/plugin/subminer/. ~/.config/mpv/scripts/subminer/
cp /tmp/plugin/subminer.conf ~/.config/mpv/script-opts/
mkdir -p ~/.config/SubMiner && cp /tmp/config.example.jsonc ~/.config/SubMiner/config.jsonc
SubMiner.AppImage
```
### 3. Set up Yomitan Dictionaries
On first launch, SubMiner now:
```bash
subminer app --yomitan
```
- starts in the tray/background
- creates the default config directory and `config.jsonc`
- opens a compact setup popup
- can install the mpv plugin to the default mpv scripts location for you
- links directly to Yomitan settings so you can install dictionaries before finishing setup
Existing installs that already have a valid config plus at least one Yomitan dictionary are auto-detected as complete and will not be re-prompted.
### 3. Finish setup
- click `Install mpv plugin` if you want the default plugin auto-start flow
- click `Open Yomitan Settings` and install at least one dictionary
- click `Refresh status`
- click `Finish setup`
The mpv plugin step is optional. Yomitan must report at least one installed dictionary before setup can be completed.
### 4. Mine
```bash
subminer app --start --background
subminer video.mkv # default plugin config auto-starts visible overlay + resumes playback when ready
subminer --start video.mkv # optional explicit overlay start when plugin auto_start=no
```
@@ -85,7 +92,7 @@ subminer --start video.mkv # optional explicit overlay start when plugin auto_st
| Required | Optional |
| ------------------------------------------ | -------------------------------------------------- |
| `bun` | |
| `bun`, `node` 22, `npm` | |
| `mpv` with IPC socket | `yt-dlp` |
| `ffmpeg` | `guessit` (better AniSkip title/episode detection) |
| `mecab` + `mecab-ipadic` | `fzf` / `rofi` |
@@ -109,7 +116,7 @@ The Bun-managed discovery lanes intentionally exclude a small set of suites that
## Acknowledgments
Built on the shoulders of [GameSentenceMiner](https://github.com/bpwhelan/GameSentenceMiner), [texthooker-ui](https://github.com/Renji-XD/texthooker-ui), [mpvacious](https://github.com/Ajatt-Tools/mpvacious), [Anacreon-Script](https://github.com/friedrich-de/Anacreon-Script), and [autosubsync-mpv](https://github.com/joaquintorres/autosubsync-mpv). Subtitles powered by [Jimaku.cc](https://jimaku.cc). Dictionary lookups via [Yomitan](https://github.com/yomidevs/yomitan).
Built on the shoulders of [GameSentenceMiner](https://github.com/bpwhelan/GameSentenceMiner), [Renji's Texthooker Page](https://github.com/Renji-XD/texthooker-ui), [mpvacious](https://github.com/Ajatt-Tools/mpvacious), [Anacreon-Script](https://github.com/friedrich-de/Anacreon-Script), and [Bee's Character Dictionary](https://github.com/bee-san/Japanese_Character_Name_Dictionary). Subtitles powered by [Jimaku.cc](https://jimaku.cc). Dictionary lookups via [Yomitan](https://github.com/yomidevs/yomitan).
## License

View File

@@ -1,11 +1,11 @@
project_name: 'SubMiner'
default_status: 'To Do'
statuses: ['To Do', 'In Progress', 'Done']
project_name: "SubMiner"
default_status: "To Do"
statuses: ["To Do", "In Progress", "Done"]
labels: []
definition_of_done: []
date_format: yyyy-mm-dd
max_column_width: 20
default_editor: 'nvim'
default_editor: "nvim"
auto_open_browser: false
default_port: 6420
remote_operations: true
@@ -13,4 +13,4 @@ auto_commit: false
bypass_git_hooks: false
check_active_branches: true
active_branch_days: 30
task_prefix: 'task'
task_prefix: "task"

View File

@@ -0,0 +1,49 @@
---
id: TASK-100
title: 'Add configurable texthooker startup launch'
status: Done
assignee: []
created_date: '2026-03-06 23:30'
updated_date: '2026-03-07 01:59'
labels: []
dependencies: []
priority: medium
ordinal: 10000
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Add a config option under `texthooker` to launch the built-in texthooker server automatically when SubMiner starts.
Scope:
- Add `texthooker.launchAtStartup`.
- Default to `true`.
- Start the existing texthooker server during normal app startup when enabled.
- Keep `texthooker.openBrowser` as separate behavior.
- Add regression coverage and update generated config docs/example.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Default config enables automatic texthooker startup.
- [x] #2 Config parser accepts valid boolean values and warns on invalid values.
- [x] #3 App-ready startup launches texthooker when enabled.
- [x] #4 Generated config template/example documents the new option.
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Added `texthooker.launchAtStartup` with a default of `true`, wired it through config defaults/validation/template generation, and started the existing texthooker server during app-ready startup without coupling it to browser auto-open behavior.
Also added regression coverage for config parsing/template output and app-ready dependency wiring, then regenerated the checked-in config example artifacts.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,39 @@
---
id: TASK-101
title: Index AniList character alternative names in the character dictionary
status: Done
assignee: []
created_date: '2026-03-07 00:00'
updated_date: '2026-03-08 00:11'
labels:
- dictionary
- anilist
dependencies: []
references:
- src/main/character-dictionary-runtime.ts
- src/main/character-dictionary-runtime.test.ts
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Index AniList character alternative names in generated character dictionaries so aliases like Shadow resolve during subtitle lookup instead of falling through to unrelated generic dictionary entries.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Character fetch reads AniList alternative character names needed for lookup coverage
- [x] #2 Generated term banks include alias-derived terms for subtitle lookups like シャドウ
- [x] #3 Regression coverage proves alternative-name indexing works end to end
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Character dictionary generation now requests AniList `name.alternative`, indexes those aliases as term candidates, and expands mixed aliases like `Minoru Kagenou (影野ミノル)` into usable outer/inner variants. Also extended kana alias synthesis so the AniList alias `Shadow` emits `シャドウ`, which matches the subtitle token the user hit in The Eminence in Shadow.
Bumped the character-dictionary snapshot format to invalidate stale cached snapshots, and updated merged-dictionary rebuilds to refresh invalid snapshots before composing the ZIP so old cache files do not hard-fail the merge path.
Verified with `bun test src/main/character-dictionary-runtime.test.ts` and `bun run tsc --noEmit`.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,65 @@
---
id: TASK-102
title: Quiet default AppImage startup and implicit background launch
status: Done
assignee:
- codex
created_date: '2026-03-06 21:20'
updated_date: '2026-03-06 21:33'
labels: []
dependencies: []
references:
- /home/sudacode/projects/japanese/SubMiner/src/main-entry-runtime.ts
- /home/sudacode/projects/japanese/SubMiner/src/core/services/cli-command.ts
- /home/sudacode/projects/japanese/SubMiner/src/main.ts
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Make the packaged Linux no-arg launch path behave like a quiet background start instead of surfacing startup-only noise.
Scope:
- Treat default background entry launches as implicit `--start --background`.
- Keep the `--password-store` diagnostic out of normal startup output.
- Suppress known startup-only `node:sqlite` and `lsfg-vk` warnings for the entry/background launch path.
- Avoid noisy protocol-registration warnings during normal startup when registration is unsupported.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Initial background launch reaches the start path without logging `No running instance. Use --start to launch the app.`
- [x] #2 Default startup no longer emits the `Applied --password-store gnome-libsecret` line at normal log levels.
- [x] #3 Entry/background launch sanitization suppresses the observed `ExperimentalWarning: SQLite...` and `lsfg-vk ... unsupported configuration version` startup noise.
- [x] #4 Regression coverage documents the new startup behavior.
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Normalized no-arg/password-store-only entry launches to append implicit `--start --background`, and upgraded `--background`-only entry launches to include `--start`.
Applied shared entry env sanitization before loading the main process so default startup strips the `lsfg-vk` Vulkan layer and sets `NODE_NO_WARNINGS=1`; background children keep the same sanitized env.
Downgraded startup-only protocol-registration failure logging to debug, and routed the Linux password-store diagnostic through the scoped debug logger instead of raw console output.
Verification:
- `bun test src/main-entry-runtime.test.ts src/main/runtime/anilist-setup-protocol.test.ts src/main/runtime/anilist-setup-protocol-main-deps.test.ts`
- `bun run test:fast`
Note: the final `node --experimental-sqlite --test dist/main/runtime/registry.test.js` step in `bun run test:fast` still prints Node's own experimental SQLite warning because that test command explicitly enables the feature flag outside the app entrypoint.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Default packaged startup is now quiet and behaves like an implicit `--start --background` launch.
- No-arg AppImage entry launches now append `--start --background`, and `--background`-only launches append the missing `--start`.
- Entry/background startup sanitization now suppresses the observed `lsfg-vk` and `node:sqlite` warnings on the app launch path.
- Linux password-store and unsupported protocol-registration diagnostics now stay at debug level instead of normal startup output.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,38 @@
---
id: TASK-103
title: Add dedicated annotation websocket for texthooker
status: Done
assignee:
- codex
created_date: '2026-03-07 02:20'
updated_date: '2026-03-07 02:20'
labels:
- texthooker
- websocket
- subtitle
dependencies: []
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Add a separate annotated subtitle websocket for bundled texthooker so token/JLPT/frequency markup is available on a stable dedicated port even when the regular websocket is in `auto` mode and skipped because `mpv_websocket` is installed.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Regular `websocket.enabled: "auto"` behavior remains unchanged and still skips the regular websocket when `mpv_websocket` is installed.
- [x] #2 A separate `annotationWebsocket` config controls an independent annotated websocket with default port `6678`.
- [x] #3 Bundled texthooker is pointed at the annotation websocket when it is enabled.
- [x] #4 Focused regression tests cover config parsing, startup wiring, and texthooker bootstrap injection.
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Added `annotationWebsocket.enabled`/`annotationWebsocket.port` with defaults of `true`/`6678`, started that websocket independently from the regular auto-managed websocket, and injected the bundled texthooker websocket URL so it connects to the annotation feed by default.
Also added focused regression coverage and regenerated the checked-in config examples.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,43 @@
---
id: TASK-104
title: Mirror overlay annotation hover behavior in vendored texthooker
status: Done
assignee:
- codex
created_date: '2026-03-06 21:45'
updated_date: '2026-03-06 21:45'
labels:
- texthooker
- subtitle
- websocket
dependencies:
- TASK-103
references:
- /home/sudacode/projects/japanese/SubMiner/src/core/services/subtitle-ws.ts
- /home/sudacode/projects/japanese/SubMiner/vendor/texthooker-ui/src/components/App.svelte
- /home/sudacode/projects/japanese/SubMiner/vendor/texthooker-ui/src/line-markup.ts
- /home/sudacode/projects/japanese/SubMiner/vendor/texthooker-ui/src/app.css
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Bring bundled texthooker annotation rendering closer to the visible overlay. Keep the lightweight texthooker UX, but preserve token metadata for hover, match overlay color-precedence rules across known/N+1/name/frequency/JLPT, expose name-match highlighting as a toggle, and emit a structured annotation payload on the dedicated websocket so non-SubMiner clients can treat it as an API.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Annotation websocket payload includes both rendered `sentence` HTML and structured token metadata for generic clients.
- [x] #2 Vendored texthooker preserves annotation metadata attrs needed for hover labels and uses overlay-matching color precedence rules.
- [x] #3 Vendored texthooker supports character-name highlighting with a user-facing toggle and standalone-web note.
- [x] #4 Hovering annotated texthooker tokens reveals JLPT/frequency metadata without adding the full overlay popup workflow.
- [x] #5 Focused serializer, texthooker markup, socket parsing, CSS, and build verification pass.
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Extended the dedicated annotation websocket payload to ship `version`, plain `text`, rendered `sentence`, and structured `tokens` metadata while keeping backward-compatible `sentence` consumers working. Updated the vendored texthooker to preserve hover metadata attrs, follow overlay color precedence for known/N+1/name/frequency/JLPT annotations, add a character-name highlight toggle plus standalone-web dictionary note, and render lightweight hover labels for frequency/JLPT metadata. Added focused regression coverage and rebuilt both the vendored texthooker bundle and SubMiner.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,47 @@
---
id: TASK-105
title: Stop local docs artifact writes after docs repo split
status: Done
assignee: []
created_date: '2026-03-07 00:00'
updated_date: '2026-03-07 00:20'
labels: []
dependencies: []
priority: medium
ordinal: 10500
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Now that user-facing docs live in `../subminer-docs`, first-party scripts in this repo should not keep writing generated artifacts into the local `docs/` tree.
Scope:
- Audit first-party scripts/automation for writes to `docs/`.
- Keep repo-local outputs only where they are still intentionally owned by this repo.
- Repoint generated docs artifacts to `../subminer-docs` when that is the maintained source of truth.
- Add regression coverage for the config-example generation path contract.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 The config-example generator no longer writes to `docs/public/config.example.jsonc` inside this repo.
- [x] #2 When `../subminer-docs` exists, the generator updates `../subminer-docs/public/config.example.jsonc`.
- [x] #3 Automated coverage guards the output-path contract so local docs writes do not regress.
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Removed the first-party local `docs/public` config-example write path from `src/generate-config-example.ts` and replaced it with sibling-docs-repo detection that targets `../subminer-docs/public/config.example.jsonc` only when that repo exists.
Added a project-local regression suite for output-path resolution and artifact writing, wired that suite into the maintained config test lane, and removed the stale generated `docs/public/config.example.jsonc` artifact from the working tree.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,69 @@
---
id: TASK-106
title: Add first-run setup gate and auto-install flow
status: Done
assignee:
- codex
created_date: '2026-03-07 06:10'
updated_date: '2026-03-07 06:20'
labels: []
dependencies: []
references:
- /home/sudacode/projects/japanese/SubMiner/src/main.ts
- /home/sudacode/projects/japanese/SubMiner/src/shared/setup-state.ts
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/first-run-setup-service.ts
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/first-run-setup-window.ts
- /home/sudacode/projects/japanese/SubMiner/launcher/commands/playback-command.ts
priority: high
ordinal: 10600
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Replace the current manual install flow with a first-run setup gate:
- bootstrap the default config dir/config file automatically
- detect legacy installs and mark them complete when config + Yomitan dictionaries are already present
- open a compact Catppuccin Macchiato setup popup for incomplete installs
- optionally install the mpv plugin into the default mpv location
- block launcher playback until setup completes, then resume the original playback flow
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 First app launch seeds the default config dir/config file without manual copy steps.
- [x] #2 Existing installs with config plus at least one Yomitan dictionary are auto-detected as already complete.
- [x] #3 Incomplete installs get a first-run setup popup with mpv plugin install, Yomitan settings, refresh, skip, and finish actions.
- [x] #4 Launcher playback waits for setup completion and does not start mpv while setup is incomplete.
- [x] #5 Plugin assets are packaged into the Electron bundle and regression tests cover the new flow.
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Added shared setup-state/config/mpv path helpers so Electron and launcher read the same onboarding state file.
Introduced a first-run setup service plus compact BrowserWindow popup using Catppuccin Macchiato styling. The popup supports optional mpv plugin install, opening Yomitan settings, status refresh, skip-plugin, and gated finish once at least one Yomitan dictionary is installed.
Electron startup now bootstraps a default config file, auto-detects legacy-complete installs, adds `--setup` CLI support, exposes a tray `Complete Setup` action while incomplete, and avoids reopening setup once completion is recorded.
Launcher playback now checks the shared setup-state file before starting mpv. If setup is incomplete, it launches the app with `--background --setup`, waits for completion, and only then proceeds.
Verification:
- `bun run typecheck`
- `bun run test:fast`
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
SubMiner now supports a download-and-launch install flow.
- First launch auto-creates config and opens setup only when needed.
- Existing users with working installs are silently migrated to completed setup.
- The setup popup handles optional mpv plugin install and Yomitan dictionary readiness.
- Launcher playback is gated on setup completion and resumes automatically afterward.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,42 @@
---
id: TASK-107
title: 'Fix Yomitan scan-token fallback fragmentation on exact-source misses'
status: Done
assignee: []
created_date: '2026-03-07 01:10'
updated_date: '2026-03-07 01:12'
labels: []
dependencies: []
priority: high
ordinal: 9007
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Left-to-right Yomitan scanning can emit bogus fallback tokens when `termsFind` returns entries but none of their headwords carries an exact primary source for the consumed substring. Repro: `だが それでも届かぬ高みがあった` currently yields trailing fragments like `があ` / `た`, which blocks the real `あった` token from receiving frequency highlighting.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Scanner skips `termsFind` fallback entries that are not backed by an exact primary source for the consumed substring.
- [x] #2 Repro line no longer yields bogus trailing fragments such as `があ`.
- [x] #3 Regression coverage added for the scan-token path.
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Removed the scan-token helper fallback that previously emitted a token from the first returned headword even when Yomitan did not report an exact primary source for the consumed substring. Added a focused regression test covering `だが それでも届かぬ高みがあった`, ensuring bogus `があ` fragmentation is skipped so the later `あった` exact match can still be tokenized and highlighted.
Verification:
- `bun test src/core/services/tokenizer/yomitan-parser-runtime.test.ts src/core/services/tokenizer.test.ts --timeout 20000`
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,43 @@
---
id: TASK-108
title: 'Exclude single kana tokens from frequency highlighting'
status: Done
assignee: []
created_date: '2026-03-07 01:18'
updated_date: '2026-03-07 01:22'
labels: []
dependencies: []
priority: medium
ordinal: 9008
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Suppress frequency highlighting for single-character hiragana or katakana tokens. Scope is frequency-only: known/N+1/JLPT behavior stays unchanged.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Single-character hiragana tokens do not retain `frequencyRank`.
- [x] #2 Single-character katakana tokens do not retain `frequencyRank`.
- [x] #3 Regression coverage exists at annotation-stage and tokenizer levels.
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Added a frequency-only suppression rule for single-character kana tokens based on token `surface`, so bogus merged fragments like `た` and standalone one-character kana no longer keep `frequencyRank`. Regression coverage now exists both in the annotation stage and in the tokenizer path, while multi-character tokens and N+1/JLPT behavior remain unchanged.
Verification:
- `bun test src/core/services/tokenizer/annotation-stage.test.ts --timeout 20000`
- `bun test src/core/services/tokenizer.test.ts --timeout 20000`
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,51 @@
---
id: TASK-110
title: Replace vendored Yomitan with submodule-built Chrome artifact workflow
status: Done
assignee: []
created_date: '2026-03-07 11:05'
updated_date: '2026-03-07 11:22'
labels:
- yomitan
- build
- release
dependencies: []
priority: high
ordinal: 9010
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Replace the checked-in `vendor/yomitan` release tree with a `subminer-yomitan` git submodule. Build Yomitan from source, extract the Chromium zip artifact into a stable local build directory, and make SubMiner dev/runtime/tests/release packaging load that extracted extension instead of the source tree or vendored files.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Repo tracks Yomitan as a git submodule instead of committed extension files under `vendor/yomitan`.
- [x] #2 SubMiner has a reproducible build/extract step that produces a local Chromium extension directory from `subminer-yomitan`.
- [x] #3 Dev/runtime/tests resolve the extracted build output as the default Yomitan extension path.
- [x] #4 Release packaging includes the extracted Chromium extension files instead of the old vendored tree.
- [x] #5 Docs and verification commands reflect the new workflow.
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Replaced the checked-in `vendor/yomitan` extension tree with a `vendor/subminer-yomitan` git submodule and added a reproducible `bun run build:yomitan` workflow that builds `yomitan-chrome.zip`, extracts it into `build/yomitan`, and reuses a source-state stamp to skip redundant rebuilds. Runtime path resolution, helper CLIs, Yomitan integration tests, packaging, CI cache keys, and README source-build notes now all target that generated artifact instead of the old vendored files.
Verification:
- `bun run build:yomitan`
- `bun test src/core/services/yomitan-extension-paths.test.ts src/core/services/yomitan-structured-content-generator.test.ts src/yomitan-translator-sort.test.ts`
- `bun run typecheck`
- `bun run build`
- `bun run test:core:src`
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,71 @@
---
id: TASK-111
title: Fix subtitle-cycle OSD labels for J keybindings
status: Done
assignee:
- Codex
created_date: '2026-03-07 23:45'
updated_date: '2026-03-08 00:06'
labels: []
dependencies: []
references:
- /Users/sudacode/projects/japanese/SubMiner/src/core/services/ipc-command.ts
- /Users/sudacode/projects/japanese/SubMiner/src/core/services/mpv.ts
- >-
/Users/sudacode/projects/japanese/SubMiner/src/core/services/ipc-command.test.ts
- >-
/Users/sudacode/projects/japanese/SubMiner/src/core/services/mpv-control.test.ts
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
When cycling subtitle tracks with the default J/Shift+J keybindings, the mpv OSD currently shows raw template text like `${sid}` instead of a resolved subtitle label. Update the keybinding OSD behavior so users see the active subtitle selection clearly when cycling tracks, and ensure placeholder-based OSD messages sent through the mpv client API render correctly.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Pressing the primary subtitle cycle keybinding shows a resolved subtitle label on the OSD instead of a raw `${sid}` placeholder.
- [x] #2 Pressing the secondary subtitle cycle keybinding shows a resolved subtitle label on the OSD instead of a raw `${secondary-sid}` placeholder.
- [x] #3 Proxy OSD messages that rely on mpv property expansion render resolved values when sent through the mpv client API.
- [x] #4 Regression tests cover the subtitle-cycle OSD behavior and the placeholder-expansion OSD path.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Add focused failing tests for subtitle-cycle OSD labels and mpv placeholder-expansion behavior.
2. Update the IPC mpv command handler to resolve primary and secondary subtitle track labels from mpv `track-list` data after cycling subtitle tracks.
3. Update the mpv OSD runtime path so placeholder-based `show-text` messages sent through the client API opt into property expansion.
4. Run focused tests, then the relevant core test lane, and record results in the task notes.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Initial triage: `ipc-command.ts` emits raw `${sid}`/`${secondary-sid}` placeholder strings, and `showMpvOsdRuntime` sends `show-text` via mpv client API without enabling property expansion.
User approved implementation plan on 2026-03-07.
Implementation: proxy mpv command OSD now supports an async resolver so subtitle track cycling can show human-readable labels instead of raw `${sid}` placeholders.
Implementation: `showMpvOsdRuntime` now prefixes placeholder-based messages with mpv client-api `expand-properties`, which fixes raw `${...}` OSD output for subtitle delay/position messages.
Testing: `bun test src/core/services/ipc-command.test.ts src/core/services/mpv-control.test.ts src/main/runtime/mpv-proxy-osd.test.ts src/main/runtime/ipc-mpv-command-main-deps.test.ts src/main/runtime/ipc-bridge-actions.test.ts src/main/runtime/ipc-bridge-actions-main-deps.test.ts src/main/runtime/composers/ipc-runtime-composer.test.ts` passed.
Testing: `bun x tsc --noEmit` passed.
Testing: `bun run test:core:src` passed (423 pass, 6 skip, 0 fail).
Docs: no update required because no checked-in docs or help text describe the J/Shift+J OSD output behavior.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Fixed subtitle-cycle OSD handling for the default J/Shift+J keybindings. The IPC mpv command path now supports resolving proxy OSD text asynchronously, and the main-runtime resolver reads mpv `track-list` state so primary and secondary subtitle cycling show human-readable track labels instead of raw `${sid}` / `${secondary-sid}` placeholders.
Also fixed the lower-level mpv OSD transport so placeholder-based `show-text` messages sent through the client API opt into `expand-properties`. That preserves existing template-based OSD messages like subtitle delay and subtitle position without leaking the raw `${...}` syntax.
Added regression coverage for the async proxy OSD path, the placeholder-expansion `showMpvOsdRuntime` path, and the runtime subtitle-track label resolver. Verification run: `bun x tsc --noEmit`; focused mpv/IPC tests; and the maintained `bun run test:core:src` lane (423 pass, 6 skip, 0 fail).
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,61 @@
---
id: TASK-112
title: Address Claude review items on PR 15
status: Done
assignee:
- codex
created_date: '2026-03-08 00:11'
updated_date: '2026-03-08 00:12'
labels:
- pr-review
- ci
dependencies: []
references:
- .github/workflows/release.yml
- .github/workflows/ci.yml
- .gitmodules
- >-
backlog/tasks/task-101 -
Index-AniList-character-alternative-names-in-the-character-dictionary.md
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Review Claude's PR feedback on PR #15, implement only the technically valid fixes on the current branch, and document which comments are non-actionable or already acceptable.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Validated Claude's concrete PR review items against current branch state and repo conventions
- [x] #2 Implemented the accepted fixes with regression coverage or verification where applicable
- [x] #3 Documented which review items are non-blocking or intentionally left unchanged
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Validate each Claude review item against current branch files and repo workflow.
2. Patch release quality-gate to match CI ordering and add explicit typecheck.
3. Remove duplicate .gitmodules stanza and normalize the TASK-101 reference path through Backlog MCP.
4. Run relevant verification for workflow/config metadata changes and record which review items remain non-actionable.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
User asked to address Claude PR comments on PR #15 and assess whether any action items remain. Treat review suggestions skeptically; only fix validated defects.
Validated Claude's five review items. Fixed release workflow ordering/typecheck, removed the duplicate .gitmodules entry, and normalized TASK-101 references to repo-relative paths via Backlog MCP.
Left the vendor/subminer-yomitan branch-pin suggestion unchanged. The committed submodule SHA already controls reproducibility; adding a branch would only affect update ergonomics and was not required to address a concrete defect.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Validated Claude's PR #15 review summary against the current branch and applied the actionable fixes. In `.github/workflows/release.yml`, the release `quality-gate` job now restores the dependency cache before installation, no longer installs twice, and runs `bun run typecheck` before the fast test suite to match CI expectations. In `.gitmodules`, removed the duplicate `vendor/yomitan-jlpt-vocab` stanza, which declared a conflicting path. Through Backlog MCP, updated `TASK-101` references from an absolute local path to repo-relative paths so the task metadata is portable across contributors.
Verification: `git diff --check`, `git config -f .gitmodules --get-regexp '^submodule\..*\.path$'`, `bun run typecheck`, and `bun run test:fast` all passed. `bun run format:check` still fails on many pre-existing unrelated files already present on the branch, including multiple backlog task files and existing source/docs files; this review patch did not attempt a repo-wide formatting sweep.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,59 @@
---
id: TASK-113
title: Scope make pretty to maintained source files
status: Done
assignee:
- codex
created_date: '2026-03-08 00:20'
updated_date: '2026-03-08 00:22'
labels:
- tooling
- formatting
dependencies: []
references:
- Makefile
- package.json
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Change the `make pretty` workflow so it formats only the maintained source/config files we intentionally keep under Prettier, instead of sweeping backlog/docs/generated content across the whole repository.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 `make pretty` formats only the approved maintained source/config paths
- [x] #2 The allowlist is reusable for check/write flows instead of duplicating path logic
- [x] #3 Verification shows the scoped formatting command targets the intended files without touching backlog or vendored content
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Inspect current Prettier config/ignore behavior and keep the broad repo-wide format command unchanged.
2. Add a reusable scoped Prettier script that targets maintained source/config paths only.
3. Update `make pretty` to call the scoped script.
4. Verify the scoped command resolves only intended files and does not traverse backlog or vendor paths.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
User approved the allowlist approach: keep repo-wide `format` intact, make `make pretty` use a maintained-path formatter scope.
Added `scripts/prettier-scope.sh` as the single allowlist for scoped Prettier paths and wired `format:src` / `format:check:src` to it.
Updated `make pretty` to call `bun run format:src`. Verified with `make -n pretty` and shell tracing that the helper only targets the maintained allowlist and does not traverse `backlog/` or `vendor/`.
Excluded `Makefile` and `.prettierignore` from the allowlist after verification showed Prettier cannot infer parsers for them.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Scoped the repo's day-to-day formatting entrypoint without changing the existing broad repo-wide Prettier scripts. Added `scripts/prettier-scope.sh` as the shared allowlist for maintained source/config paths (`.github`, `build`, `launcher`, `scripts`, `src`, plus selected root JSON config files), added `format:src` and `format:check:src` in `package.json`, and updated `make pretty` to run the scoped formatter.
Verification: `make -n pretty` now resolves to `bun run format:src`. `bash -n scripts/prettier-scope.sh` passed, and shell-traced `bash -x scripts/prettier-scope.sh --check` confirmed the exact allowlist passed to Prettier. `bun run format:check:src` fails only because existing files inside the allowed source scope are not currently formatted; it no longer touches `backlog/` or `vendor/`.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,62 @@
---
id: TASK-114
title: Fix failing CI checks on PR 15
status: Done
assignee:
- codex
created_date: '2026-03-08 00:34'
updated_date: '2026-03-08 00:37'
labels:
- ci
- test
dependencies: []
references:
- src/renderer/subtitle-render.test.ts
- src/renderer/style.css
- .github/workflows/ci.yml
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Investigate the failing GitHub Actions CI run for PR #15 on branch `yomitan-fork`, fix the underlying test or code regression, and verify the affected local test/CI lane passes.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Identified the concrete failing CI job and captured the relevant failure context
- [x] #2 Implemented the minimal code or test change needed to resolve the CI failure
- [x] #3 Verified the affected local test target and the broader fast CI test lane pass
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Inspect the failing GitHub Actions run and confirm the exact failing test/assertion.
2. Reproduce the failing renderer stylesheet test locally and compare the assertion against current CSS.
3. Apply the minimal test or stylesheet fix needed to restore the intended hover/selection behavior.
4. Re-run the targeted renderer test, then re-run `bun run test` to verify the fast CI lane is green.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
GitHub Actions run 22810400921 failed in job build-test-audit, step `Test suite (source)`, with a single failing test: `JLPT CSS rules use underline-only styling in renderer stylesheet` in src/renderer/subtitle-render.test.ts.
Reproduced the failing test locally with `bun test src/renderer/subtitle-render.test.ts`. The failure was a brittle stylesheet assertion, not a renderer behavior regression.
Updated the renderer stylesheet test helper to split selector lists only at top-level commas (so `:is(...)` groups stay intact) and to normalize multiline selector whitespace, then switched the failing hover/JLPT assertions to inspect extracted rule blocks instead of matching the entire CSS file text.
Verification passed with `bun test src/renderer/subtitle-render.test.ts` and `bun run test`.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Investigated GitHub Actions CI run `22810400921` for PR #15 and confirmed the only failing job was `build-test-audit`, step `Test suite (source)`, with a single failure in `src/renderer/subtitle-render.test.ts` (`JLPT CSS rules use underline-only styling in renderer stylesheet`).
The renderer CSS itself was still correct; the regression was in the test helper. `extractClassBlock` was splitting selector lists on every comma, which breaks selectors containing `:is(...)`, and the affected assertions fell back to brittle whole-file regex matching against a multiline selector. Fixed the test by teaching the helper to split selectors only at top-level commas, normalizing selector whitespace around multiline `:not(...)` / `:is(...)` clauses, and asserting on extracted rule blocks for the plain-word hover and JLPT-only hover/selection rules.
Verification: `bun test src/renderer/subtitle-render.test.ts` passed, and `bun run test` passed end to end (the same fast lane that failed in CI).
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,59 @@
---
id: TASK-115
title: Refresh subminer-docs contributor docs for current repo workflow
status: Done
assignee:
- codex
created_date: '2026-03-08 00:40'
updated_date: '2026-03-08 00:42'
labels:
- docs
dependencies: []
references:
- ../subminer-docs/development.md
- ../subminer-docs/README.md
- Makefile
- package.json
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Update the sibling `subminer-docs` repo so contributor/development docs match the current SubMiner repo workflow after the docs split and recent tooling changes, including removing stale in-repo docs build steps and documenting the scoped formatting command.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Contributor docs in `subminer-docs` no longer reference stale in-repo docs build commands for the app repo
- [x] #2 Contributor docs mention the current scoped formatting workflow (`make pretty` / `format:src`) where relevant
- [x] #3 Removed stale or no-longer-needed instructions that no longer match the current repo layout
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Inspect `subminer-docs` for contributor/development instructions that drifted after the docs repo split and recent tooling changes.
2. Update contributor docs to remove stale app-repo docs commands and document the current scoped formatting workflow.
3. Verify the modified docs page and build the docs site from the sibling docs repo when local dependencies are available.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Detected concrete doc drift in `subminer-docs/development.md`: stale in-repo docs build commands and no mention of the scoped `make pretty` formatter.
Updated `../subminer-docs/development.md` to remove stale app-repo docs build steps from the local gate, document `make pretty` / `format:check:src`, and point docs-site work to the sibling docs repo explicitly.
Installed docs repo dependencies locally with `bun install` and verified the docs site with `bun run docs:build` in `../subminer-docs`.
Did not change `../subminer-docs/README.md`; it was already accurate for the docs repo itself.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Refreshed the contributor/development docs in the sibling `subminer-docs` repo to match the current SubMiner workflow. In `development.md`, removed the stale app-repo `bun run docs:build` step from the local CI-equivalent gate, added an explicit note to run docs builds from `../subminer-docs` when docs change, documented the scoped formatting workflow (`make pretty` and `bun run format:check:src`), and replaced the old in-repo `make docs*` instructions with the correct sibling-repo `bun run docs:*` commands. Also updated the Makefile reference to include `make pretty` and removed the obsolete `make docs-dev` entry.
Verification: installed docs repo dependencies with `bun install` in `../subminer-docs` and ran `bun run docs:build` successfully. Left `README.md` unchanged because it was already accurate for the standalone docs repo.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,53 @@
---
id: TASK-116
title: Audit branch commits for remaining subminer-docs updates
status: Done
assignee:
- codex
created_date: '2026-03-08 00:46'
updated_date: '2026-03-08 00:48'
labels:
- docs
dependencies: []
references:
- ../subminer-docs/installation.md
- ../subminer-docs/troubleshooting.md
- src/core/services/yomitan-extension-paths.ts
- scripts/build-yomitan.mjs
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Review recent `yomitan-fork` commits against the sibling `subminer-docs` repo, identify any concrete documentation drift that remains after the earlier contributor-doc updates, and patch the docs for behavior/tooling changes that are now outdated or misleading.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Reviewed recent branch commits for user-facing or contributor-facing changes that may require docs updates
- [x] #2 Updated `subminer-docs` pages where branch changes introduced concrete doc drift
- [x] #3 Verified the docs site still builds after the updates
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Review branch commit themes against `subminer-docs` and identify only concrete drift introduced by recent workflow/runtime changes.
2. Patch docs for the Yomitan submodule build workflow, updated source-build prerequisites, and current runtime Yomitan search paths/manual fallback path.
3. Rebuild the docs site to verify the updated pages render cleanly.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Concrete remaining drift after commit audit: installation/development docs still understate the Node/npm + submodule requirements for the Yomitan build flow, and troubleshooting still points at obsolete `vendor/yomitan` / `extensions/yomitan` paths.
Audited branch commits against subminer-docs coverage. Existing docs already cover first-run setup, texthooker startup/annotated websocket config, AniList merged character dictionaries, configurable collapsible sections, and subtitle name highlighting. Patched remaining drift around source-build prerequisites and Yomitan build/install paths in installation.md, development.md, and troubleshooting.md. Verified with `bun run docs:build` in ../subminer-docs.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Audited branch commits for missing documentation updates in ../subminer-docs. Updated installation, development, and troubleshooting docs to match the current Yomitan submodule build flow, source-build prerequisites, and runtime extension search/manual fallback paths. Confirmed other recent branch features were already documented and rebuilt the docs site successfully.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -3,10 +3,10 @@ id: TASK-87
title: >-
Codebase health: harden verification and retire dead architecture identified
in the March 2026 review
status: To Do
status: In Progress
assignee: []
created_date: '2026-03-06 03:19'
updated_date: '2026-03-06 03:20'
updated_date: '2026-03-06 11:11'
labels:
- tech-debt
- tests
@@ -19,9 +19,10 @@ references:
- src/main.ts
- src/anki-integration.ts
- src/core/services/immersion-tracker-service.test.ts
- src/translators/index.ts
- src/subsync/engines.ts
- src/subtitle/pipeline.ts
- src/translators/index.ts
- src/subsync/engines.ts
- src/subtitle/pipeline.ts
- backlog/tasks/task-87.5 - Dead-architecture-cleanup-delete-unused-registry-and-pipeline-modules-that-are-off-the-live-path.md
documentation:
- docs/reports/2026-02-22-task-100-dead-code-report.md
priority: high
@@ -69,3 +70,10 @@ Shared review context to restate in child tasks:
- src/main.ts trips many noUnusedLocals/noUnusedParameters diagnostics.
- src/translators/index.ts, src/subsync/engines.ts, src/subtitle/pipeline.ts, src/tokenizers/index.ts, and src/token-mergers/index.ts appeared unreferenced during review and must be re-verified before deletion.
<!-- SECTION:PLAN:END -->
## Progress Notes
- `TASK-87.5` is complete. The isolated dead registry/pipeline modules were re-verified as off the maintained runtime path and removed.
- Live subtitle tokenization now owns the zero-width separator normalization that previously only existed in the dead subtitle pipeline path, so the cleanup did not drop that behavior.
- Verification completed for the cleanup slice with `bun test src/core/services/tokenizer.test.ts`, `bun test src/dead-architecture-cleanup.test.ts`, `bun test src/core/services/subsync.test.ts src/subsync/utils.test.ts`, `bun run tsc`, and `bun run test:src`.
- Remaining parent-task scope still includes the broader verification hardening, `src/main.ts` dead-symbol cleanup, and `src/anki-integration.ts` decomposition work tracked by the other child tasks.

View File

@@ -3,10 +3,10 @@ id: TASK-87.4
title: >-
Runtime composition root: remove dead symbols and tighten module boundaries in
src/main.ts
status: To Do
status: Done
assignee: []
created_date: '2026-03-06 03:19'
updated_date: '2026-03-06 03:21'
updated_date: '2026-03-06 18:10'
labels:
- tech-debt
- runtime
@@ -36,10 +36,10 @@ A noUnusedLocals/noUnusedParameters compile pass reports a large concentration o
<!-- AC:BEGIN -->
- [ ] #1 src/main.ts no longer emits dead-symbol diagnostics under a noUnusedLocals/noUnusedParameters compile pass for the areas touched by this cleanup.
- [ ] #2 Unused imports, destructured values, and stale locals identified in the current composition root are removed or relocated without behavior changes.
- [ ] #3 The resulting composition root has clearer ownership boundaries for at least one runtime slice that is currently buried in the monolith.
- [ ] #4 Relevant runtime and startup verification commands pass after the cleanup, and any command changes are documented if needed.
- [x] #1 src/main.ts no longer emits dead-symbol diagnostics under a noUnusedLocals/noUnusedParameters compile pass for the areas touched by this cleanup.
- [x] #2 Unused imports, destructured values, and stale locals identified in the current composition root are removed or relocated without behavior changes.
- [x] #3 The resulting composition root has clearer ownership boundaries for at least one runtime slice that is currently buried in the monolith.
- [x] #4 Relevant runtime and startup verification commands pass after the cleanup, and any command changes are documented if needed.
<!-- AC:END -->
## Implementation Plan
@@ -51,3 +51,13 @@ A noUnusedLocals/noUnusedParameters compile pass reports a large concentration o
3. Keep changes behavior-preserving and avoid mixing unrelated cleanup outside src/main.ts unless required to compile.
4. Verify with the updated runtime/startup test commands from TASK-87.1 plus a noUnused compile pass.
<!-- SECTION:PLAN:END -->
## Completion Notes
- Removed the dead import/destructure backlog from `src/main.ts` and deleted stale wrapper seams that no longer owned runtime behavior after the composer/runtime extractions.
- Tightened module boundaries so the composition root depends on the composed/public runtime surfaces it actually uses instead of retaining unused lower-level domain factory symbols.
- Cleared the remaining strict `noUnusedLocals`/`noUnusedParameters` failures in nearby touched files required for a clean repo-wide pass: `launcher/commands/playback-command.ts`, `src/anki-integration.ts`, `src/anki-integration/field-grouping-workflow.ts`, `src/core/services/tokenizer/yomitan-parser-runtime.test.ts`, and `src/main/runtime/composers/composer-contracts.type-test.ts`.
- Verification:
- `bunx tsc --noEmit -p tsconfig.typecheck.json --noUnusedLocals --noUnusedParameters --pretty false`
- `bun run test:fast`
- Commit: `e659b5d` (`refactor(runtime): remove dead symbols from composition roots`)

View File

@@ -3,10 +3,10 @@ id: TASK-87.5
title: >-
Dead architecture cleanup: delete unused registry and pipeline modules that
are off the live path
status: To Do
status: Done
assignee: []
created_date: '2026-03-06 03:20'
updated_date: '2026-03-06 03:21'
updated_date: '2026-03-06 11:05'
labels:
- tech-debt
- dead-code
@@ -40,10 +40,10 @@ The review found several modules that appear self-contained but unused from the
<!-- AC:BEGIN -->
- [ ] #1 Each candidate module identified in the review is either removed as dead code or justified and reconnected to a real supported execution path.
- [ ] #2 Any stale exports, imports, or tests associated with the removed or consolidated modules are cleaned up so the codebase has a single obvious path for the affected behavior.
- [ ] #3 The cleanup does not regress live tokenization or subtitle sync behavior and the relevant verification commands remain green.
- [ ] #4 Contributor-facing documentation or internal notes no longer imply that removed duplicate architecture is part of the current design.
- [x] #1 Each candidate module identified in the review is either removed as dead code or justified and reconnected to a real supported execution path.
- [x] #2 Any stale exports, imports, or tests associated with the removed or consolidated modules are cleaned up so the codebase has a single obvious path for the affected behavior.
- [x] #3 The cleanup does not regress live tokenization or subtitle sync behavior and the relevant verification commands remain green.
- [x] #4 Contributor-facing documentation or internal notes no longer imply that removed duplicate architecture is part of the current design.
<!-- AC:END -->
## Implementation Plan
@@ -55,3 +55,10 @@ The review found several modules that appear self-contained but unused from the
3. Pay special attention to subtitle sync and tokenization surfaces, since duplicate architecture exists near active code.
4. Verify the relevant tokenization and subsync commands/tests still pass and update any stale docs or notes.
<!-- SECTION:PLAN:END -->
## Implementation Notes
- Traced imports from `src/main.ts`, `src/main/runtime/**`, `src/core/services/subsync-runner.ts`, and `src/core/services/tokenizer.ts`; confirmed the candidate registry/pipeline modules were isolated from the maintained runtime path.
- Deleted dead modules: `src/translators/index.ts`, `src/subsync/engines.ts`, `src/subtitle/pipeline.ts`, `src/subtitle/stages/{merge,normalize,tokenize}.ts`, `src/subtitle/stages/normalize.test.ts`, `src/tokenizers/index.ts`, and `src/token-mergers/index.ts`.
- Moved the useful zero-width separator normalization into the live tokenizer path in `src/core/services/tokenizer.ts` and added regression coverage plus a repository-level dead-architecture guard in `src/dead-architecture-cleanup.test.ts`.
- Verified with `bun test src/core/services/tokenizer.test.ts`, `bun test src/dead-architecture-cleanup.test.ts`, `bun test src/core/services/subsync.test.ts src/subsync/utils.test.ts`, `bun run tsc`, and `bun run test:src`.

View File

@@ -3,10 +3,10 @@ id: TASK-87.6
title: >-
Anki integration maintainability: continue decomposing the oversized
orchestration layer
status: To Do
status: Done
assignee: []
created_date: '2026-03-06 03:20'
updated_date: '2026-03-06 03:21'
updated_date: '2026-03-06 09:23'
labels:
- tech-debt
- anki
@@ -40,10 +40,10 @@ src/anki-integration.ts remains an oversized orchestration file even after earli
<!-- AC:BEGIN -->
- [ ] #1 The responsibilities currently concentrated in src/anki-integration.ts are split into clearer modules or services with narrow ownership boundaries.
- [ ] #2 The resulting orchestration surface is materially smaller and easier to review, with at least one mixed-responsibility cluster extracted behind a well-named interface.
- [ ] #3 Existing Anki integration behavior remains covered by automated verification, including note update, field grouping, and proxy-related flows that the refactor touches.
- [ ] #4 Any developer-facing docs or notes needed to understand the new structure are updated in the same task.
- [x] #1 The responsibilities currently concentrated in src/anki-integration.ts are split into clearer modules or services with narrow ownership boundaries.
- [x] #2 The resulting orchestration surface is materially smaller and easier to review, with at least one mixed-responsibility cluster extracted behind a well-named interface.
- [x] #3 Existing Anki integration behavior remains covered by automated verification, including note update, field grouping, and proxy-related flows that the refactor touches.
- [x] #4 Any developer-facing docs or notes needed to understand the new structure are updated in the same task.
<!-- AC:END -->
## Implementation Plan

View File

@@ -0,0 +1,39 @@
---
id: TASK-97
title: Add configurable character-name token highlighting
status: Done
assignee: []
created_date: '2026-03-06 10:15'
updated_date: '2026-03-06 10:15'
labels:
- subtitle
- dictionary
- renderer
dependencies: []
references:
- /home/sudacode/projects/japanese/SubMiner/src/core/services/tokenizer.ts
- >-
/home/sudacode/projects/japanese/SubMiner/src/core/services/tokenizer/yomitan-parser-runtime.ts
- /home/sudacode/projects/japanese/SubMiner/src/renderer/subtitle-render.ts
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Color subtitle tokens that match entries from the SubMiner character dictionary, with a configurable default color and a config toggle that disables both rendering and name-match detection work.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Tokens matched from the SubMiner character dictionary receive dedicated renderer styling.
- [x] #2 `subtitleStyle.nameMatchEnabled` disables name-match detection work when false.
- [x] #3 `subtitleStyle.nameMatchColor` overrides the default `#f5bde6`.
- [x] #4 Regression coverage verifies config parsing, tokenizer propagation, scanner gating, and renderer class/CSS behavior.
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Added configurable character-name token highlighting with default color `#f5bde6` and config gate `subtitleStyle.nameMatchEnabled`. When enabled, left-to-right Yomitan scanning tags tokens whose winning dictionary entry comes from the SubMiner character dictionary; when disabled, the tokenizer skips that metadata work and the renderer suppresses name-match styling. Added focused regression tests for config parsing, main-deps wiring, Yomitan scan gating, token propagation, renderer classes, and CSS behavior.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,60 @@
---
id: TASK-98
title: Gate subtitle character-name highlighting on character dictionary enablement
status: Done
assignee:
- codex
created_date: '2026-03-07 00:54'
updated_date: '2026-03-07 00:56'
labels:
- subtitle
- character-dictionary
dependencies: []
references:
- /Users/sudacode/projects/japanese/SubMiner/src/main.ts
- /Users/sudacode/projects/japanese/SubMiner/src/core/services/tokenizer.ts
- >-
/Users/sudacode/projects/japanese/SubMiner/src/config/definitions/defaults-subtitle.ts
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Ensure subtitle tokenization and other annotations continue to work, but character-name lookup/highlighting is disabled whenever the AniList character dictionary feature is disabled. This avoids unnecessary name-match processing when the backing dictionary is unavailable.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 When anilist.characterDictionary.enabled is false, subtitle tokenization does not request character-name match metadata or highlight character names.
- [x] #2 When anilist.characterDictionary.enabled is true and subtitleStyle.nameMatchEnabled is true, existing character-name matching behavior remains enabled.
- [x] #3 Subtitle tokenization, JLPT, frequency, and other non-name annotation behavior remain unchanged when character dictionaries are disabled.
- [x] #4 Automated tests cover the runtime gating behavior.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Add a failing test in `src/main/runtime/subtitle-tokenization-main-deps.test.ts` proving name-match enablement resolves to false when `anilist.characterDictionary.enabled` is false even if `subtitleStyle.nameMatchEnabled` is true.
2. Update `src/main/runtime/subtitle-tokenization-main-deps.ts` and `src/main.ts` so subtitle tokenization only enables name matching when both the subtitle setting and the character dictionary setting are enabled.
3. Run focused Bun tests for the updated runtime deps and subtitle processing seams.
4. If verification stays green, check off acceptance criteria and record the result.
Implementation plan saved in `docs/plans/2026-03-06-character-name-gating.md`.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Created plan doc `docs/plans/2026-03-06-character-name-gating.md` after user approved the narrow runtime-gating approach. Proceeding with TDD from the subtitle tokenization main-deps seam.
Implemented the gate at the subtitle tokenization runtime-deps boundary so `getNameMatchEnabled` is false unless both `subtitleStyle.nameMatchEnabled` and `anilist.characterDictionary.enabled` are true.
Verification: `bun test src/main/runtime/subtitle-tokenization-main-deps.test.ts`, `bun test src/core/services/subtitle-processing-controller.test.ts`, `bun run typecheck`.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Character-name lookup/highlighting is now suppressed when the AniList character dictionary is disabled, while subtitle tokenization and other annotation paths remain active. Added focused runtime-deps coverage and wired the main runtime to pass the character-dictionary enabled flag into subtitle tokenization.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,38 @@
---
id: TASK-99
title: Add configurable character dictionary collapsible section open states
status: Done
assignee: []
created_date: '2026-03-07 00:00'
updated_date: '2026-03-07 00:00'
labels:
- dictionary
- config
references:
- /home/sudacode/projects/japanese/SubMiner/src/main/character-dictionary-runtime.ts
- /home/sudacode/projects/japanese/SubMiner/src/config/resolve/integrations.ts
- /home/sudacode/projects/japanese/SubMiner/src/config/definitions/defaults-integrations.ts
priority: medium
dependencies: []
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Add per-section config for character dictionary collapsible glossary sections so Description, Character Information, and Voiced by can each default open or closed independently. Default all sections closed.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Config supports `anilist.characterDictionary.collapsibleSections.description`.
- [x] #2 Config supports `anilist.characterDictionary.collapsibleSections.characterInformation`.
- [x] #3 Config supports `anilist.characterDictionary.collapsibleSections.voicedBy`.
- [x] #4 Default config keeps all generated character dictionary collapsible sections closed.
- [x] #5 Regression coverage verifies config parsing/warnings and generated glossary `details.open` behavior.
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Added per-section open-state config under `anilist.characterDictionary.collapsibleSections` for `description`, `characterInformation`, and `voicedBy`, all defaulting to `false`. Wired the glossary generator to read those settings so generated `details.open` matches config, and added regression coverage for defaults, parsing/warnings, registry exposure, and runtime glossary output.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -5,6 +5,7 @@
* Copy to $XDG_CONFIG_HOME/SubMiner/config.jsonc (or ~/.config/SubMiner/config.jsonc) and edit as needed.
*/
{
// ==========================================
// Overlay Auto-Start
// When overlay connects to mpv, automatically show overlay and hide mpv subtitles.
@@ -13,11 +14,12 @@
// ==========================================
// Texthooker Server
// Control whether browser opens automatically for texthooker.
// Configure texthooker startup launch and browser opening behavior.
// ==========================================
"texthooker": {
"openBrowser": true, // Open browser setting. Values: true | false
}, // Control whether browser opens automatically for texthooker.
"launchAtStartup": true, // Launch texthooker server automatically when SubMiner starts. Values: true | false
"openBrowser": true // Open browser setting. Values: true | false
}, // Configure texthooker startup launch and browser opening behavior.
// ==========================================
// WebSocket Server
@@ -26,17 +28,41 @@
// ==========================================
"websocket": {
"enabled": "auto", // Built-in subtitle websocket server mode. Values: auto | true | false
"port": 6677, // Built-in subtitle websocket server port.
"port": 6677 // Built-in subtitle websocket server port.
}, // Built-in WebSocket server broadcasts subtitle text to connected clients.
// ==========================================
// Annotation WebSocket
// Dedicated annotated subtitle websocket for bundled texthooker and token-aware clients.
// Independent from websocket.auto and defaults to port 6678.
// ==========================================
"annotationWebsocket": {
"enabled": true, // Annotated subtitle websocket server enabled state. Values: true | false
"port": 6678 // Annotated subtitle websocket server port.
}, // Dedicated annotated subtitle websocket for bundled texthooker and token-aware clients.
// ==========================================
// Logging
// Controls logging verbosity.
// Set to debug for full runtime diagnostics.
// ==========================================
"logging": {
"level": "info", // Minimum log level for runtime logging. Values: debug | info | warn | error
}, // Controls logging verbosity. Keep this as an object; do not replace with a bare string.
"level": "info" // Minimum log level for runtime logging. Values: debug | info | warn | error
}, // Controls logging verbosity.
// ==========================================
// Startup Warmups
// Background warmup controls for MeCab, Yomitan, dictionaries, and Jellyfin session.
// Disable individual warmups to defer load until first real usage.
// lowPowerMode defers all warmups except Yomitan extension.
// ==========================================
"startupWarmups": {
"lowPowerMode": false, // Defer startup warmups except Yomitan extension. Values: true | false
"mecab": true, // Warm up MeCab tokenizer at startup. Values: true | false
"yomitanExtension": true, // Warm up Yomitan extension at startup. Values: true | false
"subtitleDictionaries": true, // Warm up subtitle dictionaries at startup. Values: true | false
"jellyfinRemoteSession": true // Warm up Jellyfin remote session at startup. Values: true | false
}, // Background warmup controls for MeCab, Yomitan, dictionaries, and Jellyfin session.
// ==========================================
// Keyboard Shortcuts
@@ -56,7 +82,7 @@
"toggleSecondarySub": "CommandOrControl+Shift+V", // Toggle secondary sub setting.
"markAudioCard": "CommandOrControl+Shift+A", // Mark audio card setting.
"openRuntimeOptions": "CommandOrControl+Shift+O", // Open runtime options setting.
"openJimaku": "Ctrl+Shift+J", // Open jimaku setting.
"openJimaku": "Ctrl+Shift+J" // Open jimaku setting.
}, // Overlay keyboard shortcuts. Set a shortcut to null to disable.
// ==========================================
@@ -76,7 +102,7 @@
"secondarySub": {
"secondarySubLanguages": [], // Secondary sub languages setting.
"autoLoadSecondarySub": false, // Auto load secondary sub setting. Values: true | false
"defaultMode": "hover", // Default mode setting.
"defaultMode": "hover" // Default mode setting.
}, // Dual subtitle track options.
// ==========================================
@@ -88,7 +114,7 @@
"alass_path": "", // Alass path setting.
"ffsubsync_path": "", // Ffsubsync path setting.
"ffmpeg_path": "", // Ffmpeg path setting.
"replace": true, // Replace active subtitle file when synchronization succeeds.
"replace": true // Replace the active subtitle file when sync completes. Values: true | false
}, // Subsync engine and executable paths.
// ==========================================
@@ -96,7 +122,7 @@
// Initial vertical subtitle position from the bottom.
// ==========================================
"subtitlePosition": {
"yPercent": 10, // Y percent setting.
"yPercent": 10 // Y percent setting.
}, // Initial vertical subtitle position from the bottom.
// ==========================================
@@ -108,8 +134,11 @@
"enableJlpt": false, // Enable JLPT vocabulary level underlines. When disabled, JLPT tagging lookup and underlines are skipped. Values: true | false
"preserveLineBreaks": false, // Preserve line breaks in visible overlay subtitle rendering. When false, line breaks are flattened to spaces for a single-line flow. Values: true | false
"autoPauseVideoOnHover": true, // Automatically pause mpv playback while hovering subtitle text, then resume on leave. Values: true | false
"autoPauseVideoOnYomitanPopup": false, // Automatically pause mpv playback while Yomitan popup is open, then resume when popup closes. Values: true | false
"hoverTokenColor": "#f4dbd6", // Hex color used for hovered subtitle token highlight in mpv.
"hoverTokenBackgroundColor": "rgba(54, 58, 79, 0.84)", // CSS color used for hovered subtitle token background highlight in mpv.
"nameMatchEnabled": true, // Enable subtitle token coloring for matches from the SubMiner character dictionary. Values: true | false
"nameMatchColor": "#f5bde6", // Hex color used when a subtitle token matches an entry from the SubMiner character dictionary.
"fontFamily": "M PLUS 1 Medium, Source Han Sans JP, Noto Sans CJK JP", // Font family setting.
"fontSize": 35, // Font size setting.
"fontColor": "#cad3f5", // Font color setting.
@@ -130,16 +159,22 @@
"N2": "#f5a97f", // N2 setting.
"N3": "#f9e2af", // N3 setting.
"N4": "#a6e3a1", // N4 setting.
"N5": "#8aadf4", // N5 setting.
"N5": "#8aadf4" // N5 setting.
}, // Jlpt colors setting.
"frequencyDictionary": {
"enabled": false, // Enable frequency-dictionary-based highlighting based on token rank. Values: true | false
"sourcePath": "", // Optional absolute path to a frequency dictionary directory. If empty, SubMiner searches installed/default frequency-dictionary locations.
"sourcePath": "", // Optional absolute path to a frequency dictionary directory. If empty, built-in discovery search paths are used.
"topX": 1000, // Only color tokens with frequency rank <= topX (default: 1000).
"mode": "single", // single: use one color for all matching tokens. banded: use color ramp by frequency band. Values: single | banded
"matchMode": "headword", // Frequency lookup text selection mode. Values: headword | surface
"matchMode": "headword", // headword: frequency lookup uses dictionary form. surface: lookup uses subtitle-visible token text. Values: headword | surface
"singleColor": "#f5a97f", // Color used when frequencyDictionary.mode is `single`.
"bandedColors": ["#ed8796", "#f5a97f", "#f9e2af", "#a6e3a1", "#8aadf4"], // Five colors used for rank bands when mode is `banded` (from most common to least within topX).
"bandedColors": [
"#ed8796",
"#f5a97f",
"#f9e2af",
"#8bd5ca",
"#8aadf4"
] // Five colors used for rank bands when mode is `banded` (from most common to least within topX).
}, // Frequency dictionary setting.
"secondary": {
"fontFamily": "Inter, Noto Sans, Helvetica Neue, sans-serif", // Font family setting.
@@ -154,8 +189,8 @@
"backgroundColor": "transparent", // Background color setting.
"backdropFilter": "blur(6px)", // Backdrop filter setting.
"fontWeight": "normal", // Font weight setting.
"fontStyle": "normal", // Font style setting.
}, // Secondary setting.
"fontStyle": "normal" // Font style setting.
} // Secondary setting.
}, // Primary and secondary subtitle styling.
// ==========================================
@@ -169,18 +204,20 @@
"url": "http://127.0.0.1:8765", // Url setting.
"pollingRate": 3000, // Polling interval in milliseconds.
"proxy": {
"enabled": false, // Enable local AnkiConnect-compatible proxy for push-based auto-enrichment. Values: true | false
"enabled": true, // Enable local AnkiConnect-compatible proxy for push-based auto-enrichment. Values: true | false
"host": "127.0.0.1", // Bind host for local AnkiConnect proxy.
"port": 8766, // Bind port for local AnkiConnect proxy.
"upstreamUrl": "http://127.0.0.1:8765", // Upstream AnkiConnect URL proxied by local AnkiConnect proxy.
"upstreamUrl": "http://127.0.0.1:8765" // Upstream AnkiConnect URL proxied by local AnkiConnect proxy.
}, // Proxy setting.
"tags": ["SubMiner"], // Tags to add to cards mined or updated by SubMiner. Provide an empty array to disable automatic tagging.
"tags": [
"SubMiner"
], // Tags to add to cards mined or updated by SubMiner. Provide an empty array to disable automatic tagging.
"fields": {
"audio": "ExpressionAudio", // Audio setting.
"image": "Picture", // Image setting.
"sentence": "Sentence", // Sentence setting.
"miscInfo": "MiscInfo", // Misc info setting.
"translation": "SelectionText", // Translation setting.
"translation": "SelectionText" // Translation setting.
}, // Fields setting.
"ai": {
"enabled": false, // Enabled setting. Values: true | false
@@ -189,7 +226,7 @@
"model": "openai/gpt-4o-mini", // Model setting.
"baseUrl": "https://openrouter.ai/api", // Base url setting.
"targetLanguage": "English", // Target language setting.
"systemPrompt": "You are a translation engine. Return only the translated text with no explanations.", // System prompt setting.
"systemPrompt": "You are a translation engine. Return only the translated text with no explanations." // System prompt setting.
}, // Ai setting.
"media": {
"generateAudio": true, // Generate audio setting. Values: true | false
@@ -202,7 +239,7 @@
"animatedCrf": 35, // Animated crf setting.
"audioPadding": 0.5, // Audio padding setting.
"fallbackDuration": 3, // Fallback duration setting.
"maxMediaDuration": 30, // Max media duration setting.
"maxMediaDuration": 30 // Max media duration setting.
}, // Media setting.
"behavior": {
"overwriteAudio": true, // Overwrite audio setting. Values: true | false
@@ -210,7 +247,7 @@
"mediaInsertMode": "append", // Media insert mode setting.
"highlightWord": true, // Highlight word setting. Values: true | false
"notificationType": "osd", // Notification type setting.
"autoUpdateNewCards": true, // Automatically update newly added cards. Values: true | false
"autoUpdateNewCards": true // Automatically update newly added cards. Values: true | false
}, // Behavior setting.
"nPlusOne": {
"highlightEnabled": false, // Enable fast local highlighting for words already known in Anki. Values: true | false
@@ -219,20 +256,20 @@
"decks": [], // Decks used for N+1 known-word cache scope. Supports one or more deck names.
"minSentenceWords": 3, // Minimum sentence word count required for N+1 targeting (default: 3).
"nPlusOne": "#c6a0f6", // Color used for the single N+1 target token highlight.
"knownWord": "#a6da95", // Color used for legacy known-word highlights.
"knownWord": "#a6da95" // Color used for legacy known-word highlights.
}, // N plus one setting.
"metadata": {
"pattern": "[SubMiner] %f (%t)", // Pattern setting.
"pattern": "[SubMiner] %f (%t)" // Pattern setting.
}, // Metadata setting.
"isLapis": {
"enabled": false, // Enabled setting. Values: true | false
"sentenceCardModel": "Japanese sentences", // Sentence card model setting.
"sentenceCardModel": "Japanese sentences" // Sentence card model setting.
}, // Is lapis setting.
"isKiku": {
"enabled": false, // Enabled setting. Values: true | false
"fieldGrouping": "disabled", // Kiku duplicate-card field grouping mode. Values: auto | manual | disabled
"deleteDuplicateInAuto": true, // Delete duplicate in auto setting. Values: true | false
}, // Is kiku setting.
"deleteDuplicateInAuto": true // Delete duplicate in auto setting. Values: true | false
} // Is kiku setting.
}, // Automatic Anki updates and media generation options.
// ==========================================
@@ -242,7 +279,7 @@
"jimaku": {
"apiBaseUrl": "https://jimaku.cc", // Api base url setting.
"languagePreference": "ja", // Preferred language used in Jimaku search. Values: ja | en | none
"maxEntryResults": 10, // Maximum Jimaku search results returned.
"maxEntryResults": 10 // Maximum Jimaku search results returned.
}, // Jimaku API configuration and defaults.
// ==========================================
@@ -253,16 +290,33 @@
"mode": "automatic", // YouTube subtitle generation mode for the launcher script. Values: automatic | preprocess | off
"whisperBin": "", // Path to whisper.cpp CLI used as fallback transcription engine.
"whisperModel": "", // Path to whisper model used for fallback transcription.
"primarySubLanguages": ["ja", "jpn"], // Comma-separated primary subtitle language priority used by the launcher.
"primarySubLanguages": [
"ja",
"jpn"
] // Ordered primary subtitle language priority list used by the launcher.
}, // Defaults for subminer YouTube subtitle extraction/transcription mode.
// ==========================================
// Anilist
// Anilist API credentials and update behavior.
// Includes optional auto-sync for a merged MRU-based character dictionary in bundled Yomitan.
// Character dictionaries are keyed by AniList media ID (no season/franchise merge).
// ==========================================
"anilist": {
"enabled": false, // Enable AniList post-watch progress updates. Values: true | false
"accessToken": "", // Optional explicit AniList access token override; leave empty to use locally stored token from setup.
"characterDictionary": {
"enabled": false, // Enable automatic Yomitan character dictionary sync for currently watched AniList media. Values: true | false
"refreshTtlHours": 168, // Legacy setting; merged character dictionary retention is now usage-based and this value is ignored.
"maxLoaded": 3, // Maximum number of most-recently-used anime snapshots included in the merged Yomitan character dictionary.
"evictionPolicy": "delete", // Legacy setting; merged character dictionary eviction is usage-based and this value is ignored. Values: disable | delete
"profileScope": "all", // Yomitan profile scope for dictionary enable/disable updates. Values: all | active
"collapsibleSections": {
"description": false, // Open the Description section by default in character dictionary glossary entries. Values: true | false
"characterInformation": false, // Open the Character Information section by default in character dictionary glossary entries. Values: true | false
"voicedBy": false // Open the Voiced by section by default in character dictionary glossary entries. Values: true | false
} // Collapsible sections setting.
} // Character dictionary setting.
}, // Anilist API credentials and update behavior.
// ==========================================
@@ -286,8 +340,16 @@
"pullPictures": false, // Enable Jellyfin poster/icon fetching for launcher menus. Values: true | false
"iconCacheDir": "/tmp/subminer-jellyfin-icons", // Directory used by launcher for cached Jellyfin poster icons.
"directPlayPreferred": true, // Try direct play before server-managed transcoding when possible. Values: true | false
"directPlayContainers": ["mkv", "mp4", "webm", "mov", "flac", "mp3", "aac"], // Container allowlist for direct play decisions.
"transcodeVideoCodec": "h264", // Preferred transcode video codec when direct play is unavailable.
"directPlayContainers": [
"mkv",
"mp4",
"webm",
"mov",
"flac",
"mp3",
"aac"
], // Container allowlist for direct play decisions.
"transcodeVideoCodec": "h264" // Preferred transcode video codec when direct play is unavailable.
}, // Optional Jellyfin integration for auth, browsing, and playback launch.
// ==========================================
@@ -298,7 +360,7 @@
"discordPresence": {
"enabled": false, // Enable optional Discord Rich Presence updates. Values: true | false
"updateIntervalMs": 3000, // Minimum interval between presence payload updates.
"debounceMs": 750, // Debounce delay used to collapse bursty presence updates.
"debounceMs": 750 // Debounce delay used to collapse bursty presence updates.
}, // Optional Discord Rich Presence activity card updates for current playback/study session.
// ==========================================
@@ -320,7 +382,7 @@
"telemetryDays": 30, // Telemetry retention window in days.
"dailyRollupsDays": 365, // Daily rollup retention window in days.
"monthlyRollupsDays": 1825, // Monthly rollup retention window in days.
"vacuumIntervalDays": 7, // Minimum days between VACUUM runs.
}, // Retention setting.
}, // Enable/disable immersion tracking.
"vacuumIntervalDays": 7 // Minimum days between VACUUM runs.
} // Retention setting.
} // Enable/disable immersion tracking.
}

View File

@@ -1,155 +0,0 @@
# Immersion SQLite Verification Implementation Plan
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
**Goal:** Make the SQLite-backed immersion tracking persistence tests visible in the repo's verification surface and reproducible through at least one documented automated command.
**Architecture:** Keep the existing Bun fast lane intact for routine local verification, but add an explicit SQLite verification lane that runs the database-backed immersion tests under a runtime with `node:sqlite` support. Surface unsupported-runtime behavior clearly in the source tests and contributor docs so skipped or omitted coverage is no longer mistaken for a fully green persistence lane.
**Tech Stack:** TypeScript, Bun scripts in `package.json`, Node's built-in `node:test` and `node:sqlite`, GitHub Actions workflows, Markdown docs in `README.md`.
---
### Task 1: Audit and expose the SQLite-backed immersion test surface
**Files:**
- Modify: `src/core/services/immersion-tracker-service.test.ts`
- Modify: `src/core/services/immersion-tracker/storage-session.test.ts`
- Reference: `src/main/runtime/registry.test.ts`
**Step 1: Write the failing test**
Refactor the SQLite-gated immersion tests so missing `node:sqlite` support is reported with an explicit skip reason instead of a silent top-level `test.skip` alias.
**Step 2: Run test to verify it fails**
Run: `bun test src/core/services/immersion-tracker-service.test.ts src/core/services/immersion-tracker/storage-session.test.ts`
Expected: the current output shows generic skips or hides the storage-session suite from normal scripted verification, which is too opaque for contributors.
**Step 3: Write minimal implementation**
Mirror the `src/main/runtime/registry.test.ts` pattern: add a helper that either loads `DatabaseSync` or skips with a message like `requires node:sqlite support in this runtime`, then wrap each SQLite-backed test through that helper.
**Step 4: Run test to verify it passes**
Run: `bun test src/core/services/immersion-tracker-service.test.ts src/core/services/immersion-tracker/storage-session.test.ts`
Expected: PASS, with explicit skip messages in unsupported runtimes.
### Task 2: Add a reproducible SQLite verification command
**Files:**
- Modify: `package.json`
- Reference: `src/core/services/immersion-tracker-service.test.ts`
- Reference: `src/core/services/immersion-tracker/storage-session.test.ts`
**Step 1: Write the failing test**
Add a dedicated script contract for the SQLite-backed immersion verification lane so both persistence-heavy suites are intentionally grouped and runnable together.
**Step 2: Run test to verify it fails**
Run: `bun run test:immersion:sqlite`
Expected: FAIL because no such reproducible lane exists yet.
**Step 3: Write minimal implementation**
Update `package.json` with explicit scripts for the SQLite lane. Prefer a command shape that actually executes the built JS tests under Node with `node:sqlite` support, for example:
- `test:immersion:sqlite:dist`: `node --test dist/core/services/immersion-tracker-service.test.js dist/core/services/immersion-tracker/storage-session.test.js`
- `test:immersion:sqlite`: `bun run build && bun run test:immersion:sqlite:dist`
If build cost or runtime behavior requires a small adjustment, keep the core contract the same: one documented command must run both SQLite-backed immersion suites end-to-end.
**Step 4: Run test to verify it passes**
Run: `bun run test:immersion:sqlite`
Expected: PASS in a Node runtime with `node:sqlite`, executing both persistence suites without Bun-only skips.
### Task 3: Wire the SQLite lane into automated verification
**Files:**
- Modify: `.github/workflows/ci.yml`
- Modify: `.github/workflows/release.yml`
- Reference: `package.json`
**Step 1: Write the failing test**
Add the new SQLite immersion lane to the repo's automated verification so contributors and CI can rely on a real persistence check rather than the Bun fast lane alone.
**Step 2: Run test to verify it fails**
Run: `bun run test:immersion:sqlite`
Expected: local command may pass, but CI/release workflows still omit the lane entirely.
**Step 3: Write minimal implementation**
Update both workflows to provision a Node version with `node:sqlite` support before the SQLite lane runs, then execute `bun run test:immersion:sqlite` in the quality gate after the bundle build produces `dist/**` test files.
**Step 4: Run test to verify it passes**
Run: `bun run test:immersion:sqlite`
Expected: PASS locally, and workflow definitions clearly show the SQLite lane as part of automated verification.
### Task 4: Document contributor-facing prerequisites and commands
**Files:**
- Modify: `README.md`
- Reference: `package.json`
- Reference: `.github/workflows/ci.yml`
**Step 1: Write the failing test**
Extend the verification docs so contributors can discover the SQLite lane, know why the Bun source lane may skip those cases, and understand which command reproduces the persistence coverage.
**Step 2: Run test to verify it fails**
Run: `grep -n "test:immersion:sqlite" README.md`
Expected: FAIL because the dedicated immersion SQLite lane is undocumented.
**Step 3: Write minimal implementation**
Update `README.md` to document:
- the Bun fast/default lane versus the SQLite persistence lane
- the `node:sqlite` prerequisite for the reproducible command
- that the dedicated lane covers session persistence/finalization behavior beyond seam tests
**Step 4: Run test to verify it passes**
Run: `grep -n "test:immersion:sqlite" README.md && grep -n "node:sqlite" README.md`
Expected: PASS, with clear contributor guidance.
### Task 5: Verify persistence coverage end-to-end
**Files:**
- Test: `src/core/services/immersion-tracker-service.test.ts`
- Test: `src/core/services/immersion-tracker/storage-session.test.ts`
- Reference: `README.md`
- Reference: `package.json`
**Step 1: Write the failing test**
Prove the final lane exercises real DB-backed persistence/finalization paths, not just the seam tests.
**Step 2: Run test to verify it fails**
Run: `bun run test:immersion:sqlite`
Expected: before implementation, the command does not exist or does not cover both SQLite-backed suites.
**Step 3: Write minimal implementation**
Keep the dedicated lane pointed at both existing SQLite-backed test files so it covers representative finalization and persistence behavior such as:
- `destroy finalizes active session and persists final telemetry`
- `start/finalize session updates ended_at and status`
- `executeQueuedWrite inserts event and telemetry rows`
**Step 4: Run test to verify it passes**
Run: `bun run test:immersion:sqlite`
Expected: PASS, with those DB-backed persistence/finalization cases executing successfully under Node.

View File

@@ -1,92 +0,0 @@
# Merged Character Dictionary Implementation Plan
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
**Goal:** Replace per-anime character dictionary imports with one merged Yomitan dictionary driven by MRU usage retention.
**Architecture:** Persist normalized per-media character dictionary snapshots locally, maintain MRU retained media ids in auto-sync state, and rebuild a single merged Yomitan zip only when the retained set changes. Keep external AniList fetches only for media without a local snapshot; normal revisits stay local.
**Tech Stack:** TypeScript, Bun test, Node fs/path, existing Yomitan zip generation helpers.
---
### Task 1: Lock in merged auto-sync behavior
**Files:**
- Modify: `src/main/runtime/character-dictionary-auto-sync.test.ts`
- Test: `src/main/runtime/character-dictionary-auto-sync.test.ts`
**Step 1: Write the failing test**
Add tests for:
- single merged dictionary title/import replacing per-media imports
- MRU reorder causing rebuild only when order changes
- unchanged revisit skipping rebuild/import
- capped retained set evicting least-recently-used media
**Step 2: Run test to verify it fails**
Run: `bun test src/main/runtime/character-dictionary-auto-sync.test.ts`
Expected: FAIL on old per-media import assumptions / missing merged behavior
**Step 3: Write minimal implementation**
Update auto-sync runtime to track retained media ids and merged revision/hash, call merged zip builder, and replace one imported Yomitan dictionary.
**Step 4: Run test to verify it passes**
Run: `bun test src/main/runtime/character-dictionary-auto-sync.test.ts`
Expected: PASS
### Task 2: Add snapshot + merged-zip runtime support
**Files:**
- Modify: `src/main/character-dictionary-runtime.ts`
- Modify: `src/main/character-dictionary-runtime.test.ts`
- Test: `src/main/character-dictionary-runtime.test.ts`
**Step 1: Write the failing test**
Add tests for:
- saving/loading normalized per-media snapshots without per-media zip cache
- building merged zip from retained media snapshots with stable dictionary title
- preserving images/terms from multiple media in merged output
**Step 2: Run test to verify it fails**
Run: `bun test src/main/character-dictionary-runtime.test.ts`
Expected: FAIL because snapshot/merged APIs do not exist yet
**Step 3: Write minimal implementation**
Refactor dictionary runtime to expose snapshot generation/loading and merged zip building from stored metadata/images.
**Step 4: Run test to verify it passes**
Run: `bun test src/main/character-dictionary-runtime.test.ts`
Expected: PASS
### Task 3: Wire app/runtime config and docs
**Files:**
- Modify: `src/main.ts`
- Modify: `src/config/definitions/options-integrations.ts`
- Modify: `README.md`
**Step 1: Write the failing test**
Add or update tests if needed for new dependency wiring / docs-adjacent config description expectations.
**Step 2: Run test to verify it fails**
Run: `bun test src/main/runtime/character-dictionary-auto-sync.test.ts src/main/character-dictionary-runtime.test.ts`
Expected: FAIL until wiring matches merged flow
**Step 3: Write minimal implementation**
Swap app wiring to new snapshot + merged build API, update config/docs text from TTL semantics to usage-based merged retention.
**Step 4: Run test to verify it passes**
Run: `bun test src/main/runtime/character-dictionary-auto-sync.test.ts src/main/character-dictionary-runtime.test.ts && bun run tsc --noEmit`
Expected: PASS

View File

@@ -1,121 +0,0 @@
# Subtitle Sync Verification Implementation Plan
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
**Goal:** Replace the no-op `test:subtitle` lane with real automated subtitle-sync verification that reuses the maintained subsync tests and documents the real contributor workflow.
**Architecture:** Repoint the subtitle verification command at the existing source-level subsync tests instead of inventing a second hidden suite. Add one focused ffsubsync failure-path test so the subtitle lane explicitly covers both engines plus a non-happy path, then update contributor docs to describe the dedicated subtitle lane and how it relates to `test:core`.
**Tech Stack:** TypeScript, Bun test, Node test/assert, npm package scripts, Markdown docs.
---
### Task 1: Lock subtitle lane to real subsync tests
**Files:**
- Modify: `package.json`
**Step 1: Write the failing test**
Define the intended command shape first: `test:subtitle:src` should run `src/core/services/subsync.test.ts` and `src/subsync/utils.test.ts`, `test:subtitle` should invoke that real source lane, and no placeholder echo should remain.
**Step 2: Run test to verify it fails**
Run: `bun run test:subtitle`
Expected: It performs a build and prints `Subtitle tests are currently not configured`, proving the lane is still a no-op.
**Step 3: Write minimal implementation**
Update `package.json` so:
- `test:subtitle:src` runs `bun test src/core/services/subsync.test.ts src/subsync/utils.test.ts`
- `test:subtitle` runs the new source lane directly
- `test:subtitle:dist` is removed if it is no longer the real verification path
**Step 4: Run test to verify it passes**
Run: `bun run test:subtitle`
Expected: PASS with Bun executing the real subtitle-sync test files.
### Task 2: Add explicit ffsubsync non-happy-path coverage
**Files:**
- Modify: `src/core/services/subsync.test.ts`
- Test: `src/core/services/subsync.test.ts`
**Step 1: Write the failing test**
Add a test that runs `runSubsyncManual({ engine: 'ffsubsync' })` with a stub ffsubsync executable that exits non-zero and writes stderr, then assert:
- `result.ok === false`
- `result.message` starts with `ffsubsync synchronization failed`
- the failure message includes command details surfaced to the user
**Step 2: Run test to verify it fails**
Run: `bun test src/core/services/subsync.test.ts`
Expected: FAIL because ffsubsync failure propagation is not asserted yet.
**Step 3: Write minimal implementation**
Keep production code unchanged unless the new test exposes a real bug. If needed, tighten failure assertions or message propagation in `src/core/services/subsync.ts` without changing successful behavior.
**Step 4: Run test to verify it passes**
Run: `bun test src/core/services/subsync.test.ts`
Expected: PASS with both alass and ffsubsync paths covered, including a non-happy path.
### Task 3: Make contributor docs match the real verification path
**Files:**
- Modify: `README.md`
- Modify: `package.json`
**Step 1: Write the failing test**
Use the repository state as the failure signal: README currently advertises subtitle sync as a feature but does not tell contributors that `bun run test:subtitle` is the real verification lane.
**Step 2: Run test to verify it fails**
Run: `bun run test:subtitle && bun test src/subsync/utils.test.ts`
Expected: Tests pass, but docs still do not explain the lane; this is the remaining acceptance-criteria gap.
**Step 3: Write minimal implementation**
Update `README.md` with a short contributor-facing verification note that:
- points to `bun run test:subtitle` for subtitle-sync coverage
- states that the lane reuses the maintained subsync tests already included in broader core coverage
- avoids implying there is a separate hidden subtitle test harness beyond those tests
**Step 4: Run test to verify it passes**
Run: `bun run test:subtitle`
Expected: PASS, with docs and scripts now aligned around the same subtitle verification strategy.
### Task 4: Verify matrix integration stays clean
**Files:**
- Modify: `package.json` (only if Task 1/3 exposed cleanup needs)
**Step 1: Write the failing test**
Treat duplication as the failure condition: confirm the dedicated subtitle lane reuses the same maintained files already present in `test:core:src` rather than creating a second divergent suite.
**Step 2: Run test to verify it fails**
Run: `bun run test:subtitle && bun run test:core:src`
Expected: If file lists diverge unexpectedly, this review step exposes it before handoff.
**Step 3: Write minimal implementation**
If needed, do the smallest script cleanup necessary so subtitle coverage remains explicit without hiding or duplicating existing core coverage.
**Step 4: Run test to verify it passes**
Run: `bun run test:subtitle && bun run test:core:src`
Expected: PASS, confirming the dedicated lane and the broader core suite agree on subtitle coverage.

View File

@@ -1,169 +0,0 @@
# Testing Workflow Test Matrix Implementation Plan
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
**Goal:** Make the standard test commands reflect the maintained test surface so newly added tests are discovered automatically or intentionally documented outside the default lane.
**Architecture:** Replace the current hand-maintained file allowlists in `package.json` with directory-based Bun test lanes that map to maintained test surfaces. Keep the default developer lane fast, move slower or environment-specific checks into explicit commands, and document the resulting matrix in `README.md` so contributors know exactly which command to run.
**Tech Stack:** TypeScript, Bun test, npm-style package scripts in `package.json`, Markdown docs in `README.md`.
---
### Task 1: Lock in the desired script matrix with failing tests/audit checks
**Files:**
- Modify: `package.json`
- Test: `package.json`
- Reference: `src/main-entry-runtime.test.ts`
- Reference: `src/anki-integration/anki-connect-proxy.test.ts`
- Reference: `src/main/runtime/registry.test.ts`
**Step 1: Write the failing test**
Define the desired script structure in `package.json` by editing the script map so that these lanes exist:
- `test:fast` for default fast verification
- `test:full` for the maintained source test surface
- `test:env` for environment-specific checks
The fast lane should stay selective and intentional. The full lane should use directory-based discovery rather than file-by-file allowlists, with representative coverage from:
- `src/main-entry-runtime.test.ts`
- `src/anki-integration/**/*.test.ts`
- `src/main/**/*.test.ts`
- `launcher/**/*.test.ts`
**Step 2: Run test to verify it fails**
Run: `bun run test:full`
Expected: FAIL because `test:full` does not exist yet, and previously omitted maintained tests are still outside the standard matrix.
**Step 3: Write minimal implementation**
Update `package.json` scripts so:
- `test` points at `test:fast`
- `test:fast` runs the fast default lane only
- `test:full` runs directory-based maintained suites instead of file allowlists
- `test:env` runs environment-specific verification (for example launcher/plugin and sqlite-gated suites)
- subsystem scripts use stable path globs or directory arguments so new tests are discovered automatically
Prefer commands like these, adjusted only as needed for Bun behavior in this repo:
- `bun test src/config/**/*.test.ts`
- `bun test src/{cli,core,renderer,subtitle,subsync,main,anki-integration}/*.test.ts ...` only if Bun cannot take the broader directory directly
- `bun test launcher/**/*.test.ts`
Do not keep large hand-maintained file enumerations for maintained unit/integration lanes.
**Step 4: Run test to verify it passes**
Run: `bun run test:full`
Expected: PASS, including automated execution of representative tests that were previously omitted from the standard matrix.
### Task 2: Separate environment-specific verification from the maintained default/full lanes
**Files:**
- Modify: `package.json`
- Test: `src/main/runtime/registry.test.ts`
- Test: `launcher/smoke.e2e.test.ts`
- Test: `src/core/services/immersion-tracker-service.test.ts`
**Step 1: Write the failing test**
Refine the package scripts so environment-specific checks are explicitly grouped outside the default fast lane. Treat these as the primary environment-specific examples unless repo behavior proves a better split during execution:
- launcher smoke/plugin checks that rely on local process or Lua execution
- sqlite-dependent checks that may skip when `node:sqlite` is unavailable
**Step 2: Run test to verify it fails**
Run: `bun run test:env`
Expected: FAIL because the environment-specific lane is not defined yet.
**Step 3: Write minimal implementation**
Add explicit environment-specific scripts in `package.json`, such as:
- a launcher/plugin lane that runs `launcher/smoke.e2e.test.ts` plus `lua scripts/test-plugin-start-gate.lua`
- a sqlite lane for tests that require `node:sqlite` support or otherwise need environment notes
- an aggregate `test:env` command that runs all environment-specific lanes
Keep these lanes documented and reproducible rather than silently excluded.
**Step 4: Run test to verify it passes**
Run: `bun run test:env`
Expected: PASS in supported environments, or clear documented skip behavior where the tests themselves intentionally gate on missing runtime support.
### Task 3: Document contributor-facing test commands and matrix
**Files:**
- Modify: `README.md`
- Reference: `package.json`
**Step 1: Write the failing test**
Specify the contributor-focused testing section that `README.md` must gain; it should cover:
- fast verification command
- full verification command
- environment-specific verification command
- plain-language explanation of which suites each lane covers and why
**Step 2: Run test to verify it fails**
Run: `grep -n "Testing" README.md`
Expected: no contributor testing matrix section exists yet.
**Step 3: Write minimal implementation**
Update `README.md` with a concise `Testing` section that documents:
- `bun run test` / `bun run test:fast` for fast local verification
- `bun run test:full` for the maintained source test surface
- `bun run test:env` for environment-specific verification
- any important notes about sqlite-gated tests and launcher/plugin checks
Keep the matrix concrete and reproducible.
**Step 4: Run test to verify it passes**
Run: `grep -n "Testing" README.md && grep -n "test:full" README.md && grep -n "test:env" README.md`
Expected: PASS with the new contributor-facing matrix present.
### Task 4: Verify representative omitted suites now belong to automated lanes
**Files:**
- Test: `src/main-entry-runtime.test.ts`
- Test: `src/anki-integration/anki-connect-proxy.test.ts`
- Test: `src/main/runtime/registry.test.ts`
- Reference: `package.json`
- Reference: `README.md`
**Step 1: Write the failing test**
Use targeted command checks to prove these previously omitted surfaces are now in the matrix:
- entry/runtime: `src/main-entry-runtime.test.ts`
- Anki integration: `src/anki-integration/anki-connect-proxy.test.ts`
- main runtime: `src/main/runtime/registry.test.ts`
**Step 2: Run test to verify it fails**
Run: `bun run test:full src/main-entry-runtime.test.ts`
Expected: either unsupported invocation or evidence that the current matrix still does not include these surfaces automatically.
**Step 3: Write minimal implementation**
Adjust the final script paths/globs until the full matrix includes those representative surfaces without file-by-file script maintenance.
**Step 4: Run test to verify it passes**
Run: `bun test src/main-entry-runtime.test.ts src/anki-integration/anki-connect-proxy.test.ts src/main/runtime/registry.test.ts && bun run test:fast && bun run test:full`
Expected: PASS, with at least one representative test from each required surface executing through the documented automated lanes.

View File

@@ -1,5 +1,6 @@
import fs from 'node:fs';
import path from 'node:path';
import os from 'node:os';
import { spawn } from 'node:child_process';
import { fail, log } from '../log.js';
import { commandExists, isYoutubeTarget, realpathMaybe, resolvePathMaybe } from '../util.js';
import { collectVideos, showFzfMenu, showRofiMenu } from '../picker.js';
@@ -14,6 +15,15 @@ import {
import { generateYoutubeSubtitles } from '../youtube.js';
import type { Args } from '../types.js';
import type { LauncherCommandContext } from './context.js';
import { ensureLauncherSetupReady } from '../setup-gate.js';
import {
getDefaultConfigDir,
getSetupStatePath,
readSetupState,
} from '../../src/shared/setup-state.js';
const SETUP_WAIT_TIMEOUT_MS = 10 * 60 * 1000;
const SETUP_POLL_INTERVAL_MS = 500;
function checkDependencies(args: Args): void {
const missing: string[] = [];
@@ -85,12 +95,47 @@ function registerCleanup(context: LauncherCommandContext): void {
});
}
// Block playback until first-run setup has completed.
// No-op when the SubMiner AppImage path is unknown (nothing to launch).
// If setup is pending, spawns the app in detached background setup mode and
// polls the on-disk setup state until completion, cancellation, or timeout.
async function ensurePlaybackSetupReady(context: LauncherCommandContext): Promise<void> {
  const { args, appPath } = context;
  if (!appPath) return;
  // Setup state lives under the app's config dir (XDG-aware resolution).
  const configDir = getDefaultConfigDir({
    xdgConfigHome: process.env.XDG_CONFIG_HOME,
    homeDir: os.homedir(),
  });
  const statePath = getSetupStatePath(configDir);
  const ready = await ensureLauncherSetupReady({
    readSetupState: () => readSetupState(statePath),
    launchSetupApp: () => {
      const setupArgs = ['--background', '--setup'];
      if (args.logLevel) {
        // Propagate the launcher's verbosity to the setup app.
        setupArgs.push('--log-level', args.logLevel);
      }
      // Detach and unref so the launcher can keep polling (or exit)
      // independently of the spawned app process.
      const child = spawn(appPath, setupArgs, {
        detached: true,
        stdio: 'ignore',
      });
      child.unref();
    },
    sleep: (ms) => new Promise((resolve) => setTimeout(resolve, ms)),
    now: () => Date.now(),
    timeoutMs: SETUP_WAIT_TIMEOUT_MS,
    pollIntervalMs: SETUP_POLL_INTERVAL_MS,
  });
  if (!ready) {
    // NOTE(review): fail() is expected to abort the launcher run — confirm in ../log.js.
    fail('SubMiner setup is incomplete. Complete setup in the app, then retry playback.');
  }
}
export async function runPlaybackCommand(context: LauncherCommandContext): Promise<void> {
const { args, appPath, scriptPath, mpvSocketPath, pluginRuntimeConfig, processAdapter } = context;
if (!appPath) {
fail('SubMiner AppImage not found. Install to ~/.local/bin/ or set SUBMINER_APPIMAGE_PATH.');
}
await ensurePlaybackSetupReady(context);
if (!args.target) {
checkPickerDependencies(args);
}

107
launcher/setup-gate.test.ts Normal file
View File

@@ -0,0 +1,107 @@
import test from 'node:test';
import assert from 'node:assert/strict';
import { ensureLauncherSetupReady, waitForSetupCompletion } from './setup-gate';
import type { SetupState } from '../src/shared/setup-state';
test('waitForSetupCompletion resolves completed and cancelled states', async () => {
  // States observed in order: no state file yet, setup running, then done.
  const states: Array<SetupState | null> = [
    null,
    {
      version: 1,
      status: 'in_progress',
      completedAt: null,
      completionSource: null,
      lastSeenYomitanDictionaryCount: 0,
      pluginInstallStatus: 'unknown',
      pluginInstallPathSummary: null,
    },
    {
      version: 1,
      status: 'completed',
      completedAt: '2026-03-07T00:00:00.000Z',
      completionSource: 'user',
      lastSeenYomitanDictionaryCount: 1,
      pluginInstallStatus: 'skipped',
      pluginInstallPathSummary: null,
    },
  ];
  let readIndex = 0;
  let clock = 0;
  const result = await waitForSetupCompletion({
    readSetupState: () => states[readIndex++] ?? null,
    sleep: async () => undefined,
    // Deterministic clock: each call advances 100ms.
    now: () => (clock += 100),
    timeoutMs: 5_000,
    pollIntervalMs: 100,
  });
  assert.equal(result, 'completed');
});
test('ensureLauncherSetupReady launches setup app and resumes only after completion', async () => {
const calls: string[] = [];
let reads = 0;
const ready = await ensureLauncherSetupReady({
readSetupState: () => {
reads += 1;
if (reads === 1) return null;
if (reads === 2) {
return {
version: 1,
status: 'in_progress',
completedAt: null,
completionSource: null,
lastSeenYomitanDictionaryCount: 0,
pluginInstallStatus: 'unknown',
pluginInstallPathSummary: null,
};
}
return {
version: 1,
status: 'completed',
completedAt: '2026-03-07T00:00:00.000Z',
completionSource: 'user',
lastSeenYomitanDictionaryCount: 1,
pluginInstallStatus: 'installed',
pluginInstallPathSummary: '/tmp/mpv',
};
},
launchSetupApp: () => {
calls.push('launch');
},
sleep: async () => undefined,
now: (() => {
let value = 0;
return () => (value += 100);
})(),
timeoutMs: 5_000,
pollIntervalMs: 100,
});
assert.equal(ready, true);
assert.deepEqual(calls, ['launch']);
});
test('ensureLauncherSetupReady fails on timeout/cancelled state', async () => {
const result = await ensureLauncherSetupReady({
readSetupState: () => ({
version: 1,
status: 'cancelled',
completedAt: null,
completionSource: null,
lastSeenYomitanDictionaryCount: 0,
pluginInstallStatus: 'unknown',
pluginInstallPathSummary: null,
}),
launchSetupApp: () => undefined,
sleep: async () => undefined,
now: () => 0,
timeoutMs: 5_000,
pollIntervalMs: 100,
});
assert.equal(result, false);
});

41
launcher/setup-gate.ts Normal file
View File

@@ -0,0 +1,41 @@
import { isSetupCompleted, type SetupState } from '../src/shared/setup-state.js';
/**
 * Poll the persisted setup state until it reaches a terminal outcome.
 *
 * Reads state via the injected reader every `pollIntervalMs` until the state
 * reports completion ('completed'), the user cancels ('cancelled'), or
 * `timeoutMs` elapses ('timeout'). Clock and sleep are injected so tests can
 * drive the loop deterministically.
 */
export async function waitForSetupCompletion(deps: {
  readSetupState: () => SetupState | null;
  sleep: (ms: number) => Promise<void>;
  now: () => number;
  timeoutMs: number;
  pollIntervalMs: number;
}): Promise<'completed' | 'cancelled' | 'timeout'> {
  const deadline = deps.now() + deps.timeoutMs;
  for (;;) {
    if (deps.now() > deadline) {
      return 'timeout';
    }
    const snapshot = deps.readSetupState();
    if (isSetupCompleted(snapshot)) {
      return 'completed';
    }
    if (snapshot?.status === 'cancelled') {
      return 'cancelled';
    }
    await deps.sleep(deps.pollIntervalMs);
  }
}
/**
 * Gate launcher flows on completed first-run setup.
 *
 * Returns true immediately when setup is already complete; otherwise launches
 * the setup app once and waits (polling) until setup completes, is cancelled,
 * or the timeout elapses. Only a 'completed' outcome yields true.
 */
export async function ensureLauncherSetupReady(deps: {
  readSetupState: () => SetupState | null;
  launchSetupApp: () => void;
  sleep: (ms: number) => Promise<void>;
  now: () => number;
  timeoutMs: number;
  pollIntervalMs: number;
}): Promise<boolean> {
  const alreadyDone = isSetupCompleted(deps.readSetupState());
  if (alreadyDone) {
    return true;
  }
  deps.launchSetupApp();
  return (await waitForSetupCompletion(deps)) === 'completed';
}

View File

@@ -4,6 +4,13 @@ import fs from 'node:fs';
import os from 'node:os';
import path from 'node:path';
import { spawn, spawnSync } from 'node:child_process';
import {
createDefaultSetupState,
getDefaultConfigDir,
getSetupStatePath,
readSetupState,
writeSetupState,
} from '../src/shared/setup-state.js';
type RunResult = {
status: number | null;
@@ -25,6 +32,9 @@ type SmokeCase = {
mpvOverlayLogPath: string;
};
const LAUNCHER_RUN_TIMEOUT_MS = 25000;
const LONG_SMOKE_TEST_TIMEOUT_MS = 30000;
function writeExecutable(filePath: string, body: string): void {
fs.writeFileSync(filePath, body);
fs.chmodSync(filePath, 0o755);
@@ -55,6 +65,13 @@ function createSmokeCase(name: string): SmokeCase {
`socket_path=${socketPath}\n`,
);
const configDir = getDefaultConfigDir({ xdgConfigHome, homeDir });
const setupState = createDefaultSetupState();
setupState.status = 'completed';
setupState.completedAt = '2026-03-07T00:00:00.000Z';
setupState.completionSource = 'user';
writeSetupState(getSetupStatePath(configDir), setupState);
const fakeMpvLogPath = path.join(artifactsDir, 'fake-mpv.log');
const fakeAppLogPath = path.join(artifactsDir, 'fake-app.log');
const fakeAppStartLogPath = path.join(artifactsDir, 'fake-app-start.log');
@@ -162,7 +179,7 @@ function runLauncher(
{
env,
encoding: 'utf8',
timeout: 15000,
timeout: LAUNCHER_RUN_TIMEOUT_MS,
},
);
@@ -221,6 +238,22 @@ async function waitForJsonLines(
}
}
// Sanity-check the fixture itself: createSmokeCase must pre-seed a completed
// setup state so launcher smoke runs are not gated on interactive setup.
test('launcher smoke fixture seeds completed setup state', () => {
  const smokeCase = createSmokeCase('setup-state');
  try {
    const configDir = getDefaultConfigDir({
      xdgConfigHome: smokeCase.xdgConfigHome,
      homeDir: smokeCase.homeDir,
    });
    const statePath = getSetupStatePath(configDir);
    // Reading back status 'completed' proves both the write and the path wiring.
    assert.equal(readSetupState(statePath)?.status, 'completed');
  } finally {
    // Remove both temp roots even if the assertion throws.
    fs.rmSync(smokeCase.root, { recursive: true, force: true });
    fs.rmSync(smokeCase.socketDir, { recursive: true, force: true });
  }
});
test('launcher mpv status returns ready when socket is connectable', async () => {
await withSmokeCase('mpv-status', async (smokeCase) => {
const env = makeTestEnv(smokeCase);
@@ -263,7 +296,7 @@ test('launcher mpv status returns ready when socket is connectable', async () =>
test(
'launcher start-overlay run forwards socket/backend and stops overlay after mpv exits',
{ timeout: 20000 },
{ timeout: LONG_SMOKE_TEST_TIMEOUT_MS },
async () => {
await withSmokeCase('overlay-start-stop', async (smokeCase) => {
const env = makeTestEnv(smokeCase);
@@ -322,7 +355,7 @@ test(
test(
'launcher starts mpv paused when plugin auto-start visible overlay gate is enabled',
{ timeout: 20000 },
{ timeout: LONG_SMOKE_TEST_TIMEOUT_MS },
async () => {
await withSmokeCase('autoplay-ready-gate', async (smokeCase) => {
fs.writeFileSync(

View File

@@ -1,6 +1,6 @@
{
"name": "subminer",
"version": "0.3.0",
"version": "0.4.0",
"description": "All-in-one sentence mining overlay with AnkiConnect and dictionary integration",
"packageManager": "bun@1.3.5",
"main": "dist/main-entry.js",
@@ -8,21 +8,24 @@
"typecheck": "tsc --noEmit -p tsconfig.typecheck.json",
"typecheck:watch": "tsc --watch --preserveWatchOutput -p tsconfig.typecheck.json",
"get-frequency": "bun run scripts/get_frequency.ts --pretty --color-top-x 10000 --yomitan-user-data ~/.config/SubMiner --colorized-line",
"get-frequency:electron": "bun build scripts/get_frequency.ts --format=cjs --target=node --outfile dist/scripts/get_frequency.js --external electron && electron dist/scripts/get_frequency.js --pretty --color-top-x 10000 --yomitan-user-data ~/.config/SubMiner --colorized-line",
"get-frequency:electron": "bun run build:yomitan && bun build scripts/get_frequency.ts --format=cjs --target=node --outfile dist/scripts/get_frequency.js --external electron && electron dist/scripts/get_frequency.js --pretty --color-top-x 10000 --yomitan-user-data ~/.config/SubMiner --colorized-line",
"test-yomitan-parser": "bun run scripts/test-yomitan-parser.ts",
"test-yomitan-parser:electron": "bun build scripts/test-yomitan-parser.ts --format=cjs --target=node --outfile dist/scripts/test-yomitan-parser.js --external electron && electron dist/scripts/test-yomitan-parser.js",
"build": "tsc -p tsconfig.json && bun run build:renderer && cp src/renderer/index.html src/renderer/style.css dist/renderer/ && cp -r src/renderer/fonts dist/renderer/ && bash scripts/build-macos-helper.sh",
"test-yomitan-parser:electron": "bun run build:yomitan && bun build scripts/test-yomitan-parser.ts --format=cjs --target=node --outfile dist/scripts/test-yomitan-parser.js --external electron && electron dist/scripts/test-yomitan-parser.js",
"build:yomitan": "node scripts/build-yomitan.mjs",
"build": "bun run build:yomitan && tsc -p tsconfig.json && bun run build:renderer && cp src/renderer/index.html src/renderer/style.css dist/renderer/ && cp -r src/renderer/fonts dist/renderer/ && bash scripts/build-macos-helper.sh",
"build:renderer": "esbuild src/renderer/renderer.ts --bundle --platform=browser --format=esm --target=es2022 --outfile=dist/renderer/renderer.js --sourcemap",
"format": "prettier --write .",
"format:check": "prettier --check .",
"test:config:src": "bun test src/config/config.test.ts src/config/path-resolution.test.ts src/config/resolve/anki-connect.test.ts src/config/resolve/subtitle-style.test.ts src/config/resolve/jellyfin.test.ts src/config/definitions/domain-registry.test.ts",
"test:config:dist": "bun test dist/config/config.test.js dist/config/path-resolution.test.js dist/config/resolve/anki-connect.test.js dist/config/resolve/subtitle-style.test.js dist/config/resolve/jellyfin.test.js dist/config/definitions/domain-registry.test.js",
"format:src": "bash scripts/prettier-scope.sh --write",
"format:check:src": "bash scripts/prettier-scope.sh --check",
"test:config:src": "bun test src/config/config.test.ts src/config/path-resolution.test.ts src/config/resolve/anki-connect.test.ts src/config/resolve/subtitle-style.test.ts src/config/resolve/jellyfin.test.ts src/config/definitions/domain-registry.test.ts src/generate-config-example.test.ts",
"test:config:dist": "bun test dist/config/config.test.js dist/config/path-resolution.test.js dist/config/resolve/anki-connect.test.js dist/config/resolve/subtitle-style.test.js dist/config/resolve/jellyfin.test.js dist/config/definitions/domain-registry.test.js dist/generate-config-example.test.js",
"test:config:smoke:dist": "bun test dist/config/path-resolution.test.js",
"test:plugin:src": "lua scripts/test-plugin-start-gate.lua",
"test:launcher:smoke:src": "bun test launcher/smoke.e2e.test.ts",
"test:launcher:src": "bun test launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/smoke.e2e.test.ts && bun run test:plugin:src",
"test:core:src": "bun test src/cli/args.test.ts src/cli/help.test.ts src/core/services/cli-command.test.ts src/core/services/field-grouping-overlay.test.ts src/core/services/numeric-shortcut-session.test.ts src/core/services/secondary-subtitle.test.ts src/core/services/mpv-render-metrics.test.ts src/core/services/overlay-content-measurement.test.ts src/core/services/mpv-control.test.ts src/core/services/mpv.test.ts src/core/services/runtime-options-ipc.test.ts src/core/services/runtime-config.test.ts src/core/services/config-hot-reload.test.ts src/core/services/discord-presence.test.ts src/core/services/tokenizer.test.ts src/core/services/tokenizer/annotation-stage.test.ts src/core/services/tokenizer/parser-selection-stage.test.ts src/core/services/tokenizer/parser-enrichment-stage.test.ts src/core/services/subsync.test.ts src/core/services/overlay-bridge.test.ts src/core/services/overlay-shortcut-handler.test.ts src/core/services/mining.test.ts src/core/services/anki-jimaku.test.ts src/core/services/jimaku-download-path.test.ts src/core/services/jellyfin.test.ts src/core/services/jellyfin-remote.test.ts src/core/services/immersion-tracker-service.test.ts src/core/services/overlay-runtime-init.test.ts src/core/services/app-ready.test.ts src/core/services/startup-bootstrap.test.ts src/core/services/subtitle-processing-controller.test.ts src/core/services/anilist/anilist-update-queue.test.ts src/core/utils/shortcut-config.test.ts src/renderer/error-recovery.test.ts src/renderer/subtitle-render.test.ts src/renderer/handlers/mouse.test.ts src/renderer/handlers/keyboard.test.ts src/renderer/modals/jimaku.test.ts src/subsync/utils.test.ts src/main/anilist-url-guard.test.ts src/window-trackers/x11-tracker.test.ts launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts",
"test:core:dist": "bun test dist/cli/args.test.js dist/cli/help.test.js dist/core/services/cli-command.test.js dist/core/services/ipc.test.js dist/core/services/anki-jimaku-ipc.test.js dist/core/services/field-grouping-overlay.test.js dist/core/services/numeric-shortcut-session.test.js dist/core/services/secondary-subtitle.test.js dist/core/services/mpv-render-metrics.test.js dist/core/services/overlay-content-measurement.test.js dist/core/services/mpv-control.test.js dist/core/services/mpv.test.js dist/core/services/runtime-options-ipc.test.js dist/core/services/runtime-config.test.js dist/core/services/config-hot-reload.test.js dist/core/services/discord-presence.test.js dist/core/services/tokenizer.test.js dist/core/services/tokenizer/annotation-stage.test.js dist/core/services/tokenizer/parser-selection-stage.test.js dist/core/services/tokenizer/parser-enrichment-stage.test.js dist/core/services/subsync.test.js dist/core/services/overlay-bridge.test.js dist/core/services/overlay-manager.test.js dist/core/services/overlay-shortcut-handler.test.js dist/core/services/mining.test.js dist/core/services/anki-jimaku.test.js dist/core/services/jimaku-download-path.test.js dist/core/services/jellyfin.test.js dist/core/services/jellyfin-remote.test.js dist/core/services/immersion-tracker-service.test.js dist/core/services/overlay-runtime-init.test.js dist/core/services/app-ready.test.js dist/core/services/startup-bootstrap.test.js dist/core/services/subtitle-processing-controller.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/anilist/anilist-update-queue.test.js dist/renderer/error-recovery.test.js dist/renderer/subtitle-render.test.js dist/renderer/handlers/mouse.test.js dist/renderer/handlers/keyboard.test.js dist/renderer/modals/jimaku.test.js dist/subsync/utils.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/x11-tracker.test.js",
"test:core:src": "bun test src/cli/args.test.ts src/cli/help.test.ts src/shared/setup-state.test.ts src/core/services/cli-command.test.ts src/core/services/field-grouping-overlay.test.ts src/core/services/numeric-shortcut-session.test.ts src/core/services/secondary-subtitle.test.ts src/core/services/mpv-render-metrics.test.ts src/core/services/overlay-content-measurement.test.ts src/core/services/mpv-control.test.ts src/core/services/mpv.test.ts src/core/services/runtime-options-ipc.test.ts src/core/services/runtime-config.test.ts src/core/services/yomitan-extension-paths.test.ts src/core/services/config-hot-reload.test.ts src/core/services/discord-presence.test.ts src/core/services/tokenizer.test.ts src/core/services/tokenizer/annotation-stage.test.ts src/core/services/tokenizer/parser-selection-stage.test.ts src/core/services/tokenizer/parser-enrichment-stage.test.ts src/core/services/subsync.test.ts src/core/services/overlay-bridge.test.ts src/core/services/overlay-shortcut-handler.test.ts src/core/services/mining.test.ts src/core/services/anki-jimaku.test.ts src/core/services/jimaku-download-path.test.ts src/core/services/jellyfin.test.ts src/core/services/jellyfin-remote.test.ts src/core/services/immersion-tracker-service.test.ts src/core/services/overlay-runtime-init.test.ts src/core/services/app-ready.test.ts src/core/services/startup-bootstrap.test.ts src/core/services/subtitle-processing-controller.test.ts src/core/services/anilist/anilist-update-queue.test.ts src/core/utils/shortcut-config.test.ts src/main/runtime/first-run-setup-plugin.test.ts src/main/runtime/first-run-setup-service.test.ts src/main/runtime/first-run-setup-window.test.ts src/main/runtime/tray-runtime.test.ts src/main/runtime/tray-main-actions.test.ts src/main/runtime/tray-main-deps.test.ts src/main/runtime/tray-runtime-handlers.test.ts src/main/runtime/cli-command-context-main-deps.test.ts src/main/runtime/app-ready-main-deps.test.ts src/renderer/error-recovery.test.ts 
src/renderer/subtitle-render.test.ts src/renderer/handlers/mouse.test.ts src/renderer/handlers/keyboard.test.ts src/renderer/modals/jimaku.test.ts src/subsync/utils.test.ts src/main/anilist-url-guard.test.ts src/window-trackers/x11-tracker.test.ts launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/setup-gate.test.ts",
"test:core:dist": "bun test dist/cli/args.test.js dist/cli/help.test.js dist/core/services/cli-command.test.js dist/core/services/ipc.test.js dist/core/services/anki-jimaku-ipc.test.js dist/core/services/field-grouping-overlay.test.js dist/core/services/numeric-shortcut-session.test.js dist/core/services/secondary-subtitle.test.js dist/core/services/mpv-render-metrics.test.js dist/core/services/overlay-content-measurement.test.js dist/core/services/mpv-control.test.js dist/core/services/mpv.test.js dist/core/services/runtime-options-ipc.test.js dist/core/services/runtime-config.test.js dist/core/services/yomitan-extension-paths.test.js dist/core/services/config-hot-reload.test.js dist/core/services/discord-presence.test.js dist/core/services/tokenizer.test.js dist/core/services/tokenizer/annotation-stage.test.js dist/core/services/tokenizer/parser-selection-stage.test.js dist/core/services/tokenizer/parser-enrichment-stage.test.js dist/core/services/subsync.test.js dist/core/services/overlay-bridge.test.js dist/core/services/overlay-manager.test.js dist/core/services/overlay-shortcut-handler.test.js dist/core/services/mining.test.js dist/core/services/anki-jimaku.test.js dist/core/services/jimaku-download-path.test.js dist/core/services/jellyfin.test.js dist/core/services/jellyfin-remote.test.js dist/core/services/immersion-tracker-service.test.js dist/core/services/overlay-runtime-init.test.js dist/core/services/app-ready.test.js dist/core/services/startup-bootstrap.test.js dist/core/services/subtitle-processing-controller.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/anilist/anilist-update-queue.test.js dist/renderer/error-recovery.test.js dist/renderer/subtitle-render.test.js dist/renderer/handlers/mouse.test.js dist/renderer/handlers/keyboard.test.js dist/renderer/modals/jimaku.test.js dist/subsync/utils.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/x11-tracker.test.js",
"test:core:smoke:dist": "bun test dist/cli/help.test.js dist/core/services/runtime-config.test.js dist/core/services/ipc.test.js dist/core/services/overlay-manager.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/startup-bootstrap.test.js dist/renderer/error-recovery.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/x11-tracker.test.js",
"test:smoke:dist": "bun run test:config:smoke:dist && bun run test:core:smoke:dist",
"test:subtitle:src": "bun test src/core/services/subsync.test.ts src/subsync/utils.test.ts",
@@ -117,7 +120,7 @@
],
"extraResources": [
{
"from": "vendor/yomitan",
"from": "build/yomitan",
"to": "yomitan"
},
{
@@ -128,6 +131,14 @@
"from": "assets",
"to": "assets"
},
{
"from": "plugin/subminer",
"to": "plugin/subminer"
},
{
"from": "plugin/subminer.conf",
"to": "plugin/subminer.conf"
},
{
"from": "dist/scripts/get-mpv-window-macos",
"to": "scripts/get-mpv-window-macos"

144
scripts/build-yomitan.mjs Normal file
View File

@@ -0,0 +1,144 @@
import fs from 'node:fs';
import os from 'node:os';
import path from 'node:path';
import { createHash } from 'node:crypto';
import { execFileSync } from 'node:child_process';
import { fileURLToPath } from 'node:url';
// Absolute paths anchored at this script's location so the build works from any CWD.
const dirname = path.dirname(fileURLToPath(import.meta.url));
const repoRoot = path.resolve(dirname, '..');
// Git submodule holding the Yomitan sources that get built into the app.
const submoduleDir = path.join(repoRoot, 'vendor', 'subminer-yomitan');
const submodulePackagePath = path.join(submoduleDir, 'package.json');
const submodulePackageLockPath = path.join(submoduleDir, 'package-lock.json');
// Extracted build output; packaged via the "build/yomitan" extraResources entry.
const buildOutputDir = path.join(repoRoot, 'build', 'yomitan');
// Stamp recording the submodule revision/dirty state the output was built from.
const stampPath = path.join(buildOutputDir, '.subminer-build.json');
const zipPath = path.join(submoduleDir, 'builds', 'yomitan-chrome.zip');
// npm needs the .cmd shim on Windows.
const npmCommand = process.platform === 'win32' ? 'npm.cmd' : 'npm';
// Hash of the installed package-lock.json, used to skip redundant `npm ci` runs.
const dependencyStampPath = path.join(submoduleDir, 'node_modules', '.subminer-package-lock-hash');
// Run a command in `cwd`, streaming its output to the current terminal.
// Throws (via execFileSync) when the command exits non-zero.
function run(command, args, cwd) {
  const options = { cwd, stdio: 'inherit' };
  execFileSync(command, args, options);
}
// Run a command in `cwd` and return its trimmed stdout as a string.
function readCommand(command, args, cwd) {
  const output = execFileSync(command, args, { cwd, encoding: 'utf8' });
  return output.trim();
}
// Read the parsed build stamp, or null when it is missing or corrupt —
// either case simply means "no usable prior build".
function readStamp() {
  try {
    const raw = fs.readFileSync(stampPath, 'utf8');
    return JSON.parse(raw);
  } catch {
    return null;
  }
}
// SHA-256 over the raw file bytes, returned as a lowercase hex string.
function hashFile(filePath) {
  const bytes = fs.readFileSync(filePath);
  return createHash('sha256').update(bytes).digest('hex');
}
// Fail fast with a setup hint when the Yomitan submodule has not been
// initialized; its package.json is the cheapest presence marker.
function ensureSubmodulePresent() {
  if (fs.existsSync(submodulePackagePath)) {
    return;
  }
  throw new Error(
    'Missing vendor/subminer-yomitan submodule. Run `git submodule update --init --recursive`.',
  );
}
// Capture the submodule's current git state: HEAD revision plus a short
// status listing of tracked modifications (untracked files ignored).
function getSourceState() {
  const dirty = readCommand('git', ['status', '--short', '--untracked-files=no'], submoduleDir);
  const revision = readCommand('git', ['rev-parse', 'HEAD'], submoduleDir);
  return { revision, dirty };
}
// Decide whether the extracted build can be reused: the output must exist and
// the stamp must match the submodule's current revision and dirty listing.
// NOTE(review): dirty file *contents* are not hashed — an edit that leaves the
// `git status --short` output unchanged will not trigger a rebuild; use --force then.
function isBuildCurrent(force) {
  if (force) {
    return false;
  }
  const manifestPath = path.join(buildOutputDir, 'manifest.json');
  if (!fs.existsSync(manifestPath)) {
    return false;
  }
  const stamp = readStamp();
  if (!stamp) {
    return false;
  }
  const current = getSourceState();
  return stamp.revision === current.revision && stamp.dirty === current.dirty;
}
// Install the submodule's npm dependencies only when node_modules is absent
// or the lockfile hash differs from the one recorded by the previous install.
function ensureDependenciesInstalled() {
  const nodeModulesDir = path.join(submoduleDir, 'node_modules');
  const currentLockHash = hashFile(submodulePackageLockPath);
  let installedLockHash = '';
  try {
    installedLockHash = fs.readFileSync(dependencyStampPath, 'utf8').trim();
  } catch {}
  const needsInstall = !fs.existsSync(nodeModulesDir) || installedLockHash !== currentLockHash;
  if (!needsInstall) {
    return;
  }
  run(npmCommand, ['ci'], submoduleDir);
  // npm ci recreates node_modules; record the lock hash it was installed from.
  fs.mkdirSync(nodeModulesDir, { recursive: true });
  fs.writeFileSync(dependencyStampPath, `${currentLockHash}\n`, 'utf8');
}
// Refresh dependencies when stale, then produce the Chrome-target build.
function installAndBuild() {
  ensureDependenciesInstalled();
  const buildArgs = ['run', 'build', '--', '--target', 'chrome'];
  run(npmCommand, buildArgs, submoduleDir);
}
// Unzip the freshly built Chrome artifact into the build output directory:
// extract to a temp staging dir first, then replace the output wholesale so a
// failed extraction never leaves a half-written build behind.
function extractBuild() {
  if (!fs.existsSync(zipPath)) {
    throw new Error(`Expected Yomitan build artifact at ${zipPath}`);
  }
  const stagingDir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-yomitan-'));
  try {
    run('unzip', ['-qo', zipPath, '-d', stagingDir], repoRoot);
    // Drop any stale output before copying the new extraction in.
    fs.rmSync(buildOutputDir, { recursive: true, force: true });
    fs.mkdirSync(path.dirname(buildOutputDir), { recursive: true });
    fs.cpSync(stagingDir, buildOutputDir, { recursive: true });
    const manifestPath = path.join(buildOutputDir, 'manifest.json');
    if (!fs.existsSync(manifestPath)) {
      throw new Error(`Extracted Yomitan build missing manifest.json in ${buildOutputDir}`);
    }
  } finally {
    // Always clean up the staging dir, even when extraction fails.
    fs.rmSync(stagingDir, { recursive: true, force: true });
  }
}
/**
 * Persist the current submodule git state (plus a timestamp) as the build
 * stamp, so isBuildCurrent() can skip rebuilds while the source is unchanged.
 */
function writeStamp() {
  const { revision, dirty } = getSourceState();
  const payload = JSON.stringify(
    {
      revision,
      dirty,
      builtAt: new Date().toISOString(),
    },
    null,
    2,
  );
  fs.writeFileSync(stampPath, `${payload}\n`, 'utf8');
}
/**
 * Entry point: verify the Yomitan submodule is checked out, then rebuild and
 * re-extract the Chrome extension artifact unless the existing output is
 * already current (or `--force` was passed).
 */
function main() {
  // `--force` anywhere on the command line bypasses the currency check.
  const force = process.argv.includes('--force');
  ensureSubmodulePresent();
  if (isBuildCurrent(force)) {
    process.stdout.write(`Yomitan build current: ${buildOutputDir}\n`);
    return;
  }
  process.stdout.write('Building Yomitan Chrome artifact...\n');
  installAndBuild();
  extractBuild();
  writeStamp();
  process.stdout.write(`Yomitan extracted to ${buildOutputDir}\n`);
}
// Script entry point: build/refresh the vendored Yomitan extension artifact.
main();

View File

@@ -4,6 +4,7 @@ import process from 'node:process';
import { createTokenizerDepsRuntime, tokenizeSubtitle } from '../src/core/services/tokenizer.js';
import { createFrequencyDictionaryLookup } from '../src/core/services/frequency-dictionary.js';
import { resolveYomitanExtensionPath as resolveBuiltYomitanExtensionPath } from '../src/core/services/yomitan-extension-paths.js';
import { MecabTokenizer } from '../src/mecab-tokenizer.js';
import type { MergedToken, FrequencyDictionaryLookup } from '../src/types.js';
@@ -48,7 +49,7 @@ function parseCliArgs(argv: string[]): CliOptions {
let colorBand1 = '#ed8796';
let colorBand2 = '#f5a97f';
let colorBand3 = '#f9e2af';
let colorBand4 = '#a6e3a1';
let colorBand4 = '#8bd5ca';
let colorBand5 = '#8aadf4';
let colorKnown = '#a6da95';
let colorNPlusOne = '#c6a0f6';
@@ -94,7 +95,7 @@ function parseCliArgs(argv: string[]): CliOptions {
if (!next) {
throw new Error('Missing value for --yomitan-extension');
}
yomitanExtensionPath = path.resolve(next);
yomitanExtensionPath = next;
continue;
}
@@ -103,7 +104,7 @@ function parseCliArgs(argv: string[]): CliOptions {
if (!next) {
throw new Error('Missing value for --yomitan-user-data');
}
yomitanUserDataPath = path.resolve(next);
yomitanUserDataPath = next;
continue;
}
@@ -225,12 +226,12 @@ function parseCliArgs(argv: string[]): CliOptions {
}
if (arg.startsWith('--yomitan-extension=')) {
yomitanExtensionPath = path.resolve(arg.slice('--yomitan-extension='.length));
yomitanExtensionPath = arg.slice('--yomitan-extension='.length);
continue;
}
if (arg.startsWith('--yomitan-user-data=')) {
yomitanUserDataPath = path.resolve(arg.slice('--yomitan-user-data='.length));
yomitanUserDataPath = arg.slice('--yomitan-user-data='.length);
continue;
}
@@ -524,7 +525,10 @@ function destroyUnknownParserWindow(window: unknown): void {
}
}
async function createYomitanRuntimeState(userDataPath: string): Promise<YomitanRuntimeState> {
async function createYomitanRuntimeState(
userDataPath: string,
extensionPath?: string,
): Promise<YomitanRuntimeState> {
const state: YomitanRuntimeState = {
yomitanExt: null,
parserWindow: null,
@@ -547,6 +551,7 @@ async function createYomitanRuntimeState(userDataPath: string): Promise<YomitanR
const loadYomitanExtension = (await import('../src/core/services/yomitan-extension-loader.js'))
.loadYomitanExtension as (options: {
userDataPath: string;
extensionPath?: string;
getYomitanParserWindow: () => unknown;
setYomitanParserWindow: (window: unknown) => void;
setYomitanParserReadyPromise: (promise: Promise<void> | null) => void;
@@ -556,6 +561,7 @@ async function createYomitanRuntimeState(userDataPath: string): Promise<YomitanR
const extension = await loadYomitanExtension({
userDataPath,
extensionPath,
getYomitanParserWindow: () => state.parserWindow,
setYomitanParserWindow: (window) => {
state.parserWindow = window;
@@ -589,17 +595,16 @@ async function createYomitanRuntimeStateWithSearch(
userDataPath: string,
extensionPath?: string,
): Promise<YomitanRuntimeState> {
const preferredPath = extensionPath ? path.resolve(extensionPath) : undefined;
const defaultVendorPath = path.resolve(process.cwd(), 'vendor', 'yomitan');
const candidates = [...(preferredPath ? [preferredPath] : []), defaultVendorPath];
const resolvedExtensionPath = resolveBuiltYomitanExtensionPath({
explicitPath: extensionPath,
cwd: process.cwd(),
});
const candidates = resolvedExtensionPath ? [resolvedExtensionPath] : [];
for (const candidate of candidates) {
if (!candidate) {
continue;
}
try {
if (fs.existsSync(path.join(candidate, 'manifest.json'))) {
const state = await createYomitanRuntimeState(userDataPath);
const state = await createYomitanRuntimeState(userDataPath, candidate);
if (state.available) {
return state;
}
@@ -613,7 +618,7 @@ async function createYomitanRuntimeStateWithSearch(
}
}
return createYomitanRuntimeState(userDataPath);
return createYomitanRuntimeState(userDataPath, resolvedExtensionPath ?? undefined);
}
async function getFrequencyLookup(dictionaryPath: string): Promise<FrequencyDictionaryLookup> {

View File

@@ -1,261 +1,16 @@
#!/bin/bash
#
# SubMiner - All-in-one sentence mining overlay
# Copyright (C) 2024 sudacode
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# patch-yomitan.sh - Apply Electron compatibility patches to Yomitan
#
# This script applies the necessary patches to make Yomitan work in Electron
# after upgrading to a new version. Run this after extracting a fresh Yomitan release.
#
# Usage: ./patch-yomitan.sh [yomitan_dir]
# yomitan_dir: Path to the Yomitan directory (default: vendor/yomitan)
#
set -e
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
YOMITAN_DIR="${1:-$SCRIPT_DIR/../vendor/yomitan}"
cat <<'EOF'
patch-yomitan.sh is retired.
if [ ! -d "$YOMITAN_DIR" ]; then
echo "Error: Yomitan directory not found: $YOMITAN_DIR"
exit 1
fi
SubMiner now uses the forked source submodule at vendor/subminer-yomitan and builds the
Chromium extension artifact into build/yomitan.
echo "Patching Yomitan in: $YOMITAN_DIR"
Use:
git submodule update --init --recursive
bun run build:yomitan
PERMISSIONS_UTIL="$YOMITAN_DIR/js/data/permissions-util.js"
if [ ! -f "$PERMISSIONS_UTIL" ]; then
echo "Error: permissions-util.js not found at $PERMISSIONS_UTIL"
exit 1
fi
echo "Patching permissions-util.js..."
if grep -q "Electron workaround" "$PERMISSIONS_UTIL"; then
echo " - Already patched, skipping"
else
cat > "$PERMISSIONS_UTIL.tmp" << 'PATCH_EOF'
/*
* Copyright (C) 2023-2025 Yomitan Authors
* Copyright (C) 2021-2022 Yomichan Authors
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
import {getFieldMarkers} from './anki-util.js';
/**
* This function returns whether an Anki field marker might require clipboard permissions.
* This is speculative and may not guarantee that the field marker actually does require the permission,
* as the custom handlebars template is not deeply inspected.
* @param {string} marker
* @returns {boolean}
*/
function ankiFieldMarkerMayUseClipboard(marker) {
switch (marker) {
case 'clipboard-image':
case 'clipboard-text':
return true;
default:
return false;
}
}
/**
* @param {chrome.permissions.Permissions} permissions
* @returns {Promise<boolean>}
*/
export function hasPermissions(permissions) {
return new Promise((resolve, reject) => {
chrome.permissions.contains(permissions, (result) => {
const e = chrome.runtime.lastError;
if (e) {
reject(new Error(e.message));
} else {
resolve(result);
}
});
});
}
/**
* @param {chrome.permissions.Permissions} permissions
* @param {boolean} shouldHave
* @returns {Promise<boolean>}
*/
export function setPermissionsGranted(permissions, shouldHave) {
return (
shouldHave ?
new Promise((resolve, reject) => {
chrome.permissions.request(permissions, (result) => {
const e = chrome.runtime.lastError;
if (e) {
reject(new Error(e.message));
} else {
resolve(result);
}
});
}) :
new Promise((resolve, reject) => {
chrome.permissions.remove(permissions, (result) => {
const e = chrome.runtime.lastError;
if (e) {
reject(new Error(e.message));
} else {
resolve(!result);
}
});
})
);
}
/**
* @returns {Promise<chrome.permissions.Permissions>}
*/
export function getAllPermissions() {
// Electron workaround - chrome.permissions.getAll() not available
return Promise.resolve({
origins: ["<all_urls>"],
permissions: ["clipboardWrite", "storage", "unlimitedStorage", "scripting", "contextMenus"]
});
}
/**
* @param {string} fieldValue
* @returns {string[]}
*/
export function getRequiredPermissionsForAnkiFieldValue(fieldValue) {
const markers = getFieldMarkers(fieldValue);
for (const marker of markers) {
if (ankiFieldMarkerMayUseClipboard(marker)) {
return ['clipboardRead'];
}
}
return [];
}
/**
* @param {chrome.permissions.Permissions} permissions
* @param {import('settings').ProfileOptions} options
* @returns {boolean}
*/
export function hasRequiredPermissionsForOptions(permissions, options) {
const permissionsSet = new Set(permissions.permissions);
if (!permissionsSet.has('nativeMessaging') && (options.parsing.enableMecabParser || options.general.enableYomitanApi)) {
return false;
}
if (!permissionsSet.has('clipboardRead')) {
if (options.clipboard.enableBackgroundMonitor || options.clipboard.enableSearchPageMonitor) {
return false;
}
const fieldsList = options.anki.cardFormats.map((cardFormat) => cardFormat.fields);
for (const fields of fieldsList) {
for (const {value: fieldValue} of Object.values(fields)) {
const markers = getFieldMarkers(fieldValue);
for (const marker of markers) {
if (ankiFieldMarkerMayUseClipboard(marker)) {
return false;
}
}
}
}
}
return true;
}
PATCH_EOF
mv "$PERMISSIONS_UTIL.tmp" "$PERMISSIONS_UTIL"
echo " - Patched successfully"
fi
OPTIONS_SCHEMA="$YOMITAN_DIR/data/schemas/options-schema.json"
if [ ! -f "$OPTIONS_SCHEMA" ]; then
echo "Error: options-schema.json not found at $OPTIONS_SCHEMA"
exit 1
fi
echo "Patching options-schema.json..."
if grep -q '"selectText".*"default": true' "$OPTIONS_SCHEMA"; then
sed -i '/"selectText": {/,/"default":/{s/"default": true/"default": false/}' "$OPTIONS_SCHEMA"
echo " - Changed selectText default to false"
elif grep -q '"selectText".*"default": false' "$OPTIONS_SCHEMA"; then
echo " - selectText already set to false, skipping"
else
echo " - Warning: Could not find selectText setting"
fi
if grep -q '"layoutAwareScan".*"default": true' "$OPTIONS_SCHEMA"; then
sed -i '/"layoutAwareScan": {/,/"default":/{s/"default": true/"default": false/}' "$OPTIONS_SCHEMA"
echo " - Changed layoutAwareScan default to false"
elif grep -q '"layoutAwareScan".*"default": false' "$OPTIONS_SCHEMA"; then
echo " - layoutAwareScan already set to false, skipping"
else
echo " - Warning: Could not find layoutAwareScan setting"
fi
POPUP_JS="$YOMITAN_DIR/js/app/popup.js"
if [ ! -f "$POPUP_JS" ]; then
echo "Error: popup.js not found at $POPUP_JS"
exit 1
fi
echo "Patching popup.js..."
if grep -q "yomitan-popup-shown" "$POPUP_JS"; then
echo " - Already patched, skipping"
else
# Add the visibility event dispatch after the existing _onVisibleChange code
# We need to add it after: void this._invokeSafe('displayVisibilityChanged', {value});
sed -i "/void this._invokeSafe('displayVisibilityChanged', {value});/a\\
\\
// Dispatch custom events for popup visibility (Electron integration)\\
if (value) {\\
window.dispatchEvent(new CustomEvent('yomitan-popup-shown'));\\
} else {\\
window.dispatchEvent(new CustomEvent('yomitan-popup-hidden'));\\
}" "$POPUP_JS"
echo " - Added visibility events"
fi
echo ""
echo "Yomitan patching complete!"
echo ""
echo "Changes applied:"
echo " 1. permissions-util.js: Hardcoded permissions (Electron workaround)"
echo " 2. options-schema.json: selectText=false, layoutAwareScan=false"
echo " 3. popup.js: Added yomitan-popup-shown/hidden events"
echo ""
echo "To verify: Run 'bun run dev' and check for 'Yomitan extension loaded successfully'"
If you need to change Electron compatibility behavior, patch the forked source repo and rebuild.
EOF

20
scripts/prettier-scope.sh Normal file
View File

@@ -0,0 +1,20 @@
#!/usr/bin/env bash
# prettier-scope.sh — run Prettier only over the curated path list below.
# Any CLI flags (e.g. --check or --write) are forwarded to prettier via "$@".
set -euo pipefail

# Resolve the repository root relative to this script and cd into it so the
# relative paths below work regardless of the caller's working directory.
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
cd "$ROOT_DIR"

# Only these paths are formatted; anything outside the list (e.g. vendor/) is
# left untouched.
paths=(
  "package.json"
  "tsconfig.json"
  "tsconfig.renderer.json"
  "tsconfig.typecheck.json"
  ".prettierrc.json"
  ".github"
  "build"
  "launcher"
  "scripts"
  "src"
)

# exec replaces this shell with the prettier process, preserving its exit code.
exec bunx prettier "$@" "${paths[@]}"

View File

@@ -4,6 +4,7 @@ import path from 'node:path';
import process from 'node:process';
import { createTokenizerDepsRuntime, tokenizeSubtitle } from '../src/core/services/tokenizer.js';
import { resolveYomitanExtensionPath as resolveBuiltYomitanExtensionPath } from '../src/core/services/yomitan-extension-paths.js';
import { MecabTokenizer } from '../src/mecab-tokenizer.js';
import type { MergedToken } from '../src/types.js';
@@ -112,12 +113,12 @@ function parseCliArgs(argv: string[]): CliOptions {
if (!next) {
throw new Error('Missing value for --yomitan-extension');
}
yomitanExtensionPath = path.resolve(next);
yomitanExtensionPath = next;
continue;
}
if (arg.startsWith('--yomitan-extension=')) {
yomitanExtensionPath = path.resolve(arg.slice('--yomitan-extension='.length));
yomitanExtensionPath = arg.slice('--yomitan-extension='.length);
continue;
}
@@ -126,12 +127,12 @@ function parseCliArgs(argv: string[]): CliOptions {
if (!next) {
throw new Error('Missing value for --yomitan-user-data');
}
yomitanUserDataPath = path.resolve(next);
yomitanUserDataPath = next;
continue;
}
if (arg.startsWith('--yomitan-user-data=')) {
yomitanUserDataPath = path.resolve(arg.slice('--yomitan-user-data='.length));
yomitanUserDataPath = arg.slice('--yomitan-user-data='.length);
continue;
}
@@ -372,21 +373,10 @@ function findSelectedCandidateIndexes(
}
function resolveYomitanExtensionPath(explicitPath?: string): string | null {
const candidates = [
explicitPath ? path.resolve(explicitPath) : null,
path.resolve(process.cwd(), 'vendor', 'yomitan'),
];
for (const candidate of candidates) {
if (!candidate) {
continue;
}
if (fs.existsSync(path.join(candidate, 'manifest.json'))) {
return candidate;
}
}
return null;
return resolveBuiltYomitanExtensionPath({
explicitPath,
cwd: process.cwd(),
});
}
async function setupYomitanRuntime(options: CliOptions): Promise<YomitanRuntimeState> {
@@ -420,7 +410,7 @@ async function setupYomitanRuntime(options: CliOptions): Promise<YomitanRuntimeS
const extensionPath = resolveYomitanExtensionPath(options.yomitanExtensionPath);
if (!extensionPath) {
state.note = 'no Yomitan extension directory found';
state.note = 'no built Yomitan extension directory found; run `bun run build:yomitan`';
return state;
}

View File

@@ -222,9 +222,11 @@ test('AnkiIntegration does not allocate proxy server when proxy transport is dis
);
const privateState = integration as unknown as {
proxyServer: unknown | null;
runtime: {
proxyServer: unknown | null;
};
};
assert.equal(privateState.proxyServer, null);
assert.equal(privateState.runtime.proxyServer, null);
});
test('FieldGroupingMergeCollaborator synchronizes ExpressionAudio from merged SentenceAudio', async () => {

View File

@@ -48,6 +48,7 @@ import { FieldGroupingService } from './anki-integration/field-grouping';
import { FieldGroupingMergeCollaborator } from './anki-integration/field-grouping-merge';
import { NoteUpdateWorkflow } from './anki-integration/note-update-workflow';
import { FieldGroupingWorkflow } from './anki-integration/field-grouping-workflow';
import { AnkiIntegrationRuntime, normalizeAnkiIntegrationConfig } from './anki-integration/runtime';
const log = createLogger('anki').child('integration');
@@ -113,8 +114,6 @@ export class AnkiIntegration {
private timingTracker: SubtitleTimingTracker;
private config: AnkiConnectConfig;
private pollingRunner!: PollingRunner;
private proxyServer: AnkiConnectProxyServer | null = null;
private started = false;
private previousNoteIds = new Set<number>();
private mpvClient: MpvClient;
private osdCallback: ((text: string) => void) | null = null;
@@ -135,6 +134,7 @@ export class AnkiIntegration {
private fieldGroupingService: FieldGroupingService;
private noteUpdateWorkflow: NoteUpdateWorkflow;
private fieldGroupingWorkflow: FieldGroupingWorkflow;
private runtime: AnkiIntegrationRuntime;
constructor(
config: AnkiConnectConfig,
@@ -148,7 +148,7 @@ export class AnkiIntegration {
}) => Promise<KikuFieldGroupingChoice>,
knownWordCacheStatePath?: string,
) {
this.config = this.normalizeConfig(config);
this.config = normalizeAnkiIntegrationConfig(config);
this.client = new AnkiConnectClient(this.config.url!);
this.mediaGenerator = new MediaGenerator();
this.timingTracker = timingTracker;
@@ -163,6 +163,7 @@ export class AnkiIntegration {
this.fieldGroupingService = this.createFieldGroupingService();
this.noteUpdateWorkflow = this.createNoteUpdateWorkflow();
this.fieldGroupingWorkflow = this.createFieldGroupingWorkflow();
this.runtime = this.createRuntime(config);
}
private createFieldGroupingMergeCollaborator(): FieldGroupingMergeCollaborator {
@@ -182,75 +183,6 @@ export class AnkiIntegration {
});
}
private normalizeConfig(config: AnkiConnectConfig): AnkiConnectConfig {
const resolvedUrl =
typeof config.url === 'string' && config.url.trim().length > 0
? config.url.trim()
: DEFAULT_ANKI_CONNECT_CONFIG.url;
const proxySource =
config.proxy && typeof config.proxy === 'object'
? (config.proxy as NonNullable<AnkiConnectConfig['proxy']>)
: {};
const normalizedProxyPort =
typeof proxySource.port === 'number' &&
Number.isInteger(proxySource.port) &&
proxySource.port >= 1 &&
proxySource.port <= 65535
? proxySource.port
: DEFAULT_ANKI_CONNECT_CONFIG.proxy?.port;
const normalizedProxyHost =
typeof proxySource.host === 'string' && proxySource.host.trim().length > 0
? proxySource.host.trim()
: DEFAULT_ANKI_CONNECT_CONFIG.proxy?.host;
const normalizedProxyUpstreamUrl =
typeof proxySource.upstreamUrl === 'string' && proxySource.upstreamUrl.trim().length > 0
? proxySource.upstreamUrl.trim()
: resolvedUrl;
return {
...DEFAULT_ANKI_CONNECT_CONFIG,
...config,
url: resolvedUrl,
fields: {
...DEFAULT_ANKI_CONNECT_CONFIG.fields,
...(config.fields ?? {}),
},
proxy: {
...DEFAULT_ANKI_CONNECT_CONFIG.proxy,
...(config.proxy ?? {}),
enabled: proxySource.enabled === true,
host: normalizedProxyHost,
port: normalizedProxyPort,
upstreamUrl: normalizedProxyUpstreamUrl,
},
ai: {
...DEFAULT_ANKI_CONNECT_CONFIG.ai,
...(config.openRouter ?? {}),
...(config.ai ?? {}),
},
media: {
...DEFAULT_ANKI_CONNECT_CONFIG.media,
...(config.media ?? {}),
},
behavior: {
...DEFAULT_ANKI_CONNECT_CONFIG.behavior,
...(config.behavior ?? {}),
},
metadata: {
...DEFAULT_ANKI_CONNECT_CONFIG.metadata,
...(config.metadata ?? {}),
},
isLapis: {
...DEFAULT_ANKI_CONNECT_CONFIG.isLapis,
...(config.isLapis ?? {}),
},
isKiku: {
...DEFAULT_ANKI_CONNECT_CONFIG.isKiku,
...(config.isKiku ?? {}),
},
} as AnkiConnectConfig;
}
private createKnownWordCache(knownWordCacheStatePath?: string): KnownWordCacheManager {
return new KnownWordCacheManager({
client: {
@@ -302,11 +234,20 @@ export class AnkiIntegration {
});
}
private getOrCreateProxyServer(): AnkiConnectProxyServer {
if (!this.proxyServer) {
this.proxyServer = this.createProxyServer();
}
return this.proxyServer;
private createRuntime(initialConfig: AnkiConnectConfig): AnkiIntegrationRuntime {
return new AnkiIntegrationRuntime({
initialConfig,
pollingRunner: this.pollingRunner,
knownWordCache: this.knownWordCache,
proxyServerFactory: () => this.createProxyServer(),
logInfo: (message, ...args) => log.info(message, ...args),
logWarn: (message, ...args) => log.warn(message, ...args),
logError: (message, ...args) => log.error(message, ...args),
onConfigChanged: (nextConfig) => {
this.config = nextConfig;
this.client = new AnkiConnectClient(nextConfig.url!);
},
});
}
private createCardCreationService(): CardCreationService {
@@ -517,14 +458,6 @@ export class AnkiIntegration {
return this.config.nPlusOne?.highlightEnabled === true;
}
private startKnownWordCacheLifecycle(): void {
this.knownWordCache.startLifecycle();
}
private stopKnownWordCacheLifecycle(): void {
this.knownWordCache.stopLifecycle();
}
private getConfiguredAnkiTags(): string[] {
if (!Array.isArray(this.config.tags)) {
return [];
@@ -606,64 +539,12 @@ export class AnkiIntegration {
};
}
private isProxyTransportEnabled(config: AnkiConnectConfig = this.config): boolean {
return config.proxy?.enabled === true;
}
private getTransportConfigKey(config: AnkiConnectConfig = this.config): string {
if (this.isProxyTransportEnabled(config)) {
return [
'proxy',
config.proxy?.host ?? '',
String(config.proxy?.port ?? ''),
config.proxy?.upstreamUrl ?? '',
].join(':');
}
return ['polling', String(config.pollingRate ?? DEFAULT_ANKI_CONNECT_CONFIG.pollingRate)].join(
':',
);
}
private startTransport(): void {
if (this.isProxyTransportEnabled()) {
const proxyHost = this.config.proxy?.host ?? '127.0.0.1';
const proxyPort = this.config.proxy?.port ?? 8766;
const upstreamUrl = this.config.proxy?.upstreamUrl ?? this.config.url ?? '';
this.getOrCreateProxyServer().start({
host: proxyHost,
port: proxyPort,
upstreamUrl,
});
log.info(
`Starting AnkiConnect integration with local proxy: http://${proxyHost}:${proxyPort} -> ${upstreamUrl}`,
);
return;
}
log.info('Starting AnkiConnect integration with polling rate:', this.config.pollingRate);
this.pollingRunner.start();
}
private stopTransport(): void {
this.pollingRunner.stop();
this.proxyServer?.stop();
}
start(): void {
if (this.started) {
this.stop();
}
this.startKnownWordCacheLifecycle();
this.startTransport();
this.started = true;
this.runtime.start();
}
stop(): void {
this.stopTransport();
this.stopKnownWordCacheLifecycle();
this.started = false;
log.info('Stopped AnkiConnect integration');
this.runtime.stop();
}
private async processNewCard(
@@ -1216,58 +1097,7 @@ export class AnkiIntegration {
}
applyRuntimeConfigPatch(patch: Partial<AnkiConnectConfig>): void {
const wasEnabled = this.config.nPlusOne?.highlightEnabled === true;
const previousTransportKey = this.getTransportConfigKey(this.config);
const mergedConfig: AnkiConnectConfig = {
...this.config,
...patch,
nPlusOne:
patch.nPlusOne !== undefined
? {
...(this.config.nPlusOne ?? DEFAULT_ANKI_CONNECT_CONFIG.nPlusOne),
...patch.nPlusOne,
}
: this.config.nPlusOne,
fields:
patch.fields !== undefined
? { ...this.config.fields, ...patch.fields }
: this.config.fields,
media:
patch.media !== undefined ? { ...this.config.media, ...patch.media } : this.config.media,
behavior:
patch.behavior !== undefined
? { ...this.config.behavior, ...patch.behavior }
: this.config.behavior,
proxy:
patch.proxy !== undefined ? { ...this.config.proxy, ...patch.proxy } : this.config.proxy,
metadata:
patch.metadata !== undefined
? { ...this.config.metadata, ...patch.metadata }
: this.config.metadata,
isLapis:
patch.isLapis !== undefined
? { ...this.config.isLapis, ...patch.isLapis }
: this.config.isLapis,
isKiku:
patch.isKiku !== undefined
? { ...this.config.isKiku, ...patch.isKiku }
: this.config.isKiku,
};
this.config = this.normalizeConfig(mergedConfig);
if (wasEnabled && this.config.nPlusOne?.highlightEnabled === false) {
this.stopKnownWordCacheLifecycle();
this.knownWordCache.clearKnownWordCacheState();
} else {
this.startKnownWordCacheLifecycle();
}
const nextTransportKey = this.getTransportConfigKey(this.config);
if (this.started && previousTransportKey !== nextTransportKey) {
this.stopTransport();
this.startTransport();
}
this.runtime.applyRuntimeConfigPatch(patch);
}
destroy(): void {

View File

@@ -80,7 +80,7 @@ export class FieldGroupingWorkflow {
async handleManual(
originalNoteId: number,
newNoteId: number,
_newNoteId: number,
newNoteInfo: FieldGroupingWorkflowNoteInfo,
): Promise<boolean> {
const callback = await this.resolveFieldGroupingCallback();

View File

@@ -0,0 +1,108 @@
import test from 'node:test';
import assert from 'node:assert/strict';
import { DEFAULT_ANKI_CONNECT_CONFIG } from '../config';
import type { AnkiConnectConfig } from '../types';
import { AnkiIntegrationRuntime } from './runtime';
/**
 * Test helper: build an AnkiIntegrationRuntime wired to recording fakes.
 * Returns the runtime plus the ordered list of lifecycle calls the fakes saw,
 * so tests can assert exact call ordering across transports and the cache.
 */
function createRuntime(
  config: Partial<AnkiConnectConfig> = {},
  overrides: Partial<ConstructorParameters<typeof AnkiIntegrationRuntime>[0]> = {},
) {
  // Every fake appends a tag here; tests assert against this array.
  const calls: string[] = [];
  const runtime = new AnkiIntegrationRuntime({
    initialConfig: config as AnkiConnectConfig,
    pollingRunner: {
      start: () => calls.push('polling:start'),
      stop: () => calls.push('polling:stop'),
    },
    knownWordCache: {
      startLifecycle: () => calls.push('known:start'),
      stopLifecycle: () => calls.push('known:stop'),
      clearKnownWordCacheState: () => calls.push('known:clear'),
    },
    proxyServerFactory: () => ({
      start: ({ host, port, upstreamUrl }) =>
        calls.push(`proxy:start:${host}:${port}:${upstreamUrl}`),
      stop: () => calls.push('proxy:stop'),
    }),
    logInfo: () => undefined,
    logWarn: () => undefined,
    logError: () => undefined,
    onConfigChanged: () => undefined,
    // Individual tests may replace any of the fakes above.
    ...overrides,
  });
  return { runtime, calls };
}
// Normalization: url/host trimming, proxy defaults, and media defaults.
test('AnkiIntegrationRuntime normalizes url and proxy defaults', () => {
  const { runtime } = createRuntime({
    url: ' http://anki.local:8765 ',
    proxy: {
      enabled: true,
      host: ' 0.0.0.0 ',
      port: 7001,
      upstreamUrl: ' ',
    },
  });
  const normalized = runtime.getConfig();
  assert.equal(normalized.url, 'http://anki.local:8765');
  assert.equal(normalized.proxy?.enabled, true);
  assert.equal(normalized.proxy?.host, '0.0.0.0');
  assert.equal(normalized.proxy?.port, 7001);
  // Blank upstreamUrl falls back to the resolved AnkiConnect url.
  assert.equal(normalized.proxy?.upstreamUrl, 'http://anki.local:8765');
  assert.equal(
    normalized.media?.fallbackDuration,
    DEFAULT_ANKI_CONNECT_CONFIG.media.fallbackDuration,
  );
});

// Transport selection: proxy mode starts the proxy server, not polling.
test('AnkiIntegrationRuntime starts proxy transport when proxy mode is enabled', () => {
  const { runtime, calls } = createRuntime({
    proxy: {
      enabled: true,
      host: '127.0.0.1',
      port: 9999,
      upstreamUrl: 'http://upstream:8765',
    },
  });
  runtime.start();
  assert.deepEqual(calls, ['known:start', 'proxy:start:127.0.0.1:9999:http://upstream:8765']);
});

// Runtime patching: disabling highlighting clears the cache, and a transport
// config change restarts the transport (polling -> proxy here).
test('AnkiIntegrationRuntime switches transports and clears known words when runtime patch disables highlighting', () => {
  const { runtime, calls } = createRuntime({
    nPlusOne: {
      highlightEnabled: true,
    },
    pollingRate: 250,
  });
  runtime.start();
  // Discard the start() calls so the assertion covers only the patch effects.
  calls.length = 0;
  runtime.applyRuntimeConfigPatch({
    nPlusOne: {
      highlightEnabled: false,
    },
    proxy: {
      enabled: true,
      host: '127.0.0.1',
      port: 8766,
      upstreamUrl: 'http://127.0.0.1:8765',
    },
  });
  assert.deepEqual(calls, [
    'known:stop',
    'known:clear',
    'polling:stop',
    'proxy:start:127.0.0.1:8766:http://127.0.0.1:8765',
  ]);
});

View File

@@ -0,0 +1,232 @@
import { DEFAULT_ANKI_CONNECT_CONFIG } from '../config';
import type { AnkiConnectConfig } from '../types';
/** Minimal surface of the AnkiConnect proxy server the runtime starts/stops. */
export interface AnkiIntegrationRuntimeProxyServer {
  start(options: { host: string; port: number; upstreamUrl: string }): void;
  stop(): void;
}

/** Collaborators injected into AnkiIntegrationRuntime (enables test fakes). */
interface AnkiIntegrationRuntimeDeps {
  // Raw config; normalized by the runtime's constructor.
  initialConfig: AnkiConnectConfig;
  // Polling transport used when the proxy transport is disabled.
  pollingRunner: {
    start(): void;
    stop(): void;
  };
  // Known-word cache lifecycle, driven alongside the transport.
  knownWordCache: {
    startLifecycle(): void;
    stopLifecycle(): void;
    clearKnownWordCacheState(): void;
  };
  // Lazily invoked on first proxy-transport start; result is memoized.
  proxyServerFactory: () => AnkiIntegrationRuntimeProxyServer;
  logInfo: (message: string, ...args: unknown[]) => void;
  logWarn: (message: string, ...args: unknown[]) => void;
  logError: (message: string, ...args: unknown[]) => void;
  // Notified with the normalized config after each runtime patch.
  onConfigChanged?: (config: AnkiConnectConfig) => void;
}
/**
 * Return the trimmed string when the input is a string with non-whitespace
 * content; otherwise return null (including for non-string inputs).
 */
function trimToNonEmptyString(value: unknown): string | null {
  const trimmed = typeof value === 'string' ? value.trim() : '';
  return trimmed.length > 0 ? trimmed : null;
}
/**
 * Normalize a raw AnkiConnect config into a fully-populated config: fills every
 * section from DEFAULT_ANKI_CONNECT_CONFIG, trims/validates url and proxy
 * settings, and defaults the proxy upstream to the resolved AnkiConnect url.
 * Merge precedence (later wins): defaults < provided config < normalized fields.
 */
export function normalizeAnkiIntegrationConfig(config: AnkiConnectConfig): AnkiConnectConfig {
  // Blank/absent url falls back to the default.
  const resolvedUrl = trimToNonEmptyString(config.url) ?? DEFAULT_ANKI_CONNECT_CONFIG.url;
  // Tolerate a missing or non-object proxy section.
  const proxySource =
    config.proxy && typeof config.proxy === 'object'
      ? (config.proxy as NonNullable<AnkiConnectConfig['proxy']>)
      : {};
  // Accept only valid integer TCP ports; otherwise use the default proxy port.
  const normalizedProxyPort =
    typeof proxySource.port === 'number' &&
    Number.isInteger(proxySource.port) &&
    proxySource.port >= 1 &&
    proxySource.port <= 65535
      ? proxySource.port
      : DEFAULT_ANKI_CONNECT_CONFIG.proxy?.port;
  const normalizedProxyHost =
    trimToNonEmptyString(proxySource.host) ?? DEFAULT_ANKI_CONNECT_CONFIG.proxy?.host;
  // Proxy upstream defaults to the resolved AnkiConnect url when unset/blank.
  const normalizedProxyUpstreamUrl = trimToNonEmptyString(proxySource.upstreamUrl) ?? resolvedUrl;
  return {
    ...DEFAULT_ANKI_CONNECT_CONFIG,
    ...config,
    url: resolvedUrl,
    fields: {
      ...DEFAULT_ANKI_CONNECT_CONFIG.fields,
      ...(config.fields ?? {}),
    },
    proxy: {
      ...DEFAULT_ANKI_CONNECT_CONFIG.proxy,
      ...(config.proxy ?? {}),
      // `enabled` is strictly opt-in; anything but literal true is off.
      enabled: proxySource.enabled === true,
      host: normalizedProxyHost,
      port: normalizedProxyPort,
      upstreamUrl: normalizedProxyUpstreamUrl,
    },
    // Legacy `openRouter` settings are folded in under `ai`; `ai` wins on conflict.
    ai: {
      ...DEFAULT_ANKI_CONNECT_CONFIG.ai,
      ...(config.openRouter ?? {}),
      ...(config.ai ?? {}),
    },
    media: {
      ...DEFAULT_ANKI_CONNECT_CONFIG.media,
      ...(config.media ?? {}),
    },
    behavior: {
      ...DEFAULT_ANKI_CONNECT_CONFIG.behavior,
      ...(config.behavior ?? {}),
    },
    metadata: {
      ...DEFAULT_ANKI_CONNECT_CONFIG.metadata,
      ...(config.metadata ?? {}),
    },
    isLapis: {
      ...DEFAULT_ANKI_CONNECT_CONFIG.isLapis,
      ...(config.isLapis ?? {}),
    },
    isKiku: {
      ...DEFAULT_ANKI_CONNECT_CONFIG.isKiku,
      ...(config.isKiku ?? {}),
    },
  } as AnkiConnectConfig;
}
export class AnkiIntegrationRuntime {
private config: AnkiConnectConfig;
private proxyServer: AnkiIntegrationRuntimeProxyServer | null = null;
private started = false;
constructor(private readonly deps: AnkiIntegrationRuntimeDeps) {
this.config = normalizeAnkiIntegrationConfig(deps.initialConfig);
}
getConfig(): AnkiConnectConfig {
return this.config;
}
start(): void {
if (this.started) {
this.stop();
}
this.deps.knownWordCache.startLifecycle();
this.startTransport();
this.started = true;
}
stop(): void {
this.stopTransport();
this.deps.knownWordCache.stopLifecycle();
this.started = false;
this.deps.logInfo('Stopped AnkiConnect integration');
}
/**
 * Merges a partial config patch into the current config and reacts to the
 * resulting state changes:
 *  - re-normalizes and publishes the result via deps.onConfigChanged,
 *  - stops and clears the known-word cache when n+1 highlighting transitions
 *    from on to explicitly off, otherwise (re)starts its lifecycle,
 *  - restarts the transport only when the runtime is started AND the
 *    transport-identity key (mode/host/port/upstream or polling rate) changed.
 *
 * Each nested section (nPlusOne, fields, media, behavior, proxy, metadata,
 * isLapis, isKiku) is shallow-merged individually, so a patch can change one
 * nested key without wiping its siblings.
 */
applyRuntimeConfigPatch(patch: Partial<AnkiConnectConfig>): void {
  // Snapshot pre-merge state for the transition checks below.
  const wasKnownWordCacheEnabled = this.config.nPlusOne?.highlightEnabled === true;
  const previousTransportKey = this.getTransportConfigKey(this.config);
  const mergedConfig: AnkiConnectConfig = {
    ...this.config,
    ...patch,
    // nPlusOne uniquely falls back to the built-in default section when the
    // current config has none, so a partial nPlusOne patch still yields a
    // complete section.
    nPlusOne:
      patch.nPlusOne !== undefined
        ? {
            ...(this.config.nPlusOne ?? DEFAULT_ANKI_CONNECT_CONFIG.nPlusOne),
            ...patch.nPlusOne,
          }
        : this.config.nPlusOne,
    fields:
      patch.fields !== undefined
        ? { ...this.config.fields, ...patch.fields }
        : this.config.fields,
    media:
      patch.media !== undefined ? { ...this.config.media, ...patch.media } : this.config.media,
    behavior:
      patch.behavior !== undefined
        ? { ...this.config.behavior, ...patch.behavior }
        : this.config.behavior,
    proxy:
      patch.proxy !== undefined ? { ...this.config.proxy, ...patch.proxy } : this.config.proxy,
    metadata:
      patch.metadata !== undefined
        ? { ...this.config.metadata, ...patch.metadata }
        : this.config.metadata,
    isLapis:
      patch.isLapis !== undefined
        ? { ...this.config.isLapis, ...patch.isLapis }
        : this.config.isLapis,
    isKiku:
      patch.isKiku !== undefined
        ? { ...this.config.isKiku, ...patch.isKiku }
        : this.config.isKiku,
  };
  this.config = normalizeAnkiIntegrationConfig(mergedConfig);
  this.deps.onConfigChanged?.(this.config);
  if (wasKnownWordCacheEnabled && this.config.nPlusOne?.highlightEnabled === false) {
    // Highlighting was just switched off: tear down the cache and drop its state.
    this.deps.knownWordCache.stopLifecycle();
    this.deps.knownWordCache.clearKnownWordCacheState();
  } else {
    // NOTE(review): this branch starts the cache lifecycle on every other
    // patch, even when highlighting is (and stays) disabled — confirm
    // startLifecycle() is idempotent/guarded internally.
    this.deps.knownWordCache.startLifecycle();
  }
  // Restart the transport only if its identity actually changed and the
  // runtime is currently running; otherwise keep the existing transport.
  const nextTransportKey = this.getTransportConfigKey(this.config);
  if (this.started && previousTransportKey !== nextTransportKey) {
    this.stopTransport();
    this.startTransport();
  }
}
/**
 * Lazily constructs the local proxy server on first access via the injected
 * factory and returns the same instance on every subsequent call.
 */
getOrCreateProxyServer(): AnkiIntegrationRuntimeProxyServer {
  const existing = this.proxyServer;
  if (existing) {
    return existing;
  }
  const created = this.deps.proxyServerFactory();
  this.proxyServer = created;
  return created;
}
// True only when the proxy section exists and is explicitly enabled; any
// other value (missing section, undefined, false) selects polling transport.
private isProxyTransportEnabled(config: AnkiConnectConfig = this.config): boolean {
  const proxyEnabled = config.proxy?.enabled;
  return proxyEnabled === true;
}
/**
 * Builds a colon-joined identity key for the configured transport. Two
 * configs with equal keys require no transport restart; a key change means
 * the mode, proxy endpoint, or polling rate differs.
 */
private getTransportConfigKey(config: AnkiConnectConfig = this.config): string {
  if (!this.isProxyTransportEnabled(config)) {
    const pollingRate = config.pollingRate ?? DEFAULT_ANKI_CONNECT_CONFIG.pollingRate;
    return `polling:${String(pollingRate)}`;
  }
  const host = config.proxy?.host ?? '';
  const port = String(config.proxy?.port ?? '');
  const upstream = config.proxy?.upstreamUrl ?? '';
  return `proxy:${host}:${port}:${upstream}`;
}
private startTransport(): void {
if (this.isProxyTransportEnabled()) {
const proxyHost = this.config.proxy?.host ?? '127.0.0.1';
const proxyPort = this.config.proxy?.port ?? 8766;
const upstreamUrl = this.config.proxy?.upstreamUrl ?? this.config.url ?? '';
this.getOrCreateProxyServer().start({
host: proxyHost,
port: proxyPort,
upstreamUrl,
});
this.deps.logInfo(
`Starting AnkiConnect integration with local proxy: http://${proxyHost}:${proxyPort} -> ${upstreamUrl}`,
);
return;
}
this.deps.logInfo(
'Starting AnkiConnect integration with polling rate:',
this.config.pollingRate,
);
this.deps.pollingRunner.start();
}
// Stops both possible transports: the polling runner unconditionally, and
// the proxy server only if it was ever created (optional chaining).
private stopTransport(): void {
  this.deps.pollingRunner.stop();
  this.proxyServer?.stop();
}
}

View File

@@ -169,4 +169,9 @@ test('hasExplicitCommand and shouldStartApp preserve command intent', () => {
assert.equal(background.background, true);
assert.equal(hasExplicitCommand(background), true);
assert.equal(shouldStartApp(background), true);
const setup = parseArgs(['--setup']);
assert.equal((setup as typeof setup & { setup?: boolean }).setup, true);
assert.equal(hasExplicitCommand(setup), true);
assert.equal(shouldStartApp(setup), true);
});

View File

@@ -5,6 +5,7 @@ export interface CliArgs {
toggle: boolean;
toggleVisibleOverlay: boolean;
settings: boolean;
setup: boolean;
show: boolean;
hide: boolean;
showVisibleOverlay: boolean;
@@ -71,6 +72,7 @@ export function parseArgs(argv: string[]): CliArgs {
toggle: false,
toggleVisibleOverlay: false,
settings: false,
setup: false,
show: false,
hide: false,
showVisibleOverlay: false,
@@ -125,6 +127,7 @@ export function parseArgs(argv: string[]): CliArgs {
else if (arg === '--toggle') args.toggle = true;
else if (arg === '--toggle-visible-overlay') args.toggleVisibleOverlay = true;
else if (arg === '--settings' || arg === '--yomitan') args.settings = true;
else if (arg === '--setup') args.setup = true;
else if (arg === '--show') args.show = true;
else if (arg === '--hide') args.hide = true;
else if (arg === '--show-visible-overlay') args.showVisibleOverlay = true;
@@ -298,6 +301,7 @@ export function hasExplicitCommand(args: CliArgs): boolean {
args.toggle ||
args.toggleVisibleOverlay ||
args.settings ||
args.setup ||
args.show ||
args.hide ||
args.showVisibleOverlay ||
@@ -341,6 +345,7 @@ export function shouldStartApp(args: CliArgs): boolean {
args.toggle ||
args.toggleVisibleOverlay ||
args.settings ||
args.setup ||
args.copySubtitle ||
args.copySubtitleMultiple ||
args.mineSentence ||
@@ -371,6 +376,7 @@ export function shouldRunSettingsOnlyStartup(args: CliArgs): boolean {
!args.toggleVisibleOverlay &&
!args.show &&
!args.hide &&
!args.setup &&
!args.showVisibleOverlay &&
!args.hideVisibleOverlay &&
!args.copySubtitle &&

View File

@@ -18,6 +18,7 @@ test('printHelp includes configured texthooker port', () => {
assert.match(output, /--help\s+Show this help/);
assert.match(output, /default: 7777/);
assert.match(output, /--refresh-known-words/);
assert.match(output, /--setup\s+Open first-run setup window/);
assert.match(output, /--anilist-status/);
assert.match(output, /--anilist-retry-queue/);
assert.match(output, /--dictionary/);

View File

@@ -20,6 +20,7 @@ ${B}Overlay${R}
--show-visible-overlay Show subtitle overlay
--hide-visible-overlay Hide subtitle overlay
--settings Open Yomitan settings window
--setup Open first-run setup window
--auto-start-overlay Auto-hide mpv subs, show overlay on connect
${B}Mining${R}

View File

@@ -16,6 +16,9 @@ test('loads defaults when config is missing', () => {
const service = new ConfigService(dir);
const config = service.getConfig();
assert.equal(config.websocket.port, DEFAULT_CONFIG.websocket.port);
assert.equal(config.annotationWebsocket.enabled, DEFAULT_CONFIG.annotationWebsocket.enabled);
assert.equal(config.annotationWebsocket.port, DEFAULT_CONFIG.annotationWebsocket.port);
assert.equal(config.texthooker.launchAtStartup, true);
assert.equal(config.ankiConnect.behavior.autoUpdateNewCards, true);
assert.deepEqual(config.ankiConnect.tags, ['SubMiner']);
assert.equal(config.anilist.enabled, false);
@@ -24,6 +27,9 @@ test('loads defaults when config is missing', () => {
assert.equal(config.anilist.characterDictionary.maxLoaded, 3);
assert.equal(config.anilist.characterDictionary.evictionPolicy, 'delete');
assert.equal(config.anilist.characterDictionary.profileScope, 'all');
assert.equal(config.anilist.characterDictionary.collapsibleSections.description, false);
assert.equal(config.anilist.characterDictionary.collapsibleSections.characterInformation, false);
assert.equal(config.anilist.characterDictionary.collapsibleSections.voicedBy, false);
assert.equal(config.jellyfin.remoteControlEnabled, true);
assert.equal(config.jellyfin.remoteControlAutoConnect, true);
assert.equal(config.jellyfin.autoAnnounce, false);
@@ -128,6 +134,88 @@ test('parses subtitleStyle.preserveLineBreaks and warns on invalid values', () =
);
});
test('parses texthooker.launchAtStartup and warns on invalid values', () => {
const validDir = makeTempDir();
fs.writeFileSync(
path.join(validDir, 'config.jsonc'),
`{
"texthooker": {
"launchAtStartup": false
}
}`,
'utf-8',
);
const validService = new ConfigService(validDir);
assert.equal(validService.getConfig().texthooker.launchAtStartup, false);
const invalidDir = makeTempDir();
fs.writeFileSync(
path.join(invalidDir, 'config.jsonc'),
`{
"texthooker": {
"launchAtStartup": "yes"
}
}`,
'utf-8',
);
const invalidService = new ConfigService(invalidDir);
assert.equal(
invalidService.getConfig().texthooker.launchAtStartup,
DEFAULT_CONFIG.texthooker.launchAtStartup,
);
assert.ok(
invalidService.getWarnings().some((warning) => warning.path === 'texthooker.launchAtStartup'),
);
});
test('parses annotationWebsocket settings and warns on invalid values', () => {
const validDir = makeTempDir();
fs.writeFileSync(
path.join(validDir, 'config.jsonc'),
`{
"annotationWebsocket": {
"enabled": false,
"port": 7788
}
}`,
'utf-8',
);
const validService = new ConfigService(validDir);
assert.equal(validService.getConfig().annotationWebsocket.enabled, false);
assert.equal(validService.getConfig().annotationWebsocket.port, 7788);
const invalidDir = makeTempDir();
fs.writeFileSync(
path.join(invalidDir, 'config.jsonc'),
`{
"annotationWebsocket": {
"enabled": "yes",
"port": "bad"
}
}`,
'utf-8',
);
const invalidService = new ConfigService(invalidDir);
assert.equal(
invalidService.getConfig().annotationWebsocket.enabled,
DEFAULT_CONFIG.annotationWebsocket.enabled,
);
assert.equal(
invalidService.getConfig().annotationWebsocket.port,
DEFAULT_CONFIG.annotationWebsocket.port,
);
assert.ok(
invalidService.getWarnings().some((warning) => warning.path === 'annotationWebsocket.enabled'),
);
assert.ok(
invalidService.getWarnings().some((warning) => warning.path === 'annotationWebsocket.port'),
);
});
test('parses subtitleStyle.autoPauseVideoOnHover and warns on invalid values', () => {
const validDir = makeTempDir();
fs.writeFileSync(
@@ -242,6 +330,47 @@ test('parses subtitleStyle.hoverTokenColor and warns on invalid values', () => {
);
});
test('parses subtitleStyle.nameMatchColor and warns on invalid values', () => {
const validDir = makeTempDir();
fs.writeFileSync(
path.join(validDir, 'config.jsonc'),
`{
"subtitleStyle": {
"nameMatchColor": "#eed49f"
}
}`,
'utf-8',
);
const validService = new ConfigService(validDir);
assert.equal(
((validService.getConfig().subtitleStyle as unknown as Record<string, unknown>)
.nameMatchColor ?? null) as string | null,
'#eed49f',
);
const invalidDir = makeTempDir();
fs.writeFileSync(
path.join(invalidDir, 'config.jsonc'),
`{
"subtitleStyle": {
"nameMatchColor": "pink"
}
}`,
'utf-8',
);
const invalidService = new ConfigService(invalidDir);
assert.equal(
((invalidService.getConfig().subtitleStyle as unknown as Record<string, unknown>)
.nameMatchColor ?? null) as string | null,
'#f5bde6',
);
assert.ok(
invalidService.getWarnings().some((warning) => warning.path === 'subtitleStyle.nameMatchColor'),
);
});
test('parses subtitleStyle.hoverTokenBackgroundColor and warns on invalid values', () => {
const validDir = makeTempDir();
fs.writeFileSync(
@@ -280,6 +409,44 @@ test('parses subtitleStyle.hoverTokenBackgroundColor and warns on invalid values
);
});
test('parses subtitleStyle.nameMatchEnabled and warns on invalid values', () => {
const validDir = makeTempDir();
fs.writeFileSync(
path.join(validDir, 'config.jsonc'),
`{
"subtitleStyle": {
"nameMatchEnabled": false
}
}`,
'utf-8',
);
const validService = new ConfigService(validDir);
assert.equal(validService.getConfig().subtitleStyle.nameMatchEnabled, false);
const invalidDir = makeTempDir();
fs.writeFileSync(
path.join(invalidDir, 'config.jsonc'),
`{
"subtitleStyle": {
"nameMatchEnabled": "no"
}
}`,
'utf-8',
);
const invalidService = new ConfigService(invalidDir);
assert.equal(
invalidService.getConfig().subtitleStyle.nameMatchEnabled,
DEFAULT_CONFIG.subtitleStyle.nameMatchEnabled,
);
assert.ok(
invalidService
.getWarnings()
.some((warning) => warning.path === 'subtitleStyle.nameMatchEnabled'),
);
});
test('parses anilist.enabled and warns for invalid value', () => {
const dir = makeTempDir();
fs.writeFileSync(
@@ -330,10 +497,49 @@ test('parses anilist.characterDictionary config with clamping and enum validatio
assert.equal(config.anilist.characterDictionary.maxLoaded, 20);
assert.equal(config.anilist.characterDictionary.evictionPolicy, 'delete');
assert.equal(config.anilist.characterDictionary.profileScope, 'all');
assert.ok(warnings.some((warning) => warning.path === 'anilist.characterDictionary.refreshTtlHours'));
assert.ok(
warnings.some((warning) => warning.path === 'anilist.characterDictionary.refreshTtlHours'),
);
assert.ok(warnings.some((warning) => warning.path === 'anilist.characterDictionary.maxLoaded'));
assert.ok(warnings.some((warning) => warning.path === 'anilist.characterDictionary.evictionPolicy'));
assert.ok(warnings.some((warning) => warning.path === 'anilist.characterDictionary.profileScope'));
assert.ok(
warnings.some((warning) => warning.path === 'anilist.characterDictionary.evictionPolicy'),
);
assert.ok(
warnings.some((warning) => warning.path === 'anilist.characterDictionary.profileScope'),
);
});
test('parses anilist.characterDictionary.collapsibleSections booleans and warns on invalid values', () => {
const dir = makeTempDir();
fs.writeFileSync(
path.join(dir, 'config.jsonc'),
`{
"anilist": {
"characterDictionary": {
"collapsibleSections": {
"description": true,
"characterInformation": "yes",
"voicedBy": true
}
}
}
}`,
'utf-8',
);
const service = new ConfigService(dir);
const config = service.getConfig();
const warnings = service.getWarnings();
assert.equal(config.anilist.characterDictionary.collapsibleSections.description, true);
assert.equal(config.anilist.characterDictionary.collapsibleSections.characterInformation, false);
assert.equal(config.anilist.characterDictionary.collapsibleSections.voicedBy, true);
assert.ok(
warnings.some(
(warning) =>
warning.path === 'anilist.characterDictionary.collapsibleSections.characterInformation',
),
);
});
test('parses jellyfin remote control fields', () => {
@@ -759,6 +965,10 @@ test('warning emission order is deterministic across reloads', () => {
"enabled": "sometimes",
"port": -1
},
"annotationWebsocket": {
"enabled": "sometimes",
"port": -1
},
"logging": {
"level": "trace"
}
@@ -775,7 +985,14 @@ test('warning emission order is deterministic across reloads', () => {
assert.deepEqual(secondWarnings, firstWarnings);
assert.deepEqual(
firstWarnings.map((warning) => warning.path),
['unknownFeature', 'websocket.enabled', 'websocket.port', 'logging.level'],
[
'unknownFeature',
'websocket.enabled',
'websocket.port',
'annotationWebsocket.enabled',
'annotationWebsocket.port',
'logging.level',
],
);
});
@@ -1345,8 +1562,17 @@ test('template generator includes known keys', () => {
output,
/"enabled": "auto",? \/\/ Built-in subtitle websocket server mode\. Values: auto \| true \| false/,
);
assert.match(
output,
/"enabled": true,? \/\/ Annotated subtitle websocket server enabled state\. Values: true \| false/,
);
assert.match(output, /"port": 6678,? \/\/ Annotated subtitle websocket server port\./);
assert.match(
output,
/"enabled": false,? \/\/ Enable AnkiConnect integration\. Values: true \| false/,
);
assert.match(
output,
/"launchAtStartup": true,? \/\/ Launch texthooker server automatically when SubMiner starts\. Values: true \| false/,
);
});

View File

@@ -22,6 +22,7 @@ const {
subtitlePosition,
keybindings,
websocket,
annotationWebsocket,
logging,
texthooker,
shortcuts,
@@ -39,6 +40,7 @@ export const DEFAULT_CONFIG: ResolvedConfig = {
subtitlePosition,
keybindings,
websocket,
annotationWebsocket,
logging,
texthooker,
ankiConnect,

View File

@@ -5,6 +5,7 @@ export const CORE_DEFAULT_CONFIG: Pick<
| 'subtitlePosition'
| 'keybindings'
| 'websocket'
| 'annotationWebsocket'
| 'logging'
| 'texthooker'
| 'shortcuts'
@@ -19,10 +20,15 @@ export const CORE_DEFAULT_CONFIG: Pick<
enabled: 'auto',
port: 6677,
},
annotationWebsocket: {
enabled: true,
port: 6678,
},
logging: {
level: 'info',
},
texthooker: {
launchAtStartup: true,
openBrowser: true,
},
shortcuts: {

View File

@@ -92,6 +92,11 @@ export const INTEGRATIONS_DEFAULT_CONFIG: Pick<
maxLoaded: 3,
evictionPolicy: 'delete',
profileScope: 'all',
collapsibleSections: {
description: false,
characterInformation: false,
voicedBy: false,
},
},
},
jellyfin: {

View File

@@ -8,6 +8,8 @@ export const SUBTITLE_DEFAULT_CONFIG: Pick<ResolvedConfig, 'subtitleStyle'> = {
autoPauseVideoOnYomitanPopup: false,
hoverTokenColor: '#f4dbd6',
hoverTokenBackgroundColor: 'rgba(54, 58, 79, 0.84)',
nameMatchEnabled: true,
nameMatchColor: '#f5bde6',
fontFamily: 'M PLUS 1 Medium, Source Han Sans JP, Noto Sans CJK JP',
fontSize: 35,
fontColor: '#cad3f5',
@@ -37,7 +39,7 @@ export const SUBTITLE_DEFAULT_CONFIG: Pick<ResolvedConfig, 'subtitleStyle'> = {
mode: 'single',
matchMode: 'headword',
singleColor: '#f5a97f',
bandedColors: ['#ed8796', '#f5a97f', '#f9e2af', '#a6e3a1', '#8aadf4'],
bandedColors: ['#ed8796', '#f5a97f', '#f9e2af', '#8bd5ca', '#8aadf4'],
},
secondary: {
fontFamily: 'Inter, Noto Sans, Helvetica Neue, sans-serif',

View File

@@ -18,11 +18,13 @@ test('config option registry includes critical paths and has unique entries', ()
for (const requiredPath of [
'logging.level',
'annotationWebsocket.enabled',
'startupWarmups.lowPowerMode',
'subtitleStyle.enableJlpt',
'subtitleStyle.autoPauseVideoOnYomitanPopup',
'ankiConnect.enabled',
'anilist.characterDictionary.enabled',
'anilist.characterDictionary.collapsibleSections.description',
'immersionTracking.enabled',
]) {
assert.ok(paths.includes(requiredPath), `missing config path: ${requiredPath}`);
@@ -35,6 +37,7 @@ test('config template sections include expected domains and unique keys', () =>
const keys = CONFIG_TEMPLATE_SECTIONS.map((section) => section.key);
const requiredKeys: (typeof keys)[number][] = [
'websocket',
'annotationWebsocket',
'startupWarmups',
'subtitleStyle',
'ankiConnect',

View File

@@ -12,6 +12,12 @@ export function buildCoreConfigOptionRegistry(
defaultValue: defaultConfig.logging.level,
description: 'Minimum log level for runtime logging.',
},
{
path: 'texthooker.launchAtStartup',
kind: 'boolean',
defaultValue: defaultConfig.texthooker.launchAtStartup,
description: 'Launch texthooker server automatically when SubMiner starts.',
},
{
path: 'websocket.enabled',
kind: 'enum',
@@ -25,6 +31,18 @@ export function buildCoreConfigOptionRegistry(
defaultValue: defaultConfig.websocket.port,
description: 'Built-in subtitle websocket server port.',
},
{
path: 'annotationWebsocket.enabled',
kind: 'boolean',
defaultValue: defaultConfig.annotationWebsocket.enabled,
description: 'Annotated subtitle websocket server enabled state.',
},
{
path: 'annotationWebsocket.port',
kind: 'number',
defaultValue: defaultConfig.annotationWebsocket.port,
description: 'Annotated subtitle websocket server port.',
},
{
path: 'subsync.defaultMode',
kind: 'enum',

View File

@@ -171,6 +171,28 @@ export function buildIntegrationConfigOptionRegistry(
defaultValue: defaultConfig.anilist.characterDictionary.profileScope,
description: 'Yomitan profile scope for dictionary enable/disable updates.',
},
{
path: 'anilist.characterDictionary.collapsibleSections.description',
kind: 'boolean',
defaultValue: defaultConfig.anilist.characterDictionary.collapsibleSections.description,
description:
'Open the Description section by default in character dictionary glossary entries.',
},
{
path: 'anilist.characterDictionary.collapsibleSections.characterInformation',
kind: 'boolean',
defaultValue:
defaultConfig.anilist.characterDictionary.collapsibleSections.characterInformation,
description:
'Open the Character Information section by default in character dictionary glossary entries.',
},
{
path: 'anilist.characterDictionary.collapsibleSections.voicedBy',
kind: 'boolean',
defaultValue: defaultConfig.anilist.characterDictionary.collapsibleSections.voicedBy,
description:
'Open the Voiced by section by default in character dictionary glossary entries.',
},
{
path: 'jellyfin.enabled',
kind: 'boolean',

View File

@@ -47,6 +47,20 @@ export function buildSubtitleConfigOptionRegistry(
defaultValue: defaultConfig.subtitleStyle.hoverTokenBackgroundColor,
description: 'CSS color used for hovered subtitle token background highlight in mpv.',
},
{
path: 'subtitleStyle.nameMatchEnabled',
kind: 'boolean',
defaultValue: defaultConfig.subtitleStyle.nameMatchEnabled,
description:
'Enable subtitle token coloring for matches from the SubMiner character dictionary.',
},
{
path: 'subtitleStyle.nameMatchColor',
kind: 'string',
defaultValue: defaultConfig.subtitleStyle.nameMatchColor,
description:
'Hex color used when a subtitle token matches an entry from the SubMiner character dictionary.',
},
{
path: 'subtitleStyle.frequencyDictionary.enabled',
kind: 'boolean',

View File

@@ -10,7 +10,7 @@ const CORE_TEMPLATE_SECTIONS: ConfigTemplateSection[] = [
},
{
title: 'Texthooker Server',
description: ['Control whether browser opens automatically for texthooker.'],
description: ['Configure texthooker startup launch and browser opening behavior.'],
key: 'texthooker',
},
{
@@ -21,6 +21,14 @@ const CORE_TEMPLATE_SECTIONS: ConfigTemplateSection[] = [
],
key: 'websocket',
},
{
title: 'Annotation WebSocket',
description: [
'Dedicated annotated subtitle websocket for bundled texthooker and token-aware clients.',
'Independent from websocket.auto and defaults to port 6678.',
],
key: 'annotationWebsocket',
},
{
title: 'Logging',
description: ['Controls logging verbosity.', 'Set to debug for full runtime diagnostics.'],

View File

@@ -5,6 +5,18 @@ export function applyCoreDomainConfig(context: ResolveContext): void {
const { src, resolved, warn } = context;
if (isObject(src.texthooker)) {
const launchAtStartup = asBoolean(src.texthooker.launchAtStartup);
if (launchAtStartup !== undefined) {
resolved.texthooker.launchAtStartup = launchAtStartup;
} else if (src.texthooker.launchAtStartup !== undefined) {
warn(
'texthooker.launchAtStartup',
src.texthooker.launchAtStartup,
resolved.texthooker.launchAtStartup,
'Expected boolean.',
);
}
const openBrowser = asBoolean(src.texthooker.openBrowser);
if (openBrowser !== undefined) {
resolved.texthooker.openBrowser = openBrowser;
@@ -44,6 +56,32 @@ export function applyCoreDomainConfig(context: ResolveContext): void {
}
}
if (isObject(src.annotationWebsocket)) {
const enabled = asBoolean(src.annotationWebsocket.enabled);
if (enabled !== undefined) {
resolved.annotationWebsocket.enabled = enabled;
} else if (src.annotationWebsocket.enabled !== undefined) {
warn(
'annotationWebsocket.enabled',
src.annotationWebsocket.enabled,
resolved.annotationWebsocket.enabled,
'Expected boolean.',
);
}
const port = asNumber(src.annotationWebsocket.port);
if (port !== undefined && port > 0 && port <= 65535) {
resolved.annotationWebsocket.port = Math.floor(port);
} else if (src.annotationWebsocket.port !== undefined) {
warn(
'annotationWebsocket.port',
src.annotationWebsocket.port,
resolved.annotationWebsocket.port,
'Expected integer between 1 and 65535.',
);
}
}
if (isObject(src.logging)) {
const logLevel = asString(src.logging.level);
if (

View File

@@ -124,6 +124,31 @@ export function applyIntegrationConfig(context: ResolveContext): void {
'Expected string.',
);
}
if (isObject(characterDictionary.collapsibleSections)) {
const collapsibleSections = characterDictionary.collapsibleSections;
const keys = ['description', 'characterInformation', 'voicedBy'] as const;
for (const key of keys) {
const value = asBoolean(collapsibleSections[key]);
if (value !== undefined) {
resolved.anilist.characterDictionary.collapsibleSections[key] = value;
} else if (collapsibleSections[key] !== undefined) {
warn(
`anilist.characterDictionary.collapsibleSections.${key}`,
collapsibleSections[key],
resolved.anilist.characterDictionary.collapsibleSections[key],
'Expected boolean.',
);
}
}
} else if (characterDictionary.collapsibleSections !== undefined) {
warn(
'anilist.characterDictionary.collapsibleSections',
characterDictionary.collapsibleSections,
resolved.anilist.characterDictionary.collapsibleSections,
'Expected object.',
);
}
} else if (src.anilist.characterDictionary !== undefined) {
warn(
'anilist.characterDictionary',

View File

@@ -72,6 +72,11 @@ test('anilist character dictionary fields are parsed, clamped, and enum-validate
maxLoaded: 99,
evictionPolicy: 'purge' as never,
profileScope: 'global' as never,
collapsibleSections: {
description: true,
characterInformation: 'invalid' as never,
voicedBy: true,
} as never,
},
},
});
@@ -83,10 +88,19 @@ test('anilist character dictionary fields are parsed, clamped, and enum-validate
assert.equal(context.resolved.anilist.characterDictionary.maxLoaded, 20);
assert.equal(context.resolved.anilist.characterDictionary.evictionPolicy, 'delete');
assert.equal(context.resolved.anilist.characterDictionary.profileScope, 'all');
assert.equal(context.resolved.anilist.characterDictionary.collapsibleSections.description, true);
assert.equal(
context.resolved.anilist.characterDictionary.collapsibleSections.characterInformation,
false,
);
assert.equal(context.resolved.anilist.characterDictionary.collapsibleSections.voicedBy, true);
const warnedPaths = warnings.map((warning) => warning.path);
assert.ok(warnedPaths.includes('anilist.characterDictionary.refreshTtlHours'));
assert.ok(warnedPaths.includes('anilist.characterDictionary.maxLoaded'));
assert.ok(warnedPaths.includes('anilist.characterDictionary.evictionPolicy'));
assert.ok(warnedPaths.includes('anilist.characterDictionary.profileScope'));
assert.ok(
warnedPaths.includes('anilist.characterDictionary.collapsibleSections.characterInformation'),
);
});

View File

@@ -105,6 +105,8 @@ export function applySubtitleDomainConfig(context: ResolveContext): void {
const fallbackSubtitleStyleHoverTokenColor = resolved.subtitleStyle.hoverTokenColor;
const fallbackSubtitleStyleHoverTokenBackgroundColor =
resolved.subtitleStyle.hoverTokenBackgroundColor;
const fallbackSubtitleStyleNameMatchEnabled = resolved.subtitleStyle.nameMatchEnabled;
const fallbackSubtitleStyleNameMatchColor = resolved.subtitleStyle.nameMatchColor;
const fallbackFrequencyDictionary = {
...resolved.subtitleStyle.frequencyDictionary,
};
@@ -228,6 +230,38 @@ export function applySubtitleDomainConfig(context: ResolveContext): void {
);
}
const nameMatchColor = asColor(
(src.subtitleStyle as { nameMatchColor?: unknown }).nameMatchColor,
);
const nameMatchEnabled = asBoolean(
(src.subtitleStyle as { nameMatchEnabled?: unknown }).nameMatchEnabled,
);
if (nameMatchEnabled !== undefined) {
resolved.subtitleStyle.nameMatchEnabled = nameMatchEnabled;
} else if (
(src.subtitleStyle as { nameMatchEnabled?: unknown }).nameMatchEnabled !== undefined
) {
resolved.subtitleStyle.nameMatchEnabled = fallbackSubtitleStyleNameMatchEnabled;
warn(
'subtitleStyle.nameMatchEnabled',
(src.subtitleStyle as { nameMatchEnabled?: unknown }).nameMatchEnabled,
resolved.subtitleStyle.nameMatchEnabled,
'Expected boolean.',
);
}
if (nameMatchColor !== undefined) {
resolved.subtitleStyle.nameMatchColor = nameMatchColor;
} else if ((src.subtitleStyle as { nameMatchColor?: unknown }).nameMatchColor !== undefined) {
resolved.subtitleStyle.nameMatchColor = fallbackSubtitleStyleNameMatchColor;
warn(
'subtitleStyle.nameMatchColor',
(src.subtitleStyle as { nameMatchColor?: unknown }).nameMatchColor,
resolved.subtitleStyle.nameMatchColor,
'Expected hex color.',
);
}
const frequencyDictionary = isObject(
(src.subtitleStyle as { frequencyDictionary?: unknown }).frequencyDictionary,
)

View File

@@ -66,6 +66,70 @@ test('subtitleStyle autoPauseVideoOnYomitanPopup falls back on invalid value', (
);
});
test('subtitleStyle nameMatchEnabled falls back on invalid value', () => {
const { context, warnings } = createResolveContext({
subtitleStyle: {
nameMatchEnabled: 'invalid' as unknown as boolean,
},
});
applySubtitleDomainConfig(context);
assert.equal(context.resolved.subtitleStyle.nameMatchEnabled, true);
assert.ok(
warnings.some(
(warning) =>
warning.path === 'subtitleStyle.nameMatchEnabled' &&
warning.message === 'Expected boolean.',
),
);
});
test('subtitleStyle frequencyDictionary defaults to the teal fourth band color', () => {
const { context } = createResolveContext({});
applySubtitleDomainConfig(context);
assert.deepEqual(context.resolved.subtitleStyle.frequencyDictionary.bandedColors, [
'#ed8796',
'#f5a97f',
'#f9e2af',
'#8bd5ca',
'#8aadf4',
]);
});
test('subtitleStyle nameMatchColor accepts valid values and warns on invalid', () => {
const valid = createResolveContext({
subtitleStyle: {
nameMatchColor: '#f5bde6',
},
});
applySubtitleDomainConfig(valid.context);
assert.equal(
(valid.context.resolved.subtitleStyle as { nameMatchColor?: string }).nameMatchColor,
'#f5bde6',
);
const invalid = createResolveContext({
subtitleStyle: {
nameMatchColor: 'pink',
},
});
applySubtitleDomainConfig(invalid.context);
assert.equal(
(invalid.context.resolved.subtitleStyle as { nameMatchColor?: string }).nameMatchColor,
'#f5bde6',
);
assert.ok(
invalid.warnings.some(
(warning) =>
warning.path === 'subtitleStyle.nameMatchColor' &&
warning.message === 'Expected hex color.',
),
);
});
test('subtitleStyle frequencyDictionary.matchMode accepts valid values and warns on invalid', () => {
const valid = createResolveContext({
subtitleStyle: {

View File

@@ -11,6 +11,7 @@ function makeArgs(overrides: Partial<CliArgs> = {}): CliArgs {
toggle: false,
toggleVisibleOverlay: false,
settings: false,
setup: false,
show: false,
hide: false,
showVisibleOverlay: false,

View File

@@ -4,7 +4,8 @@ import { AppReadyRuntimeDeps, runAppReadyRuntime } from './startup';
function makeDeps(overrides: Partial<AppReadyRuntimeDeps> = {}) {
const calls: string[] = [];
const deps: AppReadyRuntimeDeps = {
const deps = {
ensureDefaultConfigBootstrap: () => calls.push('ensureDefaultConfigBootstrap'),
loadSubtitlePosition: () => calls.push('loadSubtitlePosition'),
resolveKeybindings: () => calls.push('resolveKeybindings'),
createMpvClient: () => calls.push('createMpvClient'),
@@ -20,8 +21,13 @@ function makeDeps(overrides: Partial<AppReadyRuntimeDeps> = {}) {
setSecondarySubMode: (mode) => calls.push(`setSecondarySubMode:${mode}`),
defaultSecondarySubMode: 'hover',
defaultWebsocketPort: 9001,
defaultAnnotationWebsocketPort: 6678,
defaultTexthookerPort: 5174,
hasMpvWebsocketPlugin: () => true,
startSubtitleWebsocket: (port) => calls.push(`startSubtitleWebsocket:${port}`),
startAnnotationWebsocket: (port) => calls.push(`startAnnotationWebsocket:${port}`),
startTexthooker: (port, websocketUrl) =>
calls.push(`startTexthooker:${port}:${websocketUrl ?? ''}`),
log: (message) => calls.push(`log:${message}`),
createMecabTokenizerAndCheck: async () => {
calls.push('createMecabTokenizerAndCheck');
@@ -34,6 +40,9 @@ function makeDeps(overrides: Partial<AppReadyRuntimeDeps> = {}) {
loadYomitanExtension: async () => {
calls.push('loadYomitanExtension');
},
handleFirstRunSetup: async () => {
calls.push('handleFirstRunSetup');
},
prewarmSubtitleDictionaries: async () => {
calls.push('prewarmSubtitleDictionaries');
},
@@ -42,12 +51,13 @@ function makeDeps(overrides: Partial<AppReadyRuntimeDeps> = {}) {
},
texthookerOnlyMode: false,
shouldAutoInitializeOverlayRuntimeFromConfig: () => true,
setVisibleOverlayVisible: (visible) => calls.push(`setVisibleOverlayVisible:${visible}`),
initializeOverlayRuntime: () => calls.push('initializeOverlayRuntime'),
handleInitialArgs: () => calls.push('handleInitialArgs'),
logDebug: (message) => calls.push(`debug:${message}`),
now: () => 1000,
...overrides,
};
} as AppReadyRuntimeDeps;
return { deps, calls };
}
@@ -56,8 +66,14 @@ test('runAppReadyRuntime starts websocket in auto mode when plugin missing', asy
hasMpvWebsocketPlugin: () => false,
});
await runAppReadyRuntime(deps);
assert.ok(calls.includes('ensureDefaultConfigBootstrap'));
assert.ok(calls.includes('startSubtitleWebsocket:9001'));
assert.ok(calls.includes('startAnnotationWebsocket:6678'));
assert.ok(calls.includes('setVisibleOverlayVisible:true'));
assert.ok(calls.includes('initializeOverlayRuntime'));
assert.ok(
calls.indexOf('setVisibleOverlayVisible:true') < calls.indexOf('initializeOverlayRuntime'),
);
assert.ok(calls.includes('startBackgroundWarmups'));
assert.ok(
calls.includes(
@@ -66,6 +82,46 @@ test('runAppReadyRuntime starts websocket in auto mode when plugin missing', asy
);
});
test('runAppReadyRuntime starts texthooker on startup when enabled in config', async () => {
const { deps, calls } = makeDeps({
getResolvedConfig: () => ({
websocket: { enabled: 'auto' },
secondarySub: {},
texthooker: { launchAtStartup: true },
}),
});
await runAppReadyRuntime(deps);
assert.ok(calls.includes('startTexthooker:5174:ws://127.0.0.1:6678'));
assert.ok(calls.indexOf('handleFirstRunSetup') < calls.indexOf('handleInitialArgs'));
assert.ok(
calls.indexOf('createMpvClient') < calls.indexOf('startTexthooker:5174:ws://127.0.0.1:6678'),
);
assert.ok(
calls.indexOf('startTexthooker:5174:ws://127.0.0.1:6678') < calls.indexOf('handleInitialArgs'),
);
});
test('runAppReadyRuntime keeps annotation websocket enabled when regular websocket auto-skips', async () => {
const { deps, calls } = makeDeps({
getResolvedConfig: () => ({
websocket: { enabled: 'auto' },
annotationWebsocket: { enabled: true, port: 6678 },
secondarySub: {},
texthooker: { launchAtStartup: true },
}),
hasMpvWebsocketPlugin: () => true,
});
await runAppReadyRuntime(deps);
assert.equal(calls.includes('startSubtitleWebsocket:9001'), false);
assert.ok(calls.includes('startAnnotationWebsocket:6678'));
assert.ok(calls.includes('startTexthooker:5174:ws://127.0.0.1:6678'));
assert.ok(calls.includes('log:mpv_websocket detected, skipping built-in WebSocket server'));
});
test('runAppReadyRuntime skips heavy startup when shouldSkipHeavyStartup returns true', async () => {
const { deps, calls } = makeDeps({
shouldSkipHeavyStartup: () => true,
@@ -97,6 +153,7 @@ test('runAppReadyRuntime skips heavy startup when shouldSkipHeavyStartup returns
await runAppReadyRuntime(deps);
assert.equal(calls.includes('ensureDefaultConfigBootstrap'), true);
assert.equal(calls.includes('reloadConfig'), false);
assert.equal(calls.includes('getResolvedConfig'), false);
assert.equal(calls.includes('getConfigWarnings'), false);
@@ -111,7 +168,10 @@ test('runAppReadyRuntime skips heavy startup when shouldSkipHeavyStartup returns
assert.equal(calls.includes('logConfigWarning'), false);
assert.equal(calls.includes('handleInitialArgs'), true);
assert.equal(calls.includes('loadYomitanExtension'), true);
assert.equal(calls.includes('handleFirstRunSetup'), true);
assert.ok(calls.indexOf('loadYomitanExtension') < calls.indexOf('handleInitialArgs'));
assert.ok(calls.indexOf('loadYomitanExtension') < calls.indexOf('handleFirstRunSetup'));
assert.ok(calls.indexOf('handleFirstRunSetup') < calls.indexOf('handleInitialArgs'));
});
test('runAppReadyRuntime skips Jellyfin remote startup when dependency is not wired', async () => {

View File

@@ -11,6 +11,7 @@ function makeArgs(overrides: Partial<CliArgs> = {}): CliArgs {
toggle: false,
toggleVisibleOverlay: false,
settings: false,
setup: false,
show: false,
hide: false,
showVisibleOverlay: false,
@@ -96,6 +97,9 @@ function createDeps(overrides: Partial<CliCommandServiceDeps> = {}) {
openYomitanSettingsDelayed: (delayMs) => {
calls.push(`openYomitanSettingsDelayed:${delayMs}`);
},
openFirstRunSetup: () => {
calls.push('openFirstRunSetup');
},
setVisibleOverlayVisible: (visible) => {
calls.push(`setVisibleOverlayVisible:${visible}`);
},
@@ -229,6 +233,16 @@ test('handleCliCommand processes --start for second-instance when overlay runtim
);
});
// --setup routes to the first-run setup window and must NOT fall through to
// the --settings branch (delayed Yomitan settings open).
test('handleCliCommand opens first-run setup window for --setup', () => {
const { deps, calls } = createDeps();
handleCliCommand(makeArgs({ setup: true }), 'initial', deps);
assert.ok(calls.includes('openFirstRunSetup'));
assert.ok(calls.includes('log:Opened first-run setup flow.'));
assert.equal(calls.includes('openYomitanSettingsDelayed:1000'), false);
});
test('handleCliCommand applies cli log level for second-instance commands', () => {
const { deps, calls } = createDeps({
setLogLevel: (level) => {

View File

@@ -17,6 +17,7 @@ export interface CliCommandServiceDeps {
isOverlayRuntimeInitialized: () => boolean;
initializeOverlayRuntime: () => void;
toggleVisibleOverlay: () => void;
openFirstRunSetup: () => void;
openYomitanSettingsDelayed: (delayMs: number) => void;
setVisibleOverlayVisible: (visible: boolean) => void;
copyCurrentSubtitle: () => void;
@@ -115,6 +116,7 @@ interface MiningCliRuntime {
}
interface UiCliRuntime {
openFirstRunSetup: () => void;
openYomitanSettings: () => void;
cycleSecondarySubMode: () => void;
openRuntimeOptionsPalette: () => void;
@@ -195,6 +197,7 @@ export function createCliCommandDepsRuntime(
isOverlayRuntimeInitialized: options.overlay.isInitialized,
initializeOverlayRuntime: options.overlay.initialize,
toggleVisibleOverlay: options.overlay.toggleVisible,
openFirstRunSetup: options.ui.openFirstRunSetup,
openYomitanSettingsDelayed: (delayMs) => {
options.schedule(() => {
options.ui.openYomitanSettings();
@@ -258,7 +261,8 @@ export function handleCliCommand(
const ignoreSecondInstanceStart =
source === 'second-instance' && args.start && deps.isOverlayRuntimeInitialized();
const shouldStart = (!ignoreSecondInstanceStart && args.start) || args.toggle || args.toggleVisibleOverlay;
const shouldStart =
(!ignoreSecondInstanceStart && args.start) || args.toggle || args.toggleVisibleOverlay;
const needsOverlayRuntime = commandNeedsOverlayRuntime(args);
const shouldInitializeOverlayRuntime = needsOverlayRuntime || args.start;
@@ -298,6 +302,9 @@ export function handleCliCommand(
if (args.toggle || args.toggleVisibleOverlay) {
deps.toggleVisibleOverlay();
} else if (args.setup) {
deps.openFirstRunSetup();
deps.log('Opened first-run setup flow.');
} else if (args.settings) {
deps.openYomitanSettingsDelayed(1000);
} else if (args.show || args.showVisibleOverlay) {

View File

@@ -38,6 +38,7 @@ function createOptions(overrides: Partial<Parameters<typeof handleMpvCommandFrom
mpvSendCommand: (command) => {
sentCommands.push(command);
},
resolveProxyCommandOsd: async () => null,
isMpvConnected: () => true,
hasRuntimeOptionsManager: () => true,
...overrides,
@@ -52,30 +53,39 @@ test('handleMpvCommandFromIpc forwards regular mpv commands', () => {
assert.deepEqual(osd, []);
});
test('handleMpvCommandFromIpc emits osd for subtitle position keybinding proxies', () => {
test('handleMpvCommandFromIpc emits osd for subtitle position keybinding proxies', async () => {
const { options, sentCommands, osd } = createOptions();
handleMpvCommandFromIpc(['add', 'sub-pos', 1], options);
await new Promise((resolve) => setImmediate(resolve));
assert.deepEqual(sentCommands, [['add', 'sub-pos', 1]]);
assert.deepEqual(osd, ['Subtitle position: ${sub-pos}']);
});
test('handleMpvCommandFromIpc emits osd for primary subtitle track keybinding proxies', () => {
const { options, sentCommands, osd } = createOptions();
test('handleMpvCommandFromIpc emits resolved osd for primary subtitle track keybinding proxies', async () => {
const { options, sentCommands, osd } = createOptions({
resolveProxyCommandOsd: async () => 'Subtitle track: Internal #3 - Japanese (active)',
});
handleMpvCommandFromIpc(['cycle', 'sid'], options);
await new Promise((resolve) => setImmediate(resolve));
assert.deepEqual(sentCommands, [['cycle', 'sid']]);
assert.deepEqual(osd, ['Subtitle track: ${sid}']);
assert.deepEqual(osd, ['Subtitle track: Internal #3 - Japanese (active)']);
});
test('handleMpvCommandFromIpc emits osd for secondary subtitle track keybinding proxies', () => {
const { options, sentCommands, osd } = createOptions();
test('handleMpvCommandFromIpc emits resolved osd for secondary subtitle track keybinding proxies', async () => {
const { options, sentCommands, osd } = createOptions({
resolveProxyCommandOsd: async () =>
'Secondary subtitle track: External #8 - English Commentary',
});
handleMpvCommandFromIpc(['set_property', 'secondary-sid', 'auto'], options);
await new Promise((resolve) => setImmediate(resolve));
assert.deepEqual(sentCommands, [['set_property', 'secondary-sid', 'auto']]);
assert.deepEqual(osd, ['Secondary subtitle track: ${secondary-sid}']);
assert.deepEqual(osd, ['Secondary subtitle track: External #8 - English Commentary']);
});
test('handleMpvCommandFromIpc emits osd for subtitle delay keybinding proxies', () => {
test('handleMpvCommandFromIpc emits osd for subtitle delay keybinding proxies', async () => {
const { options, sentCommands, osd } = createOptions();
handleMpvCommandFromIpc(['add', 'sub-delay', 0.1], options);
await new Promise((resolve) => setImmediate(resolve));
assert.deepEqual(sentCommands, [['add', 'sub-delay', 0.1]]);
assert.deepEqual(osd, ['Subtitle delay: ${sub-delay}']);
});

View File

@@ -23,6 +23,7 @@ export interface HandleMpvCommandFromIpcOptions {
mpvPlayNextSubtitle: () => void;
shiftSubDelayToAdjacentSubtitle: (direction: 'next' | 'previous') => Promise<void>;
mpvSendCommand: (command: (string | number)[]) => void;
resolveProxyCommandOsd?: (command: (string | number)[]) => Promise<string | null>;
isMpvConnected: () => boolean;
hasRuntimeOptionsManager: () => boolean;
}
@@ -36,7 +37,7 @@ const MPV_PROPERTY_COMMANDS = new Set([
'multiply',
]);
function resolveProxyCommandOsd(command: (string | number)[]): string | null {
function resolveProxyCommandOsdTemplate(command: (string | number)[]): string | null {
const operation = typeof command[0] === 'string' ? command[0] : '';
const property = typeof command[1] === 'string' ? command[1] : '';
if (!MPV_PROPERTY_COMMANDS.has(operation)) return null;
@@ -55,6 +56,25 @@ function resolveProxyCommandOsd(command: (string | number)[]): string | null {
return null;
}
/**
 * Shows an OSD message for a proxied mpv property command.
 *
 * Looks up the placeholder template for the command; when no template matches
 * the command produces no OSD. Otherwise the optional async resolver is asked
 * for a human-readable message, falling back to the raw template when the
 * resolver is absent, returns a falsy value, or throws. The async work is
 * deliberately fire-and-forget.
 */
function showResolvedProxyCommandOsd(
  command: (string | number)[],
  options: HandleMpvCommandFromIpcOptions,
): void {
  const fallbackTemplate = resolveProxyCommandOsdTemplate(command);
  if (!fallbackTemplate) {
    return;
  }
  // Fire-and-forget: OSD display must never block command handling.
  void (async () => {
    try {
      const resolvedText = await options.resolveProxyCommandOsd?.(command);
      options.showMpvOsd(resolvedText || fallbackTemplate);
    } catch {
      // Best-effort resolution; the template still carries useful info.
      options.showMpvOsd(fallbackTemplate);
    }
  })();
}
export function handleMpvCommandFromIpc(
command: (string | number)[],
options: HandleMpvCommandFromIpcOptions,
@@ -103,10 +123,7 @@ export function handleMpvCommandFromIpc(
options.mpvPlayNextSubtitle();
} else {
options.mpvSendCommand(command);
const osd = resolveProxyCommandOsd(command);
if (osd) {
options.showMpvOsd(osd);
}
showResolvedProxyCommandOsd(command, options);
}
}
}

View File

@@ -22,6 +22,22 @@ test('showMpvOsdRuntime sends show-text when connected', () => {
assert.deepEqual(commands, [['show-text', 'hello', '3000']]);
});
test('showMpvOsdRuntime enables property expansion for placeholder-based messages', () => {
const commands: (string | number)[][] = [];
showMpvOsdRuntime(
{
connected: true,
send: ({ command }) => {
commands.push(command);
},
},
'Subtitle delay: ${sub-delay}',
);
assert.deepEqual(commands, [
['expand-properties', 'show-text', 'Subtitle delay: ${sub-delay}', '3000'],
]);
});
test('showMpvOsdRuntime logs fallback when disconnected', () => {
const logs: string[] = [];
showMpvOsdRuntime(

View File

@@ -53,7 +53,10 @@ export function showMpvOsdRuntime(
fallbackLog: (text: string) => void = (line) => logger.info(line),
): void {
if (mpvClient && mpvClient.connected) {
mpvClient.send({ command: ['show-text', text, '3000'] });
const command = text.includes('${')
? ['expand-properties', 'show-text', text, '3000']
: ['show-text', text, '3000'];
mpvClient.send({ command });
return;
}
fallbackLog(`OSD (MPV not connected): ${text}`);

View File

@@ -11,6 +11,7 @@ function makeArgs(overrides: Partial<CliArgs> = {}): CliArgs {
toggle: false,
toggleVisibleOverlay: false,
settings: false,
setup: false,
show: false,
hide: false,
showVisibleOverlay: false,

View File

@@ -69,6 +69,13 @@ export function runStartupBootstrapRuntime(
}
interface AppReadyConfigLike {
annotationWebsocket?: {
enabled?: boolean;
port?: number;
};
texthooker?: {
launchAtStartup?: boolean;
};
secondarySub?: {
defaultMode?: SecondarySubMode;
};
@@ -92,6 +99,7 @@ interface AppReadyConfigLike {
}
export interface AppReadyRuntimeDeps {
ensureDefaultConfigBootstrap: () => void;
loadSubtitlePosition: () => void;
resolveKeybindings: () => void;
createMpvClient: () => void;
@@ -104,18 +112,24 @@ export interface AppReadyRuntimeDeps {
setSecondarySubMode: (mode: SecondarySubMode) => void;
defaultSecondarySubMode: SecondarySubMode;
defaultWebsocketPort: number;
defaultAnnotationWebsocketPort: number;
defaultTexthookerPort: number;
hasMpvWebsocketPlugin: () => boolean;
startSubtitleWebsocket: (port: number) => void;
startAnnotationWebsocket: (port: number) => void;
startTexthooker: (port: number, websocketUrl?: string) => void;
log: (message: string) => void;
createMecabTokenizerAndCheck: () => Promise<void>;
createSubtitleTimingTracker: () => void;
createImmersionTracker?: () => void;
startJellyfinRemoteSession?: () => Promise<void>;
loadYomitanExtension: () => Promise<void>;
handleFirstRunSetup: () => Promise<void>;
prewarmSubtitleDictionaries?: () => Promise<void>;
startBackgroundWarmups: () => void;
texthookerOnlyMode: boolean;
shouldAutoInitializeOverlayRuntimeFromConfig: () => boolean;
setVisibleOverlayVisible: (visible: boolean) => void;
initializeOverlayRuntime: () => void;
handleInitialArgs: () => void;
logDebug?: (message: string) => void;
@@ -168,8 +182,10 @@ export function isAutoUpdateEnabledRuntime(
export async function runAppReadyRuntime(deps: AppReadyRuntimeDeps): Promise<void> {
const now = deps.now ?? (() => Date.now());
const startupStartedAtMs = now();
deps.ensureDefaultConfigBootstrap();
if (deps.shouldSkipHeavyStartup?.()) {
await deps.loadYomitanExtension();
await deps.handleFirstRunSetup();
deps.handleInitialArgs();
return;
}
@@ -178,6 +194,7 @@ export async function runAppReadyRuntime(deps: AppReadyRuntimeDeps): Promise<voi
if (deps.shouldSkipHeavyStartup?.()) {
await deps.loadYomitanExtension();
await deps.handleFirstRunSetup();
deps.handleInitialArgs();
deps.logDebug?.(`App-ready critical path finished in ${now() - startupStartedAtMs}ms.`);
return;
@@ -209,6 +226,11 @@ export async function runAppReadyRuntime(deps: AppReadyRuntimeDeps): Promise<voi
const wsConfig = config.websocket || {};
const wsEnabled = wsConfig.enabled ?? 'auto';
const wsPort = wsConfig.port || deps.defaultWebsocketPort;
const annotationWsConfig = config.annotationWebsocket || {};
const annotationWsEnabled = annotationWsConfig.enabled !== false;
const annotationWsPort = annotationWsConfig.port || deps.defaultAnnotationWebsocketPort;
const texthookerPort = deps.defaultTexthookerPort;
let texthookerWebsocketUrl: string | undefined;
if (wsEnabled === true || (wsEnabled === 'auto' && !deps.hasMpvWebsocketPlugin())) {
deps.startSubtitleWebsocket(wsPort);
@@ -216,6 +238,17 @@ export async function runAppReadyRuntime(deps: AppReadyRuntimeDeps): Promise<voi
deps.log('mpv_websocket detected, skipping built-in WebSocket server');
}
if (annotationWsEnabled) {
deps.startAnnotationWebsocket(annotationWsPort);
texthookerWebsocketUrl = `ws://127.0.0.1:${annotationWsPort}`;
} else if (wsEnabled === true || (wsEnabled === 'auto' && !deps.hasMpvWebsocketPlugin())) {
texthookerWebsocketUrl = `ws://127.0.0.1:${wsPort}`;
}
if (config.texthooker?.launchAtStartup !== false) {
deps.startTexthooker(texthookerPort, texthookerWebsocketUrl);
}
deps.createSubtitleTimingTracker();
if (deps.createImmersionTracker) {
deps.log('Runtime ready: immersion tracker startup deferred until first media activity.');
@@ -226,11 +259,14 @@ export async function runAppReadyRuntime(deps: AppReadyRuntimeDeps): Promise<voi
if (deps.texthookerOnlyMode) {
deps.log('Texthooker-only mode enabled; skipping overlay window.');
} else if (deps.shouldAutoInitializeOverlayRuntimeFromConfig()) {
deps.setVisibleOverlayVisible(true);
deps.initializeOverlayRuntime();
} else {
deps.log('Overlay runtime deferred: waiting for explicit overlay command.');
}
await deps.loadYomitanExtension();
await deps.handleFirstRunSetup();
deps.handleInitialArgs();
deps.logDebug?.(`App-ready critical path finished in ${now() - startupStartedAtMs}ms.`);
}

View File

@@ -1,6 +1,10 @@
import test from 'node:test';
import assert from 'node:assert/strict';
import { serializeSubtitleMarkup, serializeSubtitleWebsocketMessage } from './subtitle-ws';
import {
serializeInitialSubtitleWebsocketMessage,
serializeSubtitleMarkup,
serializeSubtitleWebsocketMessage,
} from './subtitle-ws';
import { PartOfSpeech, type SubtitleData } from '../../types';
const frequencyOptions = {
@@ -78,6 +82,51 @@ test('serializeSubtitleMarkup includes known, n+1, jlpt, and frequency classes',
assert.match(markup, /word word-frequency-band-1/);
});
// Markup serialization must (a) emit data-* tooltip attributes for reading,
// headword, frequency rank and JLPT level, and (b) give name-match coloring
// precedence such that it is never combined with word-known on one span.
test('serializeSubtitleMarkup preserves tooltip attrs and name-match precedence', () => {
const payload: SubtitleData = {
text: 'ignored',
tokens: [
{
surface: '無事',
reading: 'ぶじ',
headword: '無事',
startPos: 0,
endPos: 2,
partOfSpeech: PartOfSpeech.other,
isMerged: false,
isKnown: true,
isNPlusOneTarget: false,
jlptLevel: 'N2',
frequencyRank: 745,
},
{
surface: 'アレクシア',
reading: 'あれくしあ',
headword: 'アレクシア',
startPos: 2,
endPos: 7,
partOfSpeech: PartOfSpeech.other,
isMerged: false,
isKnown: false,
isNPlusOneTarget: false,
isNameMatch: true,
frequencyRank: 12,
},
],
};
const markup = serializeSubtitleMarkup(payload, frequencyOptions);
// Known word: known + JLPT classes plus all four data attributes.
assert.match(
markup,
/<span class="word word-known word-jlpt-n2" data-reading="ぶじ" data-headword="無事" data-frequency-rank="745" data-jlpt-level="N2">無事<\/span>/,
);
// Name match: only the name-match class; frequency class suppressed.
assert.match(
markup,
/<span class="word word-name-match" data-reading="あれくしあ" data-headword="アレクシア" data-frequency-rank="12">アレクシア<\/span>/,
);
// The two coloring classes are mutually exclusive in either order.
assert.doesNotMatch(markup, /word-name-match word-known|word-known word-name-match/);
});
test('serializeSubtitleWebsocketMessage emits sentence payload', () => {
const payload: SubtitleData = {
text: '字幕',
@@ -85,5 +134,101 @@ test('serializeSubtitleWebsocketMessage emits sentence payload', () => {
};
const raw = serializeSubtitleWebsocketMessage(payload, frequencyOptions);
assert.deepEqual(JSON.parse(raw), { sentence: '字幕' });
assert.deepEqual(JSON.parse(raw), {
version: 1,
text: '字幕',
sentence: '字幕',
tokens: [],
});
});
// The websocket message is a versioned envelope (version 1) carrying raw
// text, annotated HTML sentence markup, and a structured token array with
// precomputed className and display labels for API consumers.
test('serializeSubtitleWebsocketMessage emits structured token api payload', () => {
const payload: SubtitleData = {
text: '無事',
tokens: [
{
surface: '無事',
reading: 'ぶじ',
headword: '無事',
startPos: 0,
endPos: 2,
partOfSpeech: PartOfSpeech.other,
isMerged: false,
isKnown: true,
isNPlusOneTarget: false,
jlptLevel: 'N2',
frequencyRank: 745,
},
],
};
const raw = serializeSubtitleWebsocketMessage(payload, frequencyOptions);
assert.deepEqual(JSON.parse(raw), {
version: 1,
text: '無事',
sentence:
'<span class="word word-known word-jlpt-n2" data-reading="ぶじ" data-headword="無事" data-frequency-rank="745" data-jlpt-level="N2">無事</span>',
tokens: [
{
surface: '無事',
reading: 'ぶじ',
headword: '無事',
startPos: 0,
endPos: 2,
partOfSpeech: PartOfSpeech.other,
isMerged: false,
isKnown: true,
isNPlusOneTarget: false,
isNameMatch: false,
jlptLevel: 'N2',
frequencyRank: 745,
className: 'word word-known word-jlpt-n2',
frequencyRankLabel: '745',
jlptLevelLabel: 'N2',
},
],
});
});
// The initial frame sent to a newly-connected client must reuse the full
// annotated serialization (classes + label fields), not a plain-text echo.
// Missing optional token fields normalize to false/null on the wire.
test('serializeInitialSubtitleWebsocketMessage keeps annotated current subtitle content', () => {
const payload: SubtitleData = {
text: 'ignored fallback',
tokens: [
{
surface: '既知',
reading: '',
headword: '',
startPos: 0,
endPos: 2,
partOfSpeech: PartOfSpeech.other,
isMerged: false,
isKnown: true,
isNPlusOneTarget: false,
},
],
};
const raw = serializeInitialSubtitleWebsocketMessage(payload, frequencyOptions);
assert.deepEqual(JSON.parse(raw ?? ''), {
version: 1,
text: 'ignored fallback',
sentence: '<span class="word word-known">既知</span>',
tokens: [
{
surface: '既知',
reading: '',
headword: '',
startPos: 0,
endPos: 2,
partOfSpeech: PartOfSpeech.other,
isMerged: false,
isKnown: true,
isNPlusOneTarget: false,
isNameMatch: false,
className: 'word word-known',
frequencyRankLabel: null,
jlptLevelLabel: null,
},
],
});
});

View File

@@ -18,6 +18,26 @@ export type SubtitleWebsocketFrequencyOptions = {
mode: 'single' | 'banded';
};
type SerializedSubtitleToken = Pick<
MergedToken,
| 'surface'
| 'reading'
| 'headword'
| 'startPos'
| 'endPos'
| 'partOfSpeech'
| 'isMerged'
| 'isKnown'
| 'isNPlusOneTarget'
| 'frequencyRank'
| 'jlptLevel'
> & {
isNameMatch: boolean;
className: string;
frequencyRankLabel: string | null;
jlptLevelLabel: string | null;
};
function escapeHtml(text: string): string {
return text
.replaceAll('&', '&amp;')
@@ -46,11 +66,29 @@ function computeFrequencyClass(
return 'word-frequency-single';
}
/**
 * Returns the display label for a token's frequency rank.
 *
 * Yields null when frequency display is disabled, when the rank is missing or
 * non-finite, or when the rank falls outside the configured top-X window.
 * Both rank and topX are floored and clamped to at least 1 before comparison.
 */
function getFrequencyRankLabel(
  token: MergedToken,
  options: SubtitleWebsocketFrequencyOptions,
): string | null {
  if (!options.enabled) {
    return null;
  }
  const rawRank = token.frequencyRank;
  if (typeof rawRank !== 'number' || !Number.isFinite(rawRank)) {
    return null;
  }
  // Clamp so fractional or zero values from loose configs stay sane.
  const clampedRank = Math.max(1, Math.floor(rawRank));
  const limit = Math.max(1, Math.floor(options.topX));
  if (clampedRank > limit) {
    return null;
  }
  return String(clampedRank);
}
/** Returns the token's JLPT level, normalizing a missing value to null. */
function getJlptLevelLabel(token: MergedToken): string | null {
  const level = token.jlptLevel;
  return level == null ? null : level;
}
function computeWordClass(token: MergedToken, options: SubtitleWebsocketFrequencyOptions): string {
const classes = ['word'];
if (token.isNPlusOneTarget) {
classes.push('word-n-plus-one');
} else if (token.isNameMatch) {
classes.push('word-name-match');
} else if (token.isKnown) {
classes.push('word-known');
}
@@ -59,7 +97,7 @@ function computeWordClass(token: MergedToken, options: SubtitleWebsocketFrequenc
classes.push(`word-jlpt-${token.jlptLevel.toLowerCase()}`);
}
if (!token.isKnown && !token.isNPlusOneTarget) {
if (!token.isKnown && !token.isNPlusOneTarget && !token.isNameMatch) {
const frequencyClass = computeFrequencyClass(token, options);
if (frequencyClass) {
classes.push(frequencyClass);
@@ -69,6 +107,55 @@ function computeWordClass(token: MergedToken, options: SubtitleWebsocketFrequenc
return classes.join(' ');
}
function serializeWordDataAttributes(
token: MergedToken,
options: SubtitleWebsocketFrequencyOptions,
): string {
const attributes: string[] = [];
if (token.reading) {
attributes.push(`data-reading="${escapeHtml(token.reading)}"`);
}
if (token.headword) {
attributes.push(`data-headword="${escapeHtml(token.headword)}"`);
}
const frequencyRankLabel = getFrequencyRankLabel(token, options);
if (frequencyRankLabel) {
attributes.push(`data-frequency-rank="${escapeHtml(frequencyRankLabel)}"`);
}
const jlptLevelLabel = getJlptLevelLabel(token);
if (jlptLevelLabel) {
attributes.push(`data-jlpt-level="${escapeHtml(jlptLevelLabel)}"`);
}
return attributes.length > 0 ? ` ${attributes.join(' ')}` : '';
}
/**
 * Projects a merged token onto the wire shape sent to websocket clients.
 *
 * Copies the tokenizer fields verbatim and adds three derived values: the
 * precomputed CSS class string plus the frequency-rank and JLPT display
 * labels, so consumers need no client-side annotation logic.
 */
function serializeSubtitleToken(
  token: MergedToken,
  options: SubtitleWebsocketFrequencyOptions,
): SerializedSubtitleToken {
  const serialized: SerializedSubtitleToken = {
    surface: token.surface,
    reading: token.reading,
    headword: token.headword,
    startPos: token.startPos,
    endPos: token.endPos,
    partOfSpeech: token.partOfSpeech,
    isMerged: token.isMerged,
    isKnown: token.isKnown,
    isNPlusOneTarget: token.isNPlusOneTarget,
    // Older tokenizer outputs may omit the flag; normalize to false on the wire.
    isNameMatch: token.isNameMatch ?? false,
    jlptLevel: token.jlptLevel,
    frequencyRank: token.frequencyRank,
    className: computeWordClass(token, options),
    frequencyRankLabel: getFrequencyRankLabel(token, options),
    jlptLevelLabel: getJlptLevelLabel(token),
  };
  return serialized;
}
export function serializeSubtitleMarkup(
payload: SubtitleData,
options: SubtitleWebsocketFrequencyOptions,
@@ -80,11 +167,12 @@ export function serializeSubtitleMarkup(
const chunks: string[] = [];
for (const token of payload.tokens) {
const klass = computeWordClass(token, options);
const attrs = serializeWordDataAttributes(token, options);
const parts = token.surface.split('\n');
for (let index = 0; index < parts.length; index += 1) {
const part = parts[index];
if (part) {
chunks.push(`<span class="${klass}">${escapeHtml(part)}</span>`);
chunks.push(`<span class="${klass}"${attrs}>${escapeHtml(part)}</span>`);
}
if (index < parts.length - 1) {
chunks.push('<br>');
@@ -99,7 +187,23 @@ export function serializeSubtitleWebsocketMessage(
payload: SubtitleData,
options: SubtitleWebsocketFrequencyOptions,
): string {
return JSON.stringify({ sentence: serializeSubtitleMarkup(payload, options) });
return JSON.stringify({
version: 1,
text: payload.text,
sentence: serializeSubtitleMarkup(payload, options),
tokens: payload.tokens?.map((token) => serializeSubtitleToken(token, options)) ?? [],
});
}
/**
 * Serializes the current subtitle for a freshly-connected websocket client.
 *
 * Returns null when there is no payload or the subtitle text is blank, so
 * callers can skip sending an initial frame entirely; otherwise delegates to
 * the standard message serializer.
 */
export function serializeInitialSubtitleWebsocketMessage(
  payload: SubtitleData | null,
  options: SubtitleWebsocketFrequencyOptions,
): string | null {
  if (payload == null || payload.text.trim() === '') {
    return null;
  }
  return serializeSubtitleWebsocketMessage(payload, options);
}
export class SubtitleWebSocket {
@@ -114,7 +218,11 @@ export class SubtitleWebSocket {
return (this.server?.clients.size ?? 0) > 0;
}
public start(port: number, getCurrentSubtitleText: () => string): void {
public start(
port: number,
getCurrentSubtitleData: () => SubtitleData | null,
getFrequencyOptions: () => SubtitleWebsocketFrequencyOptions,
): void {
this.server = new WebSocket.Server({ port, host: '127.0.0.1' });
this.server.on('connection', (ws: WebSocket) => {
@@ -124,9 +232,12 @@ export class SubtitleWebSocket {
return;
}
const currentText = getCurrentSubtitleText();
if (currentText) {
ws.send(JSON.stringify({ sentence: currentText }));
const currentMessage = serializeInitialSubtitleWebsocketMessage(
getCurrentSubtitleData(),
getFrequencyOptions(),
);
if (currentMessage) {
ws.send(currentMessage);
}
});

View File

@@ -0,0 +1,27 @@
import assert from 'node:assert/strict';
import test from 'node:test';
import { injectTexthookerBootstrapHtml } from './texthooker';
// The bootstrap script must land inside <head> (before its close tag) and
// must only pin the websocket URL key — no coloring-preference keys may be
// forced, since those belong to the user's own texthooker settings.
test('injectTexthookerBootstrapHtml injects websocket bootstrap before head close', () => {
const html = '<html><head><title>Texthooker</title></head><body></body></html>';
const actual = injectTexthookerBootstrapHtml(html, 'ws://127.0.0.1:6678');
assert.match(
actual,
/window\.localStorage\.setItem\('bannou-texthooker-websocketUrl', "ws:\/\/127\.0\.0\.1:6678"\)/,
);
assert.ok(actual.indexOf('</script></head>') !== -1);
assert.ok(actual.includes('bannou-texthooker-websocketUrl'));
assert.ok(!actual.includes('bannou-texthooker-enableKnownWordColoring'));
assert.ok(!actual.includes('bannou-texthooker-enableNPlusOneColoring'));
assert.ok(!actual.includes('bannou-texthooker-enableNameMatchColoring'));
assert.ok(!actual.includes('bannou-texthooker-enableFrequencyColoring'));
assert.ok(!actual.includes('bannou-texthooker-enableJlptColoring'));
});
// Without a websocket URL the HTML must pass through byte-identical.
test('injectTexthookerBootstrapHtml leaves html unchanged without websocketUrl', () => {
const html = '<html><head></head><body></body></html>';
assert.equal(injectTexthookerBootstrapHtml(html), html);
});

View File

@@ -5,6 +5,22 @@ import { createLogger } from '../../logger';
const logger = createLogger('main:texthooker');
/**
 * Injects a bootstrap <script> that pins the texthooker UI's websocket URL
 * into localStorage before the app's own scripts run.
 *
 * @param html - The texthooker index.html contents.
 * @param websocketUrl - Websocket URL to pin; when omitted/empty the HTML is
 *   returned unchanged.
 * @returns HTML with the bootstrap script inserted just before </head>, or
 *   prepended to the document when no </head> tag exists.
 */
export function injectTexthookerBootstrapHtml(html: string, websocketUrl?: string): string {
  if (!websocketUrl) {
    return html;
  }
  // JSON.stringify safely quotes and escapes the URL for embedding in JS source.
  const bootstrapScript = `<script>window.localStorage.setItem('bannou-texthooker-websocketUrl', ${JSON.stringify(
    websocketUrl,
  )});</script>`;
  if (html.includes('</head>')) {
    // Use a function replacer: with a plain string, `$`-sequences inside the
    // URL (e.g. "$&", "$'") would be expanded as special replacement patterns
    // by String.replace and corrupt the injected script.
    return html.replace('</head>', () => `${bootstrapScript}</head>`);
  }
  return `${bootstrapScript}${html}`;
}
export class Texthooker {
private server: http.Server | null = null;
@@ -12,7 +28,11 @@ export class Texthooker {
return this.server !== null;
}
public start(port: number): http.Server | null {
public start(port: number, websocketUrl?: string): http.Server | null {
if (this.server) {
return this.server;
}
const texthookerPath = this.getTexthookerPath();
if (!texthookerPath) {
logger.error('texthooker-ui not found');
@@ -42,8 +62,12 @@ export class Texthooker {
res.end('Not found');
return;
}
const responseData =
urlPath === '/' || urlPath === '/index.html'
? Buffer.from(injectTexthookerBootstrapHtml(data.toString('utf-8'), websocketUrl))
: data;
res.writeHead(200, { 'Content-Type': mimeTypes[ext] || 'text/plain' });
res.end(data);
res.end(responseData);
});
});

View File

@@ -24,6 +24,7 @@ interface YomitanTokenInput {
surface: string;
reading?: string;
headword?: string;
isNameMatch?: boolean;
}
function makeDepsFromYomitanTokens(
@@ -53,6 +54,7 @@ function makeDepsFromYomitanTokens(
headword: token.headword ?? token.surface,
startPos,
endPos,
isNameMatch: token.isNameMatch ?? false,
};
});
},
@@ -115,6 +117,20 @@ test('tokenizeSubtitle assigns JLPT level to parsed Yomitan tokens', async () =>
assert.equal(result.tokens?.[0]?.jlptLevel, 'N5');
});
// Name-dictionary hits reported by Yomitan must survive tokenization as an
// isNameMatch flag, defaulting to false for ordinary tokens.
test('tokenizeSubtitle preserves Yomitan name-match metadata on tokens', async () => {
const result = await tokenizeSubtitle(
'アクアです',
makeDepsFromYomitanTokens([
{ surface: 'アクア', reading: 'あくあ', headword: 'アクア', isNameMatch: true },
{ surface: 'です', reading: 'です', headword: 'です' },
]),
);
assert.equal(result.tokens?.length, 2);
assert.equal((result.tokens?.[0] as { isNameMatch?: boolean } | undefined)?.isNameMatch, true);
assert.equal((result.tokens?.[1] as { isNameMatch?: boolean } | undefined)?.isNameMatch, false);
});
test('tokenizeSubtitle caches JLPT lookups across repeated tokens', async () => {
let lookupCalls = 0;
const result = await tokenizeSubtitle(
@@ -1235,6 +1251,30 @@ test('tokenizeSubtitle normalizes newlines before Yomitan parse request', async
assert.equal(result.tokens, null);
});
// Zero-width separators (U+200B etc.) must be collapsed to ordinary spaces in
// the text handed to the Yomitan parser, while the returned display text keeps
// the original (unnormalized) subtitle verbatim.
test('tokenizeSubtitle collapses zero-width separators before Yomitan parse request', async () => {
let parseInput = '';
const result = await tokenizeSubtitle(
'キリキリと\u200bかかってこい\nこのヘナチョコ冒険者どもめが',
makeDeps({
getYomitanExt: () => ({ id: 'dummy-ext' }) as any,
getYomitanParserWindow: () =>
({
isDestroyed: () => false,
webContents: {
// Capture the script sent to the parser window instead of executing it.
executeJavaScript: async (script: string) => {
parseInput = script;
return null;
},
},
}) as unknown as Electron.BrowserWindow,
}),
);
assert.match(parseInput, /キリキリと かかってこい このヘナチョコ冒険者どもめが!/);
assert.equal(result.text, 'キリキリと\u200bかかってこい\nこのヘナチョコ冒険者どもめが');
// Parser returned null, so no tokens are produced.
assert.equal(result.tokens, null);
});
test('tokenizeSubtitle returns null tokens when Yomitan parsing is unavailable', async () => {
const result = await tokenizeSubtitle('猫です', makeDeps());
@@ -1821,9 +1861,9 @@ test('tokenizeSubtitle keeps parsing explicit by scanning-parser source only', a
assert.equal(result.tokens?.[4]?.frequencyRank, 1500);
});
test('tokenizeSubtitle still assigns frequency to non-known Yomitan tokens', async () => {
test('tokenizeSubtitle still assigns frequency to non-known multi-character Yomitan tokens', async () => {
const result = await tokenizeSubtitle(
'小園',
'小園友達',
makeDeps({
getYomitanExt: () => ({ id: 'dummy-ext' }) as any,
getYomitanParserWindow: () =>
@@ -1844,9 +1884,9 @@ test('tokenizeSubtitle still assigns frequency to non-known Yomitan tokens', asy
],
[
{
text: '',
reading: '',
headwords: [[{ term: '' }]],
text: '友達',
reading: 'ともだち',
headwords: [[{ term: '友達' }]],
},
],
],
@@ -1855,7 +1895,7 @@ test('tokenizeSubtitle still assigns frequency to non-known Yomitan tokens', asy
},
}) as unknown as Electron.BrowserWindow,
getFrequencyDictionaryEnabled: () => true,
getFrequencyRank: (text) => (text === '小園' ? 75 : text === '' ? 3000 : null),
getFrequencyRank: (text) => (text === '小園' ? 75 : text === '友達' ? 3000 : null),
isKnownWord: (text) => text === '小園',
}),
);
@@ -2595,6 +2635,21 @@ test('tokenizeSubtitle excludes default non-independent pos2 from N+1 and freque
assert.equal(result.tokens?.[0]?.isNPlusOneTarget, false);
});
// Single-kana tokens are too ambiguous for frequency highlighting: even with
// a dictionary hit (rank 17) the token must carry no frequencyRank.
test('tokenizeSubtitle excludes single-kana merged tokens from frequency highlighting', async () => {
const result = await tokenizeSubtitle(
'た',
makeDepsFromYomitanTokens([{ surface: 'た', reading: 'た', headword: 'た' }], {
getFrequencyDictionaryEnabled: () => true,
getFrequencyRank: (text) => (text === 'た' ? 17 : null),
getMinSentenceWordsForNPlusOne: () => 1,
tokenizeWithMecab: async () => null,
}),
);
assert.equal(result.tokens?.length, 1);
assert.equal(result.tokens?.[0]?.frequencyRank, undefined);
});
test('tokenizeSubtitle excludes merged function/content token from frequency highlighting but keeps N+1', async () => {
const result = await tokenizeSubtitle(
'になれば',

View File

@@ -44,6 +44,7 @@ export interface TokenizerServiceDeps {
getJlptLevel: (text: string) => JlptLevel | null;
getNPlusOneEnabled?: () => boolean;
getJlptEnabled?: () => boolean;
getNameMatchEnabled?: () => boolean;
getFrequencyDictionaryEnabled?: () => boolean;
getFrequencyDictionaryMatchMode?: () => FrequencyDictionaryMatchMode;
getFrequencyRank?: FrequencyDictionaryLookup;
@@ -73,6 +74,7 @@ export interface TokenizerDepsRuntimeOptions {
getJlptLevel: (text: string) => JlptLevel | null;
getNPlusOneEnabled?: () => boolean;
getJlptEnabled?: () => boolean;
getNameMatchEnabled?: () => boolean;
getFrequencyDictionaryEnabled?: () => boolean;
getFrequencyDictionaryMatchMode?: () => FrequencyDictionaryMatchMode;
getFrequencyRank?: FrequencyDictionaryLookup;
@@ -85,6 +87,7 @@ export interface TokenizerDepsRuntimeOptions {
interface TokenizerAnnotationOptions {
nPlusOneEnabled: boolean;
jlptEnabled: boolean;
nameMatchEnabled: boolean;
frequencyEnabled: boolean;
frequencyMatchMode: FrequencyDictionaryMatchMode;
minSentenceWordsForNPlusOne: number | undefined;
@@ -106,6 +109,7 @@ const DEFAULT_ANNOTATION_POS1_EXCLUSIONS = resolveAnnotationPos1ExclusionSet(
const DEFAULT_ANNOTATION_POS2_EXCLUSIONS = resolveAnnotationPos2ExclusionSet(
DEFAULT_ANNOTATION_POS2_EXCLUSION_CONFIG,
);
const INVISIBLE_SEPARATOR_PATTERN = /[\u200b\u2060\ufeff]/g;
function getKnownWordLookup(
deps: TokenizerServiceDeps,
@@ -189,6 +193,7 @@ export function createTokenizerDepsRuntime(
getJlptLevel: options.getJlptLevel,
getNPlusOneEnabled: options.getNPlusOneEnabled,
getJlptEnabled: options.getJlptEnabled,
getNameMatchEnabled: options.getNameMatchEnabled,
getFrequencyDictionaryEnabled: options.getFrequencyDictionaryEnabled,
getFrequencyDictionaryMatchMode: options.getFrequencyDictionaryMatchMode ?? (() => 'headword'),
getFrequencyRank: options.getFrequencyRank,
@@ -300,6 +305,7 @@ function normalizeSelectedYomitanTokens(tokens: MergedToken[]): MergedToken[] {
isMerged: token.isMerged ?? true,
isKnown: token.isKnown ?? false,
isNPlusOneTarget: token.isNPlusOneTarget ?? false,
isNameMatch: token.isNameMatch ?? false,
reading: normalizeYomitanMergedReading(token),
}));
}
@@ -459,6 +465,7 @@ function getAnnotationOptions(deps: TokenizerServiceDeps): TokenizerAnnotationOp
return {
nPlusOneEnabled: deps.getNPlusOneEnabled?.() !== false,
jlptEnabled: deps.getJlptEnabled?.() !== false,
nameMatchEnabled: deps.getNameMatchEnabled?.() !== false,
frequencyEnabled: deps.getFrequencyDictionaryEnabled?.() !== false,
frequencyMatchMode: deps.getFrequencyDictionaryMatchMode?.() ?? 'headword',
minSentenceWordsForNPlusOne: deps.getMinSentenceWordsForNPlusOne?.(),
@@ -472,7 +479,9 @@ async function parseWithYomitanInternalParser(
deps: TokenizerServiceDeps,
options: TokenizerAnnotationOptions,
): Promise<MergedToken[] | null> {
const selectedTokens = await requestYomitanScanTokens(text, deps, logger);
const selectedTokens = await requestYomitanScanTokens(text, deps, logger, {
includeNameMatchMetadata: options.nameMatchEnabled,
});
if (!selectedTokens || selectedTokens.length === 0) {
return null;
}
@@ -488,6 +497,7 @@ async function parseWithYomitanInternalParser(
isMerged: true,
isKnown: false,
isNPlusOneTarget: false,
isNameMatch: token.isNameMatch ?? false,
}),
),
);
@@ -563,7 +573,11 @@ export async function tokenizeSubtitle(
return { text, tokens: null };
}
const tokenizeText = displayText.replace(/\n/g, ' ').replace(/\s+/g, ' ').trim();
const tokenizeText = displayText
.replace(INVISIBLE_SEPARATOR_PATTERN, ' ')
.replace(/\n/g, ' ')
.replace(/\s+/g, ' ')
.trim();
const annotationOptions = getAnnotationOptions(deps);
const yomitanTokens = await parseWithYomitanInternalParser(tokenizeText, deps, annotationOptions);

View File

@@ -252,12 +252,12 @@ test('annotateTokens applies configured pos1 exclusions to both frequency and N+
test('annotateTokens allows previously default-excluded pos1 when removed from effective set', () => {
const tokens = [
makeToken({
surface: '',
headword: '',
surface: 'まで',
headword: 'まで',
partOfSpeech: PartOfSpeech.other,
pos1: '助詞',
startPos: 0,
endPos: 1,
endPos: 2,
frequencyRank: 8,
}),
];
@@ -314,6 +314,52 @@ test('annotateTokens excludes likely kana SFX tokens from frequency when POS tag
assert.equal(result[0]?.frequencyRank, undefined);
});
test('annotateTokens excludes single hiragana and katakana tokens from frequency when POS tags are missing', () => {
const tokens = [
makeToken({
surface: 'た',
reading: 'た',
headword: 'た',
pos1: '',
pos2: '',
partOfSpeech: PartOfSpeech.other,
frequencyRank: 21,
startPos: 0,
endPos: 1,
}),
makeToken({
surface: 'ア',
reading: 'ア',
headword: 'ア',
pos1: '',
pos2: '',
partOfSpeech: PartOfSpeech.other,
frequencyRank: 22,
startPos: 1,
endPos: 2,
}),
makeToken({
surface: '山',
reading: 'やま',
headword: '山',
pos1: '',
pos2: '',
partOfSpeech: PartOfSpeech.other,
frequencyRank: 23,
startPos: 2,
endPos: 3,
}),
];
const result = annotateTokens(tokens, makeDeps(), {
minSentenceWordsForNPlusOne: 1,
});
assert.equal(result[0]?.frequencyRank, undefined);
assert.equal(result[1]?.frequencyRank, undefined);
assert.equal(result[2]?.frequencyRank, 23);
});
test('annotateTokens keeps frequency when mecab tags classify token as content-bearing', () => {
const tokens = [
makeToken({

View File

@@ -103,6 +103,10 @@ function isFrequencyExcludedByPos(
pos1Exclusions: ReadonlySet<string>,
pos2Exclusions: ReadonlySet<string>,
): boolean {
if (isSingleKanaFrequencyNoiseToken(token.surface)) {
return true;
}
const normalizedPos1 = normalizePos1Tag(token.pos1);
const hasPos1 = normalizedPos1.length > 0;
if (isExcludedByTagSet(normalizedPos1, pos1Exclusions)) {
@@ -363,6 +367,20 @@ function isLikelyFrequencyNoiseToken(token: MergedToken): boolean {
return false;
}
/**
 * True when `text` trims down to exactly one kana code point; such single-kana
 * fragments are treated as frequency-annotation noise by the caller.
 */
function isSingleKanaFrequencyNoiseToken(text: string | undefined): boolean {
  if (typeof text !== 'string') {
    return false;
  }
  const trimmed = text.trim();
  if (trimmed.length === 0) {
    return false;
  }
  // Spread to code points so surrogate pairs are not miscounted.
  const codePoints = [...trimmed];
  if (codePoints.length !== 1) {
    return false;
  }
  return isKanaChar(codePoints[0]!);
}
function isJlptEligibleToken(token: MergedToken): boolean {
if (token.pos1 && shouldIgnoreJlptForMecabPos1(token.pos1)) {
return false;

View File

@@ -3,12 +3,12 @@ import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';
import test from 'node:test';
import * as vm from 'node:vm';
import {
getYomitanDictionaryInfo,
importYomitanDictionaryFromZip,
deleteYomitanDictionaryByTitle,
removeYomitanDictionarySettings,
requestYomitanParseResults,
requestYomitanScanTokens,
requestYomitanTermFrequencies,
syncYomitanDefaultAnkiServer,
@@ -40,6 +40,40 @@ function createDeps(
};
}
/**
 * Executes an injected Yomitan scanner script inside an isolated `vm` context.
 *
 * The script under test communicates via `chrome.runtime.sendMessage`, so the
 * sandbox supplies a stub whose responses come from `handler`; a thrown handler
 * error is reported the way the extension API would (as `error.message` on the
 * callback response) instead of escaping the sandbox.
 *
 * @param script  Self-contained script text (as sent to executeJavaScript).
 * @param handler Maps a backend action name + params to the stubbed result.
 * @returns Whatever the script evaluates to.
 */
async function runInjectedYomitanScript(
  script: string,
  handler: (action: string, params: unknown) => unknown,
): Promise<unknown> {
  return await vm.runInNewContext(script, {
    chrome: {
      runtime: {
        lastError: null,
        sendMessage: (
          payload: { action?: string; params?: unknown },
          callback: (response: { result?: unknown; error?: { message?: string } }) => void,
        ) => {
          try {
            // Successful handler call is surfaced as a normal extension response.
            callback({ result: handler(payload.action ?? '', payload.params) });
          } catch (error) {
            // Mirror chrome's error-reporting shape rather than throwing.
            callback({ error: { message: (error as Error).message } });
          }
        },
      },
    },
    // Whitelist only the globals the injected script needs; no other Node or
    // DOM globals exist inside the context.
    Array,
    Error,
    JSON,
    Map,
    Math,
    Number,
    Object,
    Promise,
    RegExp,
    Set,
    String,
  });
}
test('syncYomitanDefaultAnkiServer updates default profile server when script reports update', async () => {
let scriptValue = '';
const deps = createDeps(async (script) => {
@@ -451,6 +485,331 @@ test('requestYomitanScanTokens uses left-to-right termsFind scanning instead of
assert.match(scannerScript ?? '', /deinflect:\s*true/);
});
test('requestYomitanScanTokens marks tokens backed by SubMiner character dictionary entries', async () => {
const deps = createDeps(async (script) => {
if (script.includes('optionsGetFull')) {
return {
profileCurrent: 0,
profiles: [
{
options: {
scanning: { length: 40 },
},
},
],
};
}
return [
{
surface: 'アクア',
reading: 'あくあ',
headword: 'アクア',
startPos: 0,
endPos: 3,
isNameMatch: true,
},
{
surface: 'です',
reading: 'です',
headword: 'です',
startPos: 3,
endPos: 5,
isNameMatch: false,
},
];
});
const result = await requestYomitanScanTokens('アクアです', deps, {
error: () => undefined,
});
assert.equal(result?.length, 2);
assert.equal((result?.[0] as { isNameMatch?: boolean } | undefined)?.isNameMatch, true);
assert.equal((result?.[1] as { isNameMatch?: boolean } | undefined)?.isNameMatch, false);
});
test('requestYomitanScanTokens skips name-match work when disabled', async () => {
let scannerScript = '';
const deps = createDeps(async (script) => {
if (script.includes('termsFind')) {
scannerScript = script;
}
if (script.includes('optionsGetFull')) {
return {
profileCurrent: 0,
profiles: [
{
options: {
scanning: { length: 40 },
},
},
],
};
}
return [
{
surface: 'アクア',
reading: 'あくあ',
headword: 'アクア',
startPos: 0,
endPos: 3,
},
];
});
const result = await requestYomitanScanTokens(
'アクア',
deps,
{ error: () => undefined },
{ includeNameMatchMetadata: false },
);
assert.equal(result?.length, 1);
assert.equal((result?.[0] as { isNameMatch?: boolean } | undefined)?.isNameMatch, undefined);
assert.match(scannerScript, /const includeNameMatchMetadata = false;/);
});
test('requestYomitanScanTokens marks grouped entries when SubMiner dictionary alias only exists on definitions', async () => {
let scannerScript = '';
const deps = createDeps(async (script) => {
if (script.includes('termsFind')) {
scannerScript = script;
return [];
}
if (script.includes('optionsGetFull')) {
return {
profileCurrent: 0,
profiles: [
{
options: {
scanning: { length: 40 },
},
},
],
};
}
return null;
});
await requestYomitanScanTokens(
'カズマ',
deps,
{ error: () => undefined },
{ includeNameMatchMetadata: true },
);
assert.match(scannerScript, /getPreferredHeadword/);
const result = await runInjectedYomitanScript(scannerScript, (action, params) => {
if (action === 'termsFind') {
const text = (params as { text?: string } | undefined)?.text;
if (text === 'カズマ') {
return {
originalTextLength: 3,
dictionaryEntries: [
{
dictionaryAlias: '',
headwords: [
{
term: 'カズマ',
reading: 'かずま',
sources: [{ originalText: 'カズマ', isPrimary: true, matchType: 'exact' }],
},
],
definitions: [
{ dictionary: 'JMdict', dictionaryAlias: 'JMdict' },
{
dictionary: 'SubMiner Character Dictionary (AniList 130298)',
dictionaryAlias: 'SubMiner Character Dictionary (AniList 130298)',
},
],
},
],
};
}
return { originalTextLength: 0, dictionaryEntries: [] };
}
throw new Error(`unexpected action: ${action}`);
});
assert.equal(Array.isArray(result), true);
assert.equal((result as { length?: number } | null)?.length, 1);
assert.equal((result as Array<{ surface?: string }>)[0]?.surface, 'カズマ');
assert.equal((result as Array<{ headword?: string }>)[0]?.headword, 'カズマ');
assert.equal((result as Array<{ startPos?: number }>)[0]?.startPos, 0);
assert.equal((result as Array<{ endPos?: number }>)[0]?.endPos, 3);
assert.equal((result as Array<{ isNameMatch?: boolean }>)[0]?.isNameMatch, true);
});
test('requestYomitanScanTokens skips fallback fragments without exact primary source matches', async () => {
const deps = createDeps(async (script) => {
if (script.includes('optionsGetFull')) {
return {
profileCurrent: 0,
profiles: [
{
options: {
scanning: { length: 40 },
},
},
],
};
}
return await runInjectedYomitanScript(script, (action, params) => {
if (action !== 'termsFind') {
throw new Error(`unexpected action: ${action}`);
}
const text = (params as { text?: string } | undefined)?.text ?? '';
if (text.startsWith('だが ')) {
return {
originalTextLength: 2,
dictionaryEntries: [
{
headwords: [
{
term: 'だが',
reading: 'だが',
sources: [{ originalText: 'だが', isPrimary: true, matchType: 'exact' }],
},
],
},
],
};
}
if (text.startsWith('それでも')) {
return {
originalTextLength: 4,
dictionaryEntries: [
{
headwords: [
{
term: 'それでも',
reading: 'それでも',
sources: [{ originalText: 'それでも', isPrimary: true, matchType: 'exact' }],
},
],
},
],
};
}
if (text.startsWith('届かぬ')) {
return {
originalTextLength: 3,
dictionaryEntries: [
{
headwords: [
{
term: '届く',
reading: 'とどく',
sources: [{ originalText: '届かぬ', isPrimary: true, matchType: 'exact' }],
},
],
},
],
};
}
if (text.startsWith('高み')) {
return {
originalTextLength: 2,
dictionaryEntries: [
{
headwords: [
{
term: '高み',
reading: 'たかみ',
sources: [{ originalText: '高み', isPrimary: true, matchType: 'exact' }],
},
],
},
],
};
}
if (text.startsWith('があった')) {
return {
originalTextLength: 2,
dictionaryEntries: [
{
headwords: [
{
term: 'があ',
reading: '',
sources: [{ originalText: 'が', isPrimary: true, matchType: 'exact' }],
},
],
},
],
};
}
if (text.startsWith('あった')) {
return {
originalTextLength: 3,
dictionaryEntries: [
{
headwords: [
{
term: 'ある',
reading: 'ある',
sources: [{ originalText: 'あった', isPrimary: true, matchType: 'exact' }],
},
],
},
],
};
}
return { originalTextLength: 0, dictionaryEntries: [] };
});
});
const result = await requestYomitanScanTokens('だが それでも届かぬ高みがあった', deps, {
error: () => undefined,
});
assert.deepEqual(
result?.map((token) => ({
surface: token.surface,
headword: token.headword,
startPos: token.startPos,
endPos: token.endPos,
})),
[
{
surface: 'だが',
headword: 'だが',
startPos: 0,
endPos: 2,
},
{
surface: 'それでも',
headword: 'それでも',
startPos: 3,
endPos: 7,
},
{
surface: '届かぬ',
headword: '届く',
startPos: 7,
endPos: 10,
},
{
surface: '高み',
headword: '高み',
startPos: 10,
endPos: 12,
},
{
surface: 'あった',
headword: 'ある',
startPos: 13,
endPos: 16,
},
],
);
});
test('getYomitanDictionaryInfo requests dictionary info via backend action', async () => {
let scriptValue = '';
const deps = createDeps(async (script) => {
@@ -464,7 +823,7 @@ test('getYomitanDictionaryInfo requests dictionary info via backend action', asy
assert.match(scriptValue, /getDictionaryInfo/);
});
test('dictionary settings helpers upsert and remove dictionary entries', async () => {
test('dictionary settings helpers upsert and remove dictionary entries without reordering', async () => {
const scripts: string[] = [];
const optionsFull = {
profileCurrent: 0,
@@ -514,7 +873,8 @@ test('dictionary settings helpers upsert and remove dictionary entries', async (
const upsertScript = scripts.find(
(script) =>
script.includes('setAllSettings') && script.includes('"SubMiner Character Dictionary (AniList 1)"'),
script.includes('setAllSettings') &&
script.includes('"SubMiner Character Dictionary (AniList 1)"'),
);
assert.ok(upsertScript);
const jitendexOffset = upsertScript?.indexOf('"Jitendex"') ?? -1;
@@ -554,9 +914,18 @@ test('importYomitanDictionaryFromZip uses settings automation bridge instead of
});
assert.equal(imported, true);
assert.equal(scripts.some((script) => script.includes('__subminerYomitanSettingsAutomation')), true);
assert.equal(scripts.some((script) => script.includes('importDictionaryArchiveBase64')), true);
assert.equal(scripts.some((script) => script.includes('subminerImportDictionary')), false);
assert.equal(
scripts.some((script) => script.includes('__subminerYomitanSettingsAutomation')),
true,
);
assert.equal(
scripts.some((script) => script.includes('importDictionaryArchiveBase64')),
true,
);
assert.equal(
scripts.some((script) => script.includes('subminerImportDictionary')),
false,
);
});
test('deleteYomitanDictionaryByTitle uses settings automation bridge instead of custom backend action', async () => {
@@ -586,7 +955,16 @@ test('deleteYomitanDictionaryByTitle uses settings automation bridge instead of
);
assert.equal(deleted, true);
assert.equal(scripts.some((script) => script.includes('__subminerYomitanSettingsAutomation')), true);
assert.equal(scripts.some((script) => script.includes('deleteDictionary')), true);
assert.equal(scripts.some((script) => script.includes('subminerDeleteDictionary')), false);
assert.equal(
scripts.some((script) => script.includes('__subminerYomitanSettingsAutomation')),
true,
);
assert.equal(
scripts.some((script) => script.includes('deleteDictionary')),
true,
);
assert.equal(
scripts.some((script) => script.includes('subminerDeleteDictionary')),
false,
);
});

View File

@@ -45,6 +45,7 @@ export interface YomitanScanToken {
headword: string;
startPos: number;
endPos: number;
isNameMatch?: boolean;
}
interface YomitanProfileMetadata {
@@ -75,7 +76,8 @@ function isScanTokenArray(value: unknown): value is YomitanScanToken[] {
typeof entry.reading === 'string' &&
typeof entry.headword === 'string' &&
typeof entry.startPos === 'number' &&
typeof entry.endPos === 'number',
typeof entry.endPos === 'number' &&
(entry.isNameMatch === undefined || typeof entry.isNameMatch === 'boolean'),
)
);
}
@@ -560,9 +562,7 @@ async function createYomitanExtensionWindow(
});
return window;
} catch (err) {
logger.error(
`Failed to create hidden Yomitan ${pageName} window: ${(err as Error).message}`,
);
logger.error(`Failed to create hidden Yomitan ${pageName} window: ${(err as Error).message}`);
if (!window.isDestroyed()) {
window.destroy();
}
@@ -772,24 +772,85 @@ const YOMITAN_SCANNING_HELPERS = String.raw`
return segments;
}
function getPreferredHeadword(dictionaryEntries, token) {
for (const dictionaryEntry of dictionaryEntries || []) {
for (const headword of dictionaryEntry.headwords || []) {
const validSources = [];
for (const src of headword.sources || []) {
if (src.originalText !== token) { continue; }
if (!src.isPrimary) { continue; }
if (src.matchType !== 'exact') { continue; }
validSources.push(src);
function appendDictionaryNames(target, value) {
if (!value || typeof value !== 'object') {
return;
}
const candidates = [
value.dictionary,
value.dictionaryName,
value.name,
value.title,
value.dictionaryTitle,
value.dictionaryAlias
];
for (const candidate of candidates) {
if (typeof candidate === 'string' && candidate.trim().length > 0) {
target.push(candidate.trim());
}
if (validSources.length > 0) { return {term: headword.term, reading: headword.reading}; }
}
}
const fallback = dictionaryEntries?.[0]?.headwords?.[0];
return fallback ? {term: fallback.term, reading: fallback.reading} : null;
function getDictionaryEntryNames(entry) {
const names = [];
appendDictionaryNames(names, entry);
for (const definition of entry?.definitions || []) {
appendDictionaryNames(names, definition);
}
for (const frequency of entry?.frequencies || []) {
appendDictionaryNames(names, frequency);
}
for (const pronunciation of entry?.pronunciations || []) {
appendDictionaryNames(names, pronunciation);
}
return names;
}
function isNameDictionaryEntry(entry) {
if (!includeNameMatchMetadata || !entry || typeof entry !== 'object') {
return false;
}
return getDictionaryEntryNames(entry).some((name) => name.startsWith("SubMiner Character Dictionary"));
}
function hasExactPrimarySource(headword, token) {
for (const src of headword.sources || []) {
if (src.originalText !== token) { continue; }
if (!src.isPrimary) { continue; }
if (src.matchType !== 'exact') { continue; }
return true;
}
return false;
}
let matchedNameDictionary = false;
if (includeNameMatchMetadata) {
for (const dictionaryEntry of dictionaryEntries || []) {
if (!isNameDictionaryEntry(dictionaryEntry)) { continue; }
for (const headword of dictionaryEntry.headwords || []) {
if (!hasExactPrimarySource(headword, token)) { continue; }
matchedNameDictionary = true;
break;
}
if (matchedNameDictionary) { break; }
}
}
for (const dictionaryEntry of dictionaryEntries || []) {
for (const headword of dictionaryEntry.headwords || []) {
if (!hasExactPrimarySource(headword, token)) { continue; }
return {
term: headword.term,
reading: headword.reading,
isNameMatch: matchedNameDictionary || isNameDictionaryEntry(dictionaryEntry)
};
}
}
return null;
}
`;
function buildYomitanScanningScript(text: string, profileIndex: number, scanLength: number): string {
function buildYomitanScanningScript(
text: string,
profileIndex: number,
scanLength: number,
includeNameMatchMetadata: boolean,
): string {
return `
(async () => {
const invoke = (action, params) =>
@@ -811,6 +872,7 @@ function buildYomitanScanningScript(text: string, profileIndex: number, scanLeng
});
});
${YOMITAN_SCANNING_HELPERS}
const includeNameMatchMetadata = ${includeNameMatchMetadata ? 'true' : 'false'};
const text = ${JSON.stringify(text)};
const details = {matchType: "exact", deinflect: true};
const tokens = [];
@@ -834,6 +896,7 @@ ${YOMITAN_SCANNING_HELPERS}
headword: preferredHeadword.term,
startPos: i,
endPos: i + originalTextLength,
isNameMatch: includeNameMatchMetadata && preferredHeadword.isNameMatch === true,
});
i += originalTextLength;
continue;
@@ -944,6 +1007,9 @@ export async function requestYomitanScanTokens(
text: string,
deps: YomitanParserRuntimeDeps,
logger: LoggerLike,
options?: {
includeNameMatchMetadata?: boolean;
},
): Promise<YomitanScanToken[] | null> {
const yomitanExt = deps.getYomitanExt();
if (!text || !yomitanExt) {
@@ -962,7 +1028,12 @@ export async function requestYomitanScanTokens(
try {
const rawResult = await parserWindow.webContents.executeJavaScript(
buildYomitanScanningScript(text, profileIndex, scanLength),
buildYomitanScanningScript(
text,
profileIndex,
scanLength,
options?.includeNameMatchMetadata === true,
),
true,
);
if (isScanTokenArray(rawResult)) {
@@ -970,13 +1041,15 @@ export async function requestYomitanScanTokens(
}
if (Array.isArray(rawResult)) {
const selectedTokens = selectYomitanParseTokens(rawResult, () => false, 'headword');
return selectedTokens?.map((token) => ({
surface: token.surface,
reading: token.reading,
headword: token.headword,
startPos: token.startPos,
endPos: token.endPos,
})) ?? null;
return (
selectedTokens?.map((token) => ({
surface: token.surface,
reading: token.reading,
headword: token.headword,
startPos: token.startPos,
endPos: token.endPos,
})) ?? null
);
}
return null;
} catch (err) {
@@ -1450,7 +1523,12 @@ export async function getYomitanDictionaryInfo(
deps: YomitanParserRuntimeDeps,
logger: LoggerLike,
): Promise<YomitanDictionaryInfo[]> {
const result = await invokeYomitanBackendAction<unknown>('getDictionaryInfo', undefined, deps, logger);
const result = await invokeYomitanBackendAction<unknown>(
'getDictionaryInfo',
undefined,
deps,
logger,
);
if (!Array.isArray(result)) {
return [];
}
@@ -1473,7 +1551,12 @@ export async function getYomitanSettingsFull(
deps: YomitanParserRuntimeDeps,
logger: LoggerLike,
): Promise<Record<string, unknown> | null> {
const result = await invokeYomitanBackendAction<unknown>('optionsGetFull', undefined, deps, logger);
const result = await invokeYomitanBackendAction<unknown>(
'optionsGetFull',
undefined,
deps,
logger,
);
return isObject(result) ? result : null;
}
@@ -1580,7 +1663,7 @@ export async function upsertYomitanDictionarySettings(
(entry) =>
isObject(entry) &&
typeof (entry as { name?: unknown }).name === 'string' &&
((entry as { name: string }).name.trim() === normalizedTitle),
(entry as { name: string }).name.trim() === normalizedTitle,
);
if (existingIndex >= 0) {
@@ -1596,7 +1679,7 @@ export async function upsertYomitanDictionarySettings(
continue;
}
dictionaries.unshift(createDefaultDictionarySettings(normalizedTitle, true));
dictionaries.push(createDefaultDictionarySettings(normalizedTitle, true));
changed = true;
}

View File

@@ -90,7 +90,10 @@ export function shouldCopyYomitanExtension(sourceDir: string, targetDir: string)
return sourceHash === null || targetHash === null || sourceHash !== targetHash;
}
export function ensureExtensionCopy(sourceDir: string, userDataPath: string): {
export function ensureExtensionCopy(
sourceDir: string,
userDataPath: string,
): {
targetDir: string;
copied: boolean;
} {

View File

@@ -75,7 +75,10 @@ test('ensureExtensionCopy refreshes copied extension when display files change',
assert.equal(result.targetDir, targetDir);
assert.equal(result.copied, true);
assert.equal(
fs.readFileSync(path.join(targetDir, 'js', 'display', 'structured-content-generator.js'), 'utf8'),
fs.readFileSync(
path.join(targetDir, 'js', 'display', 'structured-content-generator.js'),
'utf8',
),
'new display code',
);
});

View File

@@ -1,13 +1,17 @@
import { BrowserWindow, Extension, session } from 'electron';
import * as fs from 'fs';
import * as path from 'path';
import { createLogger } from '../../logger';
import { ensureExtensionCopy } from './yomitan-extension-copy';
import {
getYomitanExtensionSearchPaths,
resolveExistingYomitanExtensionPath,
} from './yomitan-extension-paths';
const logger = createLogger('main:yomitan-extension-loader');
export interface YomitanExtensionLoaderDeps {
userDataPath: string;
extensionPath?: string;
getYomitanParserWindow: () => BrowserWindow | null;
setYomitanParserWindow: (window: BrowserWindow | null) => void;
setYomitanParserReadyPromise: (promise: Promise<void> | null) => void;
@@ -18,25 +22,17 @@ export interface YomitanExtensionLoaderDeps {
export async function loadYomitanExtension(
deps: YomitanExtensionLoaderDeps,
): Promise<Extension | null> {
const searchPaths = [
path.join(__dirname, '..', '..', 'vendor', 'yomitan'),
path.join(__dirname, '..', '..', '..', 'vendor', 'yomitan'),
path.join(process.resourcesPath, 'yomitan'),
'/usr/share/SubMiner/yomitan',
path.join(deps.userDataPath, 'yomitan'),
];
let extPath: string | null = null;
for (const p of searchPaths) {
if (fs.existsSync(p)) {
extPath = p;
break;
}
}
const searchPaths = getYomitanExtensionSearchPaths({
explicitPath: deps.extensionPath,
moduleDir: __dirname,
resourcesPath: process.resourcesPath,
userDataPath: deps.userDataPath,
});
let extPath = resolveExistingYomitanExtensionPath(searchPaths, fs.existsSync);
if (!extPath) {
logger.error('Yomitan extension not found in any search path');
logger.error('Install Yomitan to one of:', searchPaths);
logger.error('Run `bun run build:yomitan` or install Yomitan to one of:', searchPaths);
return null;
}

View File

@@ -0,0 +1,50 @@
import assert from 'node:assert/strict';
import path from 'node:path';
import test from 'node:test';
import {
getYomitanExtensionSearchPaths,
resolveExistingYomitanExtensionPath,
} from './yomitan-extension-paths';
// The generated build output (<cwd>/build/yomitan) must win over packaged and
// user-data fallbacks; cwd- and moduleDir-derived candidates dedupe to one entry.
test('getYomitanExtensionSearchPaths prioritizes generated build output before packaged fallbacks', () => {
  const searchPaths = getYomitanExtensionSearchPaths({
    cwd: '/repo',
    moduleDir: '/repo/dist/core/services',
    resourcesPath: '/opt/SubMiner/resources',
    userDataPath: '/Users/kyle/.config/SubMiner',
  });
  assert.deepEqual(searchPaths, [
    path.join('/repo', 'build', 'yomitan'),
    path.join('/opt/SubMiner/resources', 'yomitan'),
    '/usr/share/SubMiner/yomitan',
    path.join('/Users/kyle/.config/SubMiner', 'yomitan'),
  ]);
});
// Only directories containing a manifest.json count; the first such candidate
// in priority order is returned.
test('resolveExistingYomitanExtensionPath returns first manifest-backed candidate', () => {
  const existing = new Set<string>([
    path.join('/repo', 'build', 'yomitan', 'manifest.json'),
    path.join('/repo', 'vendor', 'subminer-yomitan', 'ext', 'manifest.json'),
  ]);
  const resolved = resolveExistingYomitanExtensionPath(
    [
      path.join('/repo', 'build', 'yomitan'),
      path.join('/repo', 'vendor', 'subminer-yomitan', 'ext'),
    ],
    (candidate) => existing.has(candidate),
  );
  assert.equal(resolved, path.join('/repo', 'build', 'yomitan'));
});
// An unbuilt source tree (no manifest.json) must not be treated as an extension.
test('resolveExistingYomitanExtensionPath ignores source tree without built manifest', () => {
  const resolved = resolveExistingYomitanExtensionPath(
    [path.join('/repo', 'vendor', 'subminer-yomitan', 'ext')],
    () => false,
  );
  assert.equal(resolved, null);
});

View File

@@ -0,0 +1,60 @@
import * as fs from 'node:fs';
import * as path from 'node:path';
export interface YomitanExtensionPathOptions {
explicitPath?: string;
cwd?: string;
moduleDir?: string;
resourcesPath?: string;
userDataPath?: string;
}
/**
 * Appends `candidate` to `values` unless it is nullish/empty or already present,
 * preserving first-occurrence order.
 */
function pushUnique(values: string[], candidate: string | null | undefined): void {
  // Skip falsy candidates (null, undefined, '') outright.
  if (!candidate) {
    return;
  }
  if (!values.includes(candidate)) {
    values.push(candidate);
  }
}
/**
 * Builds the ordered list of directories to probe for the Yomitan extension.
 *
 * Priority: explicit override, repo build output (from cwd, then from the
 * compiled module location), packaged resources, the system-wide install
 * location, and finally the user-data copy. Unset options contribute nothing;
 * duplicates keep only their first (highest-priority) occurrence.
 */
export function getYomitanExtensionSearchPaths(
  options: YomitanExtensionPathOptions = {},
): string[] {
  // Candidates in priority order; nulls are filtered out below.
  const candidates: Array<string | null> = [
    options.explicitPath ? path.resolve(options.explicitPath) : null,
    options.cwd ? path.resolve(options.cwd, 'build', 'yomitan') : null,
    options.moduleDir
      ? path.resolve(options.moduleDir, '..', '..', '..', 'build', 'yomitan')
      : null,
    options.resourcesPath ? path.join(options.resourcesPath, 'yomitan') : null,
    '/usr/share/SubMiner/yomitan',
    options.userDataPath ? path.join(options.userDataPath, 'yomitan') : null,
  ];
  const seen = new Set<string>();
  const searchPaths: string[] = [];
  for (const candidate of candidates) {
    if (candidate && !seen.has(candidate)) {
      seen.add(candidate);
      searchPaths.push(candidate);
    }
  }
  return searchPaths;
}
/**
 * Returns the first candidate directory that contains a manifest.json, or null
 * when none does. `existsSync` is injectable for tests.
 */
export function resolveExistingYomitanExtensionPath(
  searchPaths: string[],
  existsSync: (path: string) => boolean = fs.existsSync,
): string | null {
  // A directory only counts as a built extension when its manifest is present.
  const match = searchPaths.find((candidate) =>
    existsSync(path.join(candidate, 'manifest.json')),
  );
  return match ?? null;
}
/**
 * Convenience wrapper: derives the prioritized candidate list from `options`
 * and resolves the first directory backed by a manifest.json (or null).
 */
export function resolveYomitanExtensionPath(
  options: YomitanExtensionPathOptions = {},
  existsSync: (path: string) => boolean = fs.existsSync,
): string | null {
  const searchPaths = getYomitanExtensionSearchPaths(options);
  return resolveExistingYomitanExtensionPath(searchPaths, existsSync);
}

View File

@@ -2,6 +2,7 @@ import assert from 'node:assert/strict';
import path from 'node:path';
import test from 'node:test';
import { pathToFileURL } from 'node:url';
import { resolveYomitanExtensionPath } from './yomitan-extension-paths';
class FakeStyle {
private values = new Map<string, string>();
@@ -155,15 +156,14 @@ function findFirstByClass(node: FakeNode, className: string): FakeNode | null {
}
test('StructuredContentGenerator uses direct img loading for popup glossary images', async () => {
const yomitanRoot = resolveYomitanExtensionPath({ cwd: process.cwd() });
assert.ok(yomitanRoot, 'Run `bun run build:yomitan` before Yomitan integration tests.');
const { DisplayContentManager } = await import(
pathToFileURL(
path.join(process.cwd(), 'vendor/yomitan/js/display/display-content-manager.js'),
).href
pathToFileURL(path.join(yomitanRoot, 'js', 'display', 'display-content-manager.js')).href
);
const { StructuredContentGenerator } = await import(
pathToFileURL(
path.join(process.cwd(), 'vendor/yomitan/js/display/structured-content-generator.js'),
).href
pathToFileURL(path.join(yomitanRoot, 'js', 'display', 'structured-content-generator.js')).href
);
const createObjectURLCalls: string[] = [];
@@ -197,14 +197,10 @@ test('StructuredContentGenerator uses direct img loading for popup glossary imag
},
});
const generator = new StructuredContentGenerator(
manager,
new FakeDocument(),
{
devicePixelRatio: 1,
navigator: { userAgent: 'Mozilla/5.0' },
},
);
const generator = new StructuredContentGenerator(manager, new FakeDocument(), {
devicePixelRatio: 1,
navigator: { userAgent: 'Mozilla/5.0' },
});
const node = generator.createDefinitionImage(
{

View File

@@ -0,0 +1,70 @@
import test from 'node:test';
import assert from 'node:assert/strict';
import fs from 'node:fs';
import path from 'node:path';
// Paths of modules deleted from the repository; the tests below assert they
// stay deleted and are never imported again.
const DEAD_MODULE_PATHS = [
  'src/translators/index.ts',
  'src/subsync/engines.ts',
  'src/subtitle/pipeline.ts',
  'src/subtitle/stages/merge.ts',
  'src/subtitle/stages/normalize.ts',
  'src/subtitle/stages/normalize.test.ts',
  'src/subtitle/stages/tokenize.ts',
  'src/tokenizers/index.ts',
  'src/token-mergers/index.ts',
] as const;
// Relative-import forms (./ or ../) that would resurrect the dead modules.
const FORBIDDEN_IMPORT_PATTERNS = [
  /from ['"]\.\.?\/tokenizers['"]/,
  /from ['"]\.\.?\/token-mergers['"]/,
  /from ['"]\.\.?\/subtitle\/pipeline['"]/,
  /from ['"]\.\.?\/subsync\/engines['"]/,
  /from ['"]\.\.?\/translators['"]/,
] as const;
// Reads a UTF-8 text file addressed relative to the repository root (cwd).
function readWorkspaceFile(relativePath: string): string {
  const absolutePath = path.join(process.cwd(), relativePath);
  return fs.readFileSync(absolutePath, 'utf8');
}
/**
 * Collects every .ts/.tsx file under `rootDir` (relative to cwd), returned as
 * sorted, forward-slash, cwd-relative paths.
 */
function collectSourceFiles(rootDir: string): string[] {
  const absoluteRoot = path.join(process.cwd(), rootDir);
  const out: string[] = [];
  // Iterative traversal; visit order is irrelevant because the result is sorted.
  const pending: string[] = [absoluteRoot];
  while (pending.length > 0) {
    const currentDir = pending.pop()!;
    for (const entry of fs.readdirSync(currentDir, { withFileTypes: true })) {
      const fullPath = path.join(currentDir, entry.name);
      if (entry.isDirectory()) {
        pending.push(fullPath);
        continue;
      }
      if (fullPath.endsWith('.ts') || fullPath.endsWith('.tsx')) {
        // Normalize Windows separators so assertions are platform-stable.
        out.push(path.relative(process.cwd(), fullPath).replaceAll('\\', '/'));
      }
    }
  }
  out.sort();
  return out;
}
// Guard: previously-removed registry/pipeline modules must not be reintroduced.
test('dead registry and pipeline modules stay removed from the repository', () => {
  for (const relativePath of DEAD_MODULE_PATHS) {
    assert.equal(
      fs.existsSync(path.join(process.cwd(), relativePath)),
      false,
      `${relativePath} should stay deleted`,
    );
  }
});
// Guard: no live source file may import the dead modules in any relative form.
test('live source tree no longer imports dead registry and pipeline modules', () => {
  for (const relativePath of collectSourceFiles('src')) {
    const source = readWorkspaceFile(relativePath);
    for (const pattern of FORBIDDEN_IMPORT_PATTERNS) {
      assert.doesNotMatch(source, pattern, `${relativePath} should not import ${pattern.source}`);
    }
  }
});

View File

@@ -0,0 +1,82 @@
import assert from 'node:assert/strict';
import fs from 'node:fs';
import path from 'node:path';
import test from 'node:test';
import {
resolveConfigExampleOutputPaths,
writeConfigExampleArtifacts,
} from './generate-config-example';
// Creates a unique throwaway workspace directory under .tmp for one test case.
function createWorkspace(name: string): string {
  const parentDir = path.join(process.cwd(), '.tmp', 'generate-config-example-test');
  fs.mkdirSync(parentDir, { recursive: true });
  const prefix = path.join(parentDir, `${name}-`);
  return fs.mkdtempSync(prefix);
}
test('resolveConfigExampleOutputPaths includes sibling docs repo and never local docs/public', () => {
const workspace = createWorkspace('with-docs-repo');
const projectRoot = path.join(workspace, 'SubMiner');
const docsRepoRoot = path.join(workspace, 'subminer-docs');
fs.mkdirSync(projectRoot, { recursive: true });
fs.mkdirSync(docsRepoRoot, { recursive: true });
try {
const outputPaths = resolveConfigExampleOutputPaths({ cwd: projectRoot });
assert.deepEqual(outputPaths, [
path.join(projectRoot, 'config.example.jsonc'),
path.join(docsRepoRoot, 'public', 'config.example.jsonc'),
]);
assert.equal(
outputPaths.includes(path.join(projectRoot, 'docs', 'public', 'config.example.jsonc')),
false,
);
} finally {
fs.rmSync(workspace, { recursive: true, force: true });
}
});
test('resolveConfigExampleOutputPaths stays repo-local when sibling docs repo is absent', () => {
  // Only the project checkout exists here — no ../subminer-docs sibling.
  const workspace = createWorkspace('without-docs-repo');
  const projectRoot = path.join(workspace, 'SubMiner');
  fs.mkdirSync(projectRoot, { recursive: true });
  try {
    const resolved = resolveConfigExampleOutputPaths({ cwd: projectRoot });
    // Exactly one target: the repo-local example file.
    assert.deepEqual(resolved, [path.join(projectRoot, 'config.example.jsonc')]);
  } finally {
    fs.rmSync(workspace, { recursive: true, force: true });
  }
});
test('writeConfigExampleArtifacts creates parent directories for resolved outputs', () => {
  const workspace = createWorkspace('write-artifacts');
  const projectRoot = path.join(workspace, 'SubMiner');
  const docsRepoRoot = path.join(workspace, 'subminer-docs');
  const template = '{\n "ok": true\n}\n';
  fs.mkdirSync(projectRoot, { recursive: true });
  fs.mkdirSync(docsRepoRoot, { recursive: true });
  try {
    // Note: docsRepoRoot/public does not exist yet — the writer must create it.
    const writtenPaths = writeConfigExampleArtifacts(template, {
      cwd: projectRoot,
      deps: { log: () => {} },
    });
    const repoLocalCopy = path.join(projectRoot, 'config.example.jsonc');
    const docsRepoCopy = path.join(docsRepoRoot, 'public', 'config.example.jsonc');
    assert.deepEqual(writtenPaths, [repoLocalCopy, docsRepoCopy]);
    // Both copies carry the template verbatim.
    for (const writtenPath of [repoLocalCopy, docsRepoCopy]) {
      assert.equal(fs.readFileSync(writtenPath, 'utf8'), template);
    }
  } finally {
    fs.rmSync(workspace, { recursive: true, force: true });
  }
});

View File

@@ -2,18 +2,62 @@ import * as fs from 'fs';
import * as path from 'path';
import { DEFAULT_CONFIG, generateConfigTemplate } from './config';
function main(): void {
const template = generateConfigTemplate(DEFAULT_CONFIG);
const outputPaths = [
path.join(process.cwd(), 'config.example.jsonc'),
path.join(process.cwd(), 'docs', 'public', 'config.example.jsonc'),
];
/**
 * Injectable filesystem/logging hooks for the config-example writers.
 * Every member is optional; omitted members fall back to the real `fs` /
 * `console.log` implementations. Intended for tests.
 */
type ConfigExampleFsDeps = {
// Replacement for fs.existsSync (used when probing for the sibling docs repo).
existsSync?: (candidate: string) => boolean;
// Replacement for fs.mkdirSync; only ever called with { recursive: true }.
mkdirSync?: (candidate: string, options: { recursive: true }) => void;
// Replacement for fs.writeFileSync; receives path, content, and encoding.
writeFileSync?: (candidate: string, content: string, encoding: BufferEncoding) => void;
// Replacement for console.log progress output.
log?: (message: string) => void;
};
for (const outputPath of outputPaths) {
fs.mkdirSync(path.dirname(outputPath), { recursive: true });
fs.writeFileSync(outputPath, template, 'utf-8');
console.log(`Generated ${outputPath}`);
/**
 * Determines every location the generated config.example.jsonc should be written to.
 *
 * The repo-local copy at `<cwd>/config.example.jsonc` is always included. When a
 * sibling docs checkout (`../<docsRepoName>`, default `subminer-docs`) exists,
 * its `public/config.example.jsonc` is appended as a second target. The working
 * directory, docs repo name, and existence probe are all injectable for tests.
 */
export function resolveConfigExampleOutputPaths(options?: {
  cwd?: string;
  docsRepoName?: string;
  existsSync?: (candidate: string) => boolean;
}): string[] {
  const projectRoot = options?.cwd ?? process.cwd();
  const siblingRepoName = options?.docsRepoName ?? 'subminer-docs';
  const directoryExists = options?.existsSync ?? fs.existsSync;

  const targets: string[] = [path.join(projectRoot, 'config.example.jsonc')];
  const siblingDocsRoot = path.resolve(projectRoot, '..', siblingRepoName);
  if (directoryExists(siblingDocsRoot)) {
    targets.push(path.join(siblingDocsRoot, 'public', 'config.example.jsonc'));
  }
  return targets;
}
main();
/**
 * Writes the rendered config template to every resolved output location,
 * creating parent directories as needed.
 *
 * @param template - Full text of the config.example.jsonc artifact.
 * @param options  - Optional cwd / docs-repo-name overrides plus injectable
 *                   fs and logging hooks (see ConfigExampleFsDeps).
 * @returns The list of absolute paths that were written.
 */
export function writeConfigExampleArtifacts(
  template: string,
  options?: {
    cwd?: string;
    docsRepoName?: string;
    deps?: ConfigExampleFsDeps;
  },
): string[] {
  const deps = options?.deps;
  const mkdirSync = deps?.mkdirSync ?? fs.mkdirSync;
  const writeFileSync = deps?.writeFileSync ?? fs.writeFileSync;
  const log = deps?.log ?? console.log;

  const targets = resolveConfigExampleOutputPaths({
    cwd: options?.cwd,
    docsRepoName: options?.docsRepoName,
    existsSync: deps?.existsSync,
  });
  for (const target of targets) {
    // Ensure the parent directory chain exists before writing each artifact.
    mkdirSync(path.dirname(target), { recursive: true });
    writeFileSync(target, template, 'utf-8');
    log(`Generated ${target}`);
  }
  return targets;
}
// Entry point: render the default configuration into the config.example
// template and write it to every resolved output location.
function main(): void {
const template = generateConfigTemplate(DEFAULT_CONFIG);
writeConfigExampleArtifacts(template);
}
// Run only when executed directly as a script, not when imported (e.g. by tests).
if (require.main === module) {
main();
}

View File

@@ -1,12 +1,35 @@
import assert from 'node:assert/strict';
import test from 'node:test';
import {
normalizeStartupArgv,
sanitizeHelpEnv,
sanitizeStartupEnv,
sanitizeBackgroundEnv,
shouldDetachBackgroundLaunch,
shouldHandleHelpOnlyAtEntry,
} from './main-entry-runtime';
test('normalizeStartupArgv defaults no-arg startup to --start --background', () => {
  // Table of [input argv, expected normalized argv] pairs.
  const cases: Array<[string[], string[]]> = [
    // Bare launch gains both flags.
    [['SubMiner.AppImage'], ['SubMiner.AppImage', '--start', '--background']],
    // Unrelated flags are preserved ahead of the appended defaults.
    [
      ['SubMiner.AppImage', '--password-store', 'gnome-libsecret'],
      ['SubMiner.AppImage', '--password-store', 'gnome-libsecret', '--start', '--background'],
    ],
    // An existing --background only gains --start.
    [['SubMiner.AppImage', '--background'], ['SubMiner.AppImage', '--background', '--start']],
    // Help invocations are left untouched.
    [['SubMiner.AppImage', '--help'], ['SubMiner.AppImage', '--help']],
  ];
  for (const [argv, expected] of cases) {
    assert.deepEqual(normalizeStartupArgv(argv, {}), expected);
  }
});
test('shouldHandleHelpOnlyAtEntry detects help-only invocation', () => {
assert.equal(shouldHandleHelpOnlyAtEntry(['--help'], {}), true);
assert.equal(shouldHandleHelpOnlyAtEntry(['--help', '--start'], {}), false);
@@ -14,6 +37,14 @@ test('shouldHandleHelpOnlyAtEntry detects help-only invocation', () => {
assert.equal(shouldHandleHelpOnlyAtEntry(['--help'], { ELECTRON_RUN_AS_NODE: '1' }), false);
});
test('sanitizeStartupEnv suppresses warnings and lsfg layer', () => {
  const sanitized = sanitizeStartupEnv({
    VK_INSTANCE_LAYERS: 'foo:lsfg-vk:bar',
  });
  // Node warning suppression must be forced on.
  assert.equal(sanitized.NODE_NO_WARNINGS, '1');
  // The Vulkan layer variable must be removed entirely, not merely edited.
  assert.equal('VK_INSTANCE_LAYERS' in sanitized, false);
});
test('sanitizeHelpEnv suppresses warnings and lsfg layer', () => {
const env = sanitizeHelpEnv({
VK_INSTANCE_LAYERS: 'foo:lsfg-vk:bar',

Some files were not shown because too many files have changed in this diff Show More