mirror of
https://github.com/ksyasuda/SubMiner.git
synced 2026-04-11 04:19:26 -07:00
Compare commits
14 Commits
v0.4.0
...
40521e769d
| Author | SHA1 | Date | |
|---|---|---|---|
|
40521e769d
|
|||
|
2f31227471
|
|||
|
69fd69c0b2
|
|||
|
746696b1a4
|
|||
|
ebe9515486
|
|||
|
8c2c950564
|
|||
|
e2b51c6306
|
|||
|
f160ca6af8
|
|||
|
289486a5b1
|
|||
|
ac4fd60098
|
|||
|
72b18110b5
|
|||
|
c791887d5c
|
|||
|
8570b262e4
|
|||
|
33ded3c1bf
|
3
.github/workflows/ci.yml
vendored
3
.github/workflows/ci.yml
vendored
@@ -31,8 +31,7 @@ jobs:
|
|||||||
path: |
|
path: |
|
||||||
~/.bun/install/cache
|
~/.bun/install/cache
|
||||||
node_modules
|
node_modules
|
||||||
vendor/subminer-yomitan/node_modules
|
key: ${{ runner.os }}-bun-${{ hashFiles('bun.lock') }}
|
||||||
key: ${{ runner.os }}-bun-${{ hashFiles('bun.lock', 'vendor/subminer-yomitan/package-lock.json') }}
|
|
||||||
restore-keys: |
|
restore-keys: |
|
||||||
${{ runner.os }}-bun-
|
${{ runner.os }}-bun-
|
||||||
|
|
||||||
|
|||||||
25
.github/workflows/release.yml
vendored
25
.github/workflows/release.yml
vendored
@@ -31,23 +31,22 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
node-version: 22.12.0
|
node-version: 22.12.0
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: bun install --frozen-lockfile
|
||||||
|
|
||||||
- name: Cache dependencies
|
- name: Cache dependencies
|
||||||
uses: actions/cache@v4
|
uses: actions/cache@v4
|
||||||
with:
|
with:
|
||||||
path: |
|
path: |
|
||||||
~/.bun/install/cache
|
~/.bun/install/cache
|
||||||
node_modules
|
node_modules
|
||||||
vendor/subminer-yomitan/node_modules
|
key: ${{ runner.os }}-bun-${{ hashFiles('bun.lock') }}
|
||||||
key: ${{ runner.os }}-bun-${{ hashFiles('bun.lock', 'vendor/subminer-yomitan/package-lock.json') }}
|
|
||||||
restore-keys: |
|
restore-keys: |
|
||||||
${{ runner.os }}-bun-
|
${{ runner.os }}-bun-
|
||||||
|
|
||||||
- name: Install dependencies
|
- name: Install dependencies
|
||||||
run: bun install --frozen-lockfile
|
run: bun install --frozen-lockfile
|
||||||
|
|
||||||
- name: Build (TypeScript check)
|
|
||||||
run: bun run typecheck
|
|
||||||
|
|
||||||
- name: Test suite (source)
|
- name: Test suite (source)
|
||||||
run: bun run test:fast
|
run: bun run test:fast
|
||||||
|
|
||||||
@@ -85,11 +84,6 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
bun-version: 1.3.5
|
bun-version: 1.3.5
|
||||||
|
|
||||||
- name: Setup Node
|
|
||||||
uses: actions/setup-node@v4
|
|
||||||
with:
|
|
||||||
node-version: 22.12.0
|
|
||||||
|
|
||||||
- name: Cache dependencies
|
- name: Cache dependencies
|
||||||
uses: actions/cache@v4
|
uses: actions/cache@v4
|
||||||
with:
|
with:
|
||||||
@@ -97,8 +91,7 @@ jobs:
|
|||||||
~/.bun/install/cache
|
~/.bun/install/cache
|
||||||
node_modules
|
node_modules
|
||||||
vendor/texthooker-ui/node_modules
|
vendor/texthooker-ui/node_modules
|
||||||
vendor/subminer-yomitan/node_modules
|
key: ${{ runner.os }}-bun-${{ hashFiles('bun.lock', 'vendor/texthooker-ui/package.json') }}
|
||||||
key: ${{ runner.os }}-bun-${{ hashFiles('bun.lock', 'vendor/texthooker-ui/package.json', 'vendor/subminer-yomitan/package-lock.json') }}
|
|
||||||
restore-keys: |
|
restore-keys: |
|
||||||
${{ runner.os }}-bun-
|
${{ runner.os }}-bun-
|
||||||
|
|
||||||
@@ -147,11 +140,6 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
bun-version: 1.3.5
|
bun-version: 1.3.5
|
||||||
|
|
||||||
- name: Setup Node
|
|
||||||
uses: actions/setup-node@v4
|
|
||||||
with:
|
|
||||||
node-version: 22.12.0
|
|
||||||
|
|
||||||
- name: Cache dependencies
|
- name: Cache dependencies
|
||||||
uses: actions/cache@v4
|
uses: actions/cache@v4
|
||||||
with:
|
with:
|
||||||
@@ -159,8 +147,7 @@ jobs:
|
|||||||
~/.bun/install/cache
|
~/.bun/install/cache
|
||||||
node_modules
|
node_modules
|
||||||
vendor/texthooker-ui/node_modules
|
vendor/texthooker-ui/node_modules
|
||||||
vendor/subminer-yomitan/node_modules
|
key: ${{ runner.os }}-bun-${{ hashFiles('bun.lock', 'vendor/texthooker-ui/package.json') }}
|
||||||
key: ${{ runner.os }}-bun-${{ hashFiles('bun.lock', 'vendor/texthooker-ui/package.json', 'vendor/subminer-yomitan/package-lock.json') }}
|
|
||||||
restore-keys: |
|
restore-keys: |
|
||||||
${{ runner.os }}-bun-
|
${{ runner.os }}-bun-
|
||||||
|
|
||||||
|
|||||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -5,7 +5,6 @@ node_modules/
|
|||||||
out/
|
out/
|
||||||
dist/
|
dist/
|
||||||
release/
|
release/
|
||||||
build/yomitan/
|
|
||||||
|
|
||||||
# Launcher build artifact (produced by make build-launcher)
|
# Launcher build artifact (produced by make build-launcher)
|
||||||
/subminer
|
/subminer
|
||||||
@@ -37,4 +36,3 @@ tests/*
|
|||||||
.worktrees/
|
.worktrees/
|
||||||
.codex/*
|
.codex/*
|
||||||
.agents/*
|
.agents/*
|
||||||
docs/*
|
|
||||||
|
|||||||
6
.gitmodules
vendored
6
.gitmodules
vendored
@@ -5,6 +5,6 @@
|
|||||||
[submodule "vendor/yomitan-jlpt-vocab"]
|
[submodule "vendor/yomitan-jlpt-vocab"]
|
||||||
path = vendor/yomitan-jlpt-vocab
|
path = vendor/yomitan-jlpt-vocab
|
||||||
url = https://github.com/stephenmk/yomitan-jlpt-vocab
|
url = https://github.com/stephenmk/yomitan-jlpt-vocab
|
||||||
[submodule "vendor/subminer-yomitan"]
|
[submodule "yomitan-jlpt-vocab"]
|
||||||
path = vendor/subminer-yomitan
|
path = vendor/yomitan-jlpt-vocab
|
||||||
url = https://github.com/ksyasuda/subminer-yomitan
|
url = https://github.com/stephenmk/yomitan-jlpt-vocab
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
|
|
||||||
<!-- BACKLOG.MD MCP GUIDELINES START -->
|
<!-- BACKLOG.MD MCP GUIDELINES START -->
|
||||||
|
|
||||||
<CRITICAL_INSTRUCTION>
|
<CRITICAL_INSTRUCTION>
|
||||||
@@ -17,6 +16,7 @@ This project uses Backlog.md MCP for all task and project management activities.
|
|||||||
- **When to read it**: BEFORE creating tasks, or when you're unsure whether to track work
|
- **When to read it**: BEFORE creating tasks, or when you're unsure whether to track work
|
||||||
|
|
||||||
These guides cover:
|
These guides cover:
|
||||||
|
|
||||||
- Decision framework for when to create tasks
|
- Decision framework for when to create tasks
|
||||||
- Search-first workflow to avoid duplicates
|
- Search-first workflow to avoid duplicates
|
||||||
- Links to detailed guides for task creation, execution, and finalization
|
- Links to detailed guides for task creation, execution, and finalization
|
||||||
|
|||||||
2
Makefile
2
Makefile
@@ -98,7 +98,7 @@ ensure-bun:
|
|||||||
@command -v bun >/dev/null 2>&1 || { printf '%s\n' "[ERROR] bun not found"; exit 1; }
|
@command -v bun >/dev/null 2>&1 || { printf '%s\n' "[ERROR] bun not found"; exit 1; }
|
||||||
|
|
||||||
pretty: ensure-bun
|
pretty: ensure-bun
|
||||||
@bun run format:src
|
@bun run format
|
||||||
|
|
||||||
build:
|
build:
|
||||||
@printf '%s\n' "[INFO] Detected platform: $(PLATFORM)"
|
@printf '%s\n' "[INFO] Detected platform: $(PLATFORM)"
|
||||||
|
|||||||
39
README.md
39
README.md
@@ -33,7 +33,6 @@ SubMiner is an Electron overlay that sits on top of mpv. It turns your video pla
|
|||||||
- **Subtitle tools** — Download from Jimaku, sync with alass/ffsubsync
|
- **Subtitle tools** — Download from Jimaku, sync with alass/ffsubsync
|
||||||
- **Immersion tracking** — SQLite-powered stats on your watch time and mining activity
|
- **Immersion tracking** — SQLite-powered stats on your watch time and mining activity
|
||||||
- **Custom texthooker page** — Built-in custom texthooker page and websocket, no extra setup
|
- **Custom texthooker page** — Built-in custom texthooker page and websocket, no extra setup
|
||||||
- **Annotated websocket API** — Dedicated annotation feed can serve bundled texthooker or external clients with rendered `sentence` HTML plus structured `tokens`
|
|
||||||
- **Jellyfin integration** — Remote playback setup, cast device mode, and direct playback launch
|
- **Jellyfin integration** — Remote playback setup, cast device mode, and direct playback launch
|
||||||
- **AniList progress** — Track episode completion and push watching progress automatically
|
- **AniList progress** — Track episode completion and push watching progress automatically
|
||||||
|
|
||||||
@@ -54,36 +53,30 @@ chmod +x ~/.local/bin/subminer
|
|||||||
> [!NOTE]
|
> [!NOTE]
|
||||||
> The `subminer` wrapper uses a [Bun](https://bun.sh) shebang. Make sure `bun` is on your `PATH`.
|
> The `subminer` wrapper uses a [Bun](https://bun.sh) shebang. Make sure `bun` is on your `PATH`.
|
||||||
|
|
||||||
**From source** or **macOS** — initialize submodules first (`git submodule update --init --recursive`). Source builds now also require Node.js 22 + npm because bundled Yomitan is built from the `vendor/subminer-yomitan` submodule into `build/yomitan` during `bun run build`. Full install guide: [docs.subminer.moe/installation#from-source](https://docs.subminer.moe/installation#from-source).
|
**From source** or **macOS** — see the [installation guide](https://docs.subminer.moe/installation#from-source).
|
||||||
|
|
||||||
### 2. Launch the app once
|
### 2. Install the mpv plugin and configuration file
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
SubMiner.AppImage
|
wget https://github.com/ksyasuda/SubMiner/releases/latest/download/subminer-assets.tar.gz -O /tmp/subminer-assets.tar.gz
|
||||||
|
tar -xzf /tmp/subminer-assets.tar.gz -C /tmp
|
||||||
|
mkdir -p ~/.config/mpv/scripts/subminer
|
||||||
|
mkdir -p ~/.config/mpv/script-opts
|
||||||
|
cp -R /tmp/plugin/subminer/. ~/.config/mpv/scripts/subminer/
|
||||||
|
cp /tmp/plugin/subminer.conf ~/.config/mpv/script-opts/
|
||||||
|
mkdir -p ~/.config/SubMiner && cp /tmp/config.example.jsonc ~/.config/SubMiner/config.jsonc
|
||||||
```
|
```
|
||||||
|
|
||||||
On first launch, SubMiner now:
|
### 3. Set up Yomitan Dictionaries
|
||||||
|
|
||||||
- starts in the tray/background
|
```bash
|
||||||
- creates the default config directory and `config.jsonc`
|
subminer app --yomitan
|
||||||
- opens a compact setup popup
|
```
|
||||||
- can install the mpv plugin to the default mpv scripts location for you
|
|
||||||
- links directly to Yomitan settings so you can install dictionaries before finishing setup
|
|
||||||
|
|
||||||
Existing installs that already have a valid config plus at least one Yomitan dictionary are auto-detected as complete and will not be re-prompted.
|
|
||||||
|
|
||||||
### 3. Finish setup
|
|
||||||
|
|
||||||
- click `Install mpv plugin` if you want the default plugin auto-start flow
|
|
||||||
- click `Open Yomitan Settings` and install at least one dictionary
|
|
||||||
- click `Refresh status`
|
|
||||||
- click `Finish setup`
|
|
||||||
|
|
||||||
The mpv plugin step is optional. Yomitan must report at least one installed dictionary before setup can be completed.
|
|
||||||
|
|
||||||
### 4. Mine
|
### 4. Mine
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
|
subminer app --start --background
|
||||||
subminer video.mkv # default plugin config auto-starts visible overlay + resumes playback when ready
|
subminer video.mkv # default plugin config auto-starts visible overlay + resumes playback when ready
|
||||||
subminer --start video.mkv # optional explicit overlay start when plugin auto_start=no
|
subminer --start video.mkv # optional explicit overlay start when plugin auto_start=no
|
||||||
```
|
```
|
||||||
@@ -92,7 +85,7 @@ subminer --start video.mkv # optional explicit overlay start when plugin auto_st
|
|||||||
|
|
||||||
| Required | Optional |
|
| Required | Optional |
|
||||||
| ------------------------------------------ | -------------------------------------------------- |
|
| ------------------------------------------ | -------------------------------------------------- |
|
||||||
| `bun`, `node` 22, `npm` | |
|
| `bun` | |
|
||||||
| `mpv` with IPC socket | `yt-dlp` |
|
| `mpv` with IPC socket | `yt-dlp` |
|
||||||
| `ffmpeg` | `guessit` (better AniSkip title/episode detection) |
|
| `ffmpeg` | `guessit` (better AniSkip title/episode detection) |
|
||||||
| `mecab` + `mecab-ipadic` | `fzf` / `rofi` |
|
| `mecab` + `mecab-ipadic` | `fzf` / `rofi` |
|
||||||
@@ -116,7 +109,7 @@ The Bun-managed discovery lanes intentionally exclude a small set of suites that
|
|||||||
|
|
||||||
## Acknowledgments
|
## Acknowledgments
|
||||||
|
|
||||||
Built on the shoulders of [GameSentenceMiner](https://github.com/bpwhelan/GameSentenceMiner), [Renji's Texthooker Page](https://github.com/Renji-XD/texthooker-ui), [mpvacious](https://github.com/Ajatt-Tools/mpvacious), [Anacreon-Script](https://github.com/friedrich-de/Anacreon-Script), and [Bee's Character Dictionary](https://github.com/bee-san/Japanese_Character_Name_Dictionary). Subtitles powered by [Jimaku.cc](https://jimaku.cc). Dictionary lookups via [Yomitan](https://github.com/yomidevs/yomitan).
|
Built on the shoulders of [GameSentenceMiner](https://github.com/bpwhelan/GameSentenceMiner), [texthooker-ui](https://github.com/Renji-XD/texthooker-ui), [mpvacious](https://github.com/Ajatt-Tools/mpvacious), [Anacreon-Script](https://github.com/friedrich-de/Anacreon-Script), and [autosubsync-mpv](https://github.com/joaquintorres/autosubsync-mpv). Subtitles powered by [Jimaku.cc](https://jimaku.cc). Dictionary lookups via [Yomitan](https://github.com/yomidevs/yomitan).
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
|
||||||
|
|||||||
@@ -1,11 +1,11 @@
|
|||||||
project_name: "SubMiner"
|
project_name: 'SubMiner'
|
||||||
default_status: "To Do"
|
default_status: 'To Do'
|
||||||
statuses: ["To Do", "In Progress", "Done"]
|
statuses: ['To Do', 'In Progress', 'Done']
|
||||||
labels: []
|
labels: []
|
||||||
definition_of_done: []
|
definition_of_done: []
|
||||||
date_format: yyyy-mm-dd
|
date_format: yyyy-mm-dd
|
||||||
max_column_width: 20
|
max_column_width: 20
|
||||||
default_editor: "nvim"
|
default_editor: 'nvim'
|
||||||
auto_open_browser: false
|
auto_open_browser: false
|
||||||
default_port: 6420
|
default_port: 6420
|
||||||
remote_operations: true
|
remote_operations: true
|
||||||
@@ -13,4 +13,4 @@ auto_commit: false
|
|||||||
bypass_git_hooks: false
|
bypass_git_hooks: false
|
||||||
check_active_branches: true
|
check_active_branches: true
|
||||||
active_branch_days: 30
|
active_branch_days: 30
|
||||||
task_prefix: "task"
|
task_prefix: 'task'
|
||||||
|
|||||||
@@ -1,49 +0,0 @@
|
|||||||
---
|
|
||||||
id: TASK-100
|
|
||||||
title: 'Add configurable texthooker startup launch'
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2026-03-06 23:30'
|
|
||||||
updated_date: '2026-03-07 01:59'
|
|
||||||
labels: []
|
|
||||||
dependencies: []
|
|
||||||
priority: medium
|
|
||||||
ordinal: 10000
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
|
|
||||||
Add a config option under `texthooker` to launch the built-in texthooker server automatically when SubMiner starts.
|
|
||||||
|
|
||||||
Scope:
|
|
||||||
|
|
||||||
- Add `texthooker.launchAtStartup`.
|
|
||||||
- Default to `true`.
|
|
||||||
- Start the existing texthooker server during normal app startup when enabled.
|
|
||||||
- Keep `texthooker.openBrowser` as separate behavior.
|
|
||||||
- Add regression coverage and update generated config docs/example.
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
|
|
||||||
- [x] #1 Default config enables automatic texthooker startup.
|
|
||||||
- [x] #2 Config parser accepts valid boolean values and warns on invalid values.
|
|
||||||
- [x] #3 App-ready startup launches texthooker when enabled.
|
|
||||||
- [x] #4 Generated config template/example documents the new option.
|
|
||||||
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Final Summary
|
|
||||||
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
|
||||||
|
|
||||||
Added `texthooker.launchAtStartup` with a default of `true`, wired it through config defaults/validation/template generation, and started the existing texthooker server during app-ready startup without coupling it to browser auto-open behavior.
|
|
||||||
|
|
||||||
Also added regression coverage for config parsing/template output and app-ready dependency wiring, then regenerated the checked-in config example artifacts.
|
|
||||||
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:END -->
|
|
||||||
@@ -1,39 +0,0 @@
|
|||||||
---
|
|
||||||
id: TASK-101
|
|
||||||
title: Index AniList character alternative names in the character dictionary
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2026-03-07 00:00'
|
|
||||||
updated_date: '2026-03-08 00:11'
|
|
||||||
labels:
|
|
||||||
- dictionary
|
|
||||||
- anilist
|
|
||||||
dependencies: []
|
|
||||||
references:
|
|
||||||
- src/main/character-dictionary-runtime.ts
|
|
||||||
- src/main/character-dictionary-runtime.test.ts
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Index AniList character alternative names in generated character dictionaries so aliases like Shadow resolve during subtitle lookup instead of falling through to unrelated generic dictionary entries.
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 Character fetch reads AniList alternative character names needed for lookup coverage
|
|
||||||
- [x] #2 Generated term banks include alias-derived terms for subtitle lookups like シャドウ
|
|
||||||
- [x] #3 Regression coverage proves alternative-name indexing works end to end
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Final Summary
|
|
||||||
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
|
||||||
Character dictionary generation now requests AniList `name.alternative`, indexes those aliases as term candidates, and expands mixed aliases like `Minoru Kagenou (影野ミノル)` into usable outer/inner variants. Also extended kana alias synthesis so the AniList alias `Shadow` emits `シャドウ`, which matches the subtitle token the user hit in The Eminence in Shadow.
|
|
||||||
|
|
||||||
Bumped the character-dictionary snapshot format to invalidate stale cached snapshots, and updated merged-dictionary rebuilds to refresh invalid snapshots before composing the ZIP so old cache files do not hard-fail the merge path.
|
|
||||||
|
|
||||||
Verified with `bun test src/main/character-dictionary-runtime.test.ts` and `bun run tsc --noEmit`.
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:END -->
|
|
||||||
@@ -1,65 +0,0 @@
|
|||||||
---
|
|
||||||
id: TASK-102
|
|
||||||
title: Quiet default AppImage startup and implicit background launch
|
|
||||||
status: Done
|
|
||||||
assignee:
|
|
||||||
- codex
|
|
||||||
created_date: '2026-03-06 21:20'
|
|
||||||
updated_date: '2026-03-06 21:33'
|
|
||||||
labels: []
|
|
||||||
dependencies: []
|
|
||||||
references:
|
|
||||||
- /home/sudacode/projects/japanese/SubMiner/src/main-entry-runtime.ts
|
|
||||||
- /home/sudacode/projects/japanese/SubMiner/src/core/services/cli-command.ts
|
|
||||||
- /home/sudacode/projects/japanese/SubMiner/src/main.ts
|
|
||||||
priority: medium
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Make the packaged Linux no-arg launch path behave like a quiet background start instead of surfacing startup-only noise.
|
|
||||||
|
|
||||||
Scope:
|
|
||||||
|
|
||||||
- Treat default background entry launches as implicit `--start --background`.
|
|
||||||
- Keep the `--password-store` diagnostic out of normal startup output.
|
|
||||||
- Suppress known startup-only `node:sqlite` and `lsfg-vk` warnings for the entry/background launch path.
|
|
||||||
- Avoid noisy protocol-registration warnings during normal startup when registration is unsupported.
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 Initial background launch reaches the start path without logging `No running instance. Use --start to launch the app.`
|
|
||||||
- [x] #2 Default startup no longer emits the `Applied --password-store gnome-libsecret` line at normal log levels.
|
|
||||||
- [x] #3 Entry/background launch sanitization suppresses the observed `ExperimentalWarning: SQLite...` and `lsfg-vk ... unsupported configuration version` startup noise.
|
|
||||||
- [x] #4 Regression coverage documents the new startup behavior.
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
Normalized no-arg/password-store-only entry launches to append implicit `--start --background`, and upgraded `--background`-only entry launches to include `--start`.
|
|
||||||
|
|
||||||
Applied shared entry env sanitization before loading the main process so default startup strips the `lsfg-vk` Vulkan layer and sets `NODE_NO_WARNINGS=1`; background children keep the same sanitized env.
|
|
||||||
|
|
||||||
Downgraded startup-only protocol-registration failure logging to debug, and routed the Linux password-store diagnostic through the scoped debug logger instead of raw console output.
|
|
||||||
|
|
||||||
Verification:
|
|
||||||
|
|
||||||
- `bun test src/main-entry-runtime.test.ts src/main/runtime/anilist-setup-protocol.test.ts src/main/runtime/anilist-setup-protocol-main-deps.test.ts`
|
|
||||||
- `bun run test:fast`
|
|
||||||
|
|
||||||
Note: the final `node --experimental-sqlite --test dist/main/runtime/registry.test.js` step in `bun run test:fast` still prints Node's own experimental SQLite warning because that test command explicitly enables the feature flag outside the app entrypoint.
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
||||||
## Final Summary
|
|
||||||
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
|
||||||
Default packaged startup is now quiet and behaves like an implicit `--start --background` launch.
|
|
||||||
|
|
||||||
- No-arg AppImage entry launches now append `--start --background`, and `--background`-only launches append the missing `--start`.
|
|
||||||
- Entry/background startup sanitization now suppresses the observed `lsfg-vk` and `node:sqlite` warnings on the app launch path.
|
|
||||||
- Linux password-store and unsupported protocol-registration diagnostics now stay at debug level instead of normal startup output.
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:END -->
|
|
||||||
@@ -1,38 +0,0 @@
|
|||||||
---
|
|
||||||
id: TASK-103
|
|
||||||
title: Add dedicated annotation websocket for texthooker
|
|
||||||
status: Done
|
|
||||||
assignee:
|
|
||||||
- codex
|
|
||||||
created_date: '2026-03-07 02:20'
|
|
||||||
updated_date: '2026-03-07 02:20'
|
|
||||||
labels:
|
|
||||||
- texthooker
|
|
||||||
- websocket
|
|
||||||
- subtitle
|
|
||||||
dependencies: []
|
|
||||||
priority: medium
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Add a separate annotated subtitle websocket for bundled texthooker so token/JLPT/frequency markup is available on a stable dedicated port even when the regular websocket is in `auto` mode and skipped because `mpv_websocket` is installed.
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 Regular `websocket.enabled: "auto"` behavior remains unchanged and still skips the regular websocket when `mpv_websocket` is installed.
|
|
||||||
- [x] #2 A separate `annotationWebsocket` config controls an independent annotated websocket with default port `6678`.
|
|
||||||
- [x] #3 Bundled texthooker is pointed at the annotation websocket when it is enabled.
|
|
||||||
- [x] #4 Focused regression tests cover config parsing, startup wiring, and texthooker bootstrap injection.
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Final Summary
|
|
||||||
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
|
||||||
Added `annotationWebsocket.enabled`/`annotationWebsocket.port` with defaults of `true`/`6678`, started that websocket independently from the regular auto-managed websocket, and injected the bundled texthooker websocket URL so it connects to the annotation feed by default.
|
|
||||||
|
|
||||||
Also added focused regression coverage and regenerated the checked-in config examples.
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:END -->
|
|
||||||
@@ -1,43 +0,0 @@
|
|||||||
---
|
|
||||||
id: TASK-104
|
|
||||||
title: Mirror overlay annotation hover behavior in vendored texthooker
|
|
||||||
status: Done
|
|
||||||
assignee:
|
|
||||||
- codex
|
|
||||||
created_date: '2026-03-06 21:45'
|
|
||||||
updated_date: '2026-03-06 21:45'
|
|
||||||
labels:
|
|
||||||
- texthooker
|
|
||||||
- subtitle
|
|
||||||
- websocket
|
|
||||||
dependencies:
|
|
||||||
- TASK-103
|
|
||||||
references:
|
|
||||||
- /home/sudacode/projects/japanese/SubMiner/src/core/services/subtitle-ws.ts
|
|
||||||
- /home/sudacode/projects/japanese/SubMiner/vendor/texthooker-ui/src/components/App.svelte
|
|
||||||
- /home/sudacode/projects/japanese/SubMiner/vendor/texthooker-ui/src/line-markup.ts
|
|
||||||
- /home/sudacode/projects/japanese/SubMiner/vendor/texthooker-ui/src/app.css
|
|
||||||
priority: medium
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Bring bundled texthooker annotation rendering closer to the visible overlay. Keep the lightweight texthooker UX, but preserve token metadata for hover, match overlay color-precedence rules across known/N+1/name/frequency/JLPT, expose name-match highlighting as a toggle, and emit a structured annotation payload on the dedicated websocket so non-SubMiner clients can treat it as an API.
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 Annotation websocket payload includes both rendered `sentence` HTML and structured token metadata for generic clients.
|
|
||||||
- [x] #2 Vendored texthooker preserves annotation metadata attrs needed for hover labels and uses overlay-matching color precedence rules.
|
|
||||||
- [x] #3 Vendored texthooker supports character-name highlighting with a user-facing toggle and standalone-web note.
|
|
||||||
- [x] #4 Hovering annotated texthooker tokens reveals JLPT/frequency metadata without adding the full overlay popup workflow.
|
|
||||||
- [x] #5 Focused serializer, texthooker markup, socket parsing, CSS, and build verification pass.
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Final Summary
|
|
||||||
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
|
||||||
Extended the dedicated annotation websocket payload to ship `version`, plain `text`, rendered `sentence`, and structured `tokens` metadata while keeping backward-compatible `sentence` consumers working. Updated the vendored texthooker to preserve hover metadata attrs, follow overlay color precedence for known/N+1/name/frequency/JLPT annotations, add a character-name highlight toggle plus standalone-web dictionary note, and render lightweight hover labels for frequency/JLPT metadata. Added focused regression coverage and rebuilt both the vendored texthooker bundle and SubMiner.
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:END -->
|
|
||||||
@@ -1,47 +0,0 @@
|
|||||||
---
|
|
||||||
id: TASK-105
|
|
||||||
title: Stop local docs artifact writes after docs repo split
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2026-03-07 00:00'
|
|
||||||
updated_date: '2026-03-07 00:20'
|
|
||||||
labels: []
|
|
||||||
dependencies: []
|
|
||||||
priority: medium
|
|
||||||
ordinal: 10500
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
|
|
||||||
Now that user-facing docs live in `../subminer-docs`, first-party scripts in this repo should not keep writing generated artifacts into the local `docs/` tree.
|
|
||||||
|
|
||||||
Scope:
|
|
||||||
|
|
||||||
- Audit first-party scripts/automation for writes to `docs/`.
|
|
||||||
- Keep repo-local outputs only where they are still intentionally owned by this repo.
|
|
||||||
- Repoint generated docs artifacts to `../subminer-docs` when that is the maintained source of truth.
|
|
||||||
- Add regression coverage for the config-example generation path contract.
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
|
|
||||||
- [x] #1 The config-example generator no longer writes to `docs/public/config.example.jsonc` inside this repo.
|
|
||||||
- [x] #2 When `../subminer-docs` exists, the generator updates `../subminer-docs/public/config.example.jsonc`.
|
|
||||||
- [x] #3 Automated coverage guards the output-path contract so local docs writes do not regress.
|
|
||||||
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Final Summary
|
|
||||||
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
|
||||||
|
|
||||||
Removed the first-party local `docs/public` config-example write path from `src/generate-config-example.ts` and replaced it with sibling-docs-repo detection that targets `../subminer-docs/public/config.example.jsonc` only when that repo exists.
|
|
||||||
|
|
||||||
Added a project-local regression suite for output-path resolution and artifact writing, wired that suite into the maintained config test lane, and removed the stale generated `docs/public/config.example.jsonc` artifact from the working tree.
|
|
||||||
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:END -->
|
|
||||||
@@ -1,69 +0,0 @@
|
|||||||
---
|
|
||||||
id: TASK-106
|
|
||||||
title: Add first-run setup gate and auto-install flow
|
|
||||||
status: Done
|
|
||||||
assignee:
|
|
||||||
- codex
|
|
||||||
created_date: '2026-03-07 06:10'
|
|
||||||
updated_date: '2026-03-07 06:20'
|
|
||||||
labels: []
|
|
||||||
dependencies: []
|
|
||||||
references:
|
|
||||||
- /home/sudacode/projects/japanese/SubMiner/src/main.ts
|
|
||||||
- /home/sudacode/projects/japanese/SubMiner/src/shared/setup-state.ts
|
|
||||||
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/first-run-setup-service.ts
|
|
||||||
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/first-run-setup-window.ts
|
|
||||||
- /home/sudacode/projects/japanese/SubMiner/launcher/commands/playback-command.ts
|
|
||||||
priority: high
|
|
||||||
ordinal: 10600
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Replace the current manual install flow with a first-run setup gate:
|
|
||||||
|
|
||||||
- bootstrap the default config dir/config file automatically
|
|
||||||
- detect legacy installs and mark them complete when config + Yomitan dictionaries are already present
|
|
||||||
- open a compact Catppuccin Macchiato setup popup for incomplete installs
|
|
||||||
- optionally install the mpv plugin into the default mpv location
|
|
||||||
- block launcher playback until setup completes, then resume the original playback flow
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 First app launch seeds the default config dir/config file without manual copy steps.
|
|
||||||
- [x] #2 Existing installs with config plus at least one Yomitan dictionary are auto-detected as already complete.
|
|
||||||
- [x] #3 Incomplete installs get a first-run setup popup with mpv plugin install, Yomitan settings, refresh, skip, and finish actions.
|
|
||||||
- [x] #4 Launcher playback waits for setup completion and does not start mpv while setup is incomplete.
|
|
||||||
- [x] #5 Plugin assets are packaged into the Electron bundle and regression tests cover the new flow.
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
Added shared setup-state/config/mpv path helpers so Electron and launcher read the same onboarding state file.
|
|
||||||
|
|
||||||
Introduced a first-run setup service plus compact BrowserWindow popup using Catppuccin Macchiato styling. The popup supports optional mpv plugin install, opening Yomitan settings, status refresh, skip-plugin, and gated finish once at least one Yomitan dictionary is installed.
|
|
||||||
|
|
||||||
Electron startup now bootstraps a default config file, auto-detects legacy-complete installs, adds `--setup` CLI support, exposes a tray `Complete Setup` action while incomplete, and avoids reopening setup once completion is recorded.
|
|
||||||
|
|
||||||
Launcher playback now checks the shared setup-state file before starting mpv. If setup is incomplete, it launches the app with `--background --setup`, waits for completion, and only then proceeds.
|
|
||||||
|
|
||||||
Verification:
|
|
||||||
|
|
||||||
- `bun run typecheck`
|
|
||||||
- `bun run test:fast`
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
||||||
## Final Summary
|
|
||||||
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
|
||||||
SubMiner now supports a download-and-launch install flow.
|
|
||||||
|
|
||||||
- First launch auto-creates config and opens setup only when needed.
|
|
||||||
- Existing users with working installs are silently migrated to completed setup.
|
|
||||||
- The setup popup handles optional mpv plugin install and Yomitan dictionary readiness.
|
|
||||||
- Launcher playback is gated on setup completion and resumes automatically afterward.
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:END -->
|
|
||||||
@@ -1,42 +0,0 @@
|
|||||||
---
|
|
||||||
id: TASK-107
|
|
||||||
title: 'Fix Yomitan scan-token fallback fragmentation on exact-source misses'
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2026-03-07 01:10'
|
|
||||||
updated_date: '2026-03-07 01:12'
|
|
||||||
labels: []
|
|
||||||
dependencies: []
|
|
||||||
priority: high
|
|
||||||
ordinal: 9007
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
|
|
||||||
Left-to-right Yomitan scanning can emit bogus fallback tokens when `termsFind` returns entries but none of their headwords carries an exact primary source for the consumed substring. Repro: `だが それでも届かぬ高みがあった` currently yields trailing fragments like `があ` / `た`, which blocks the real `あった` token from receiving frequency highlighting.
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
|
|
||||||
- [x] #1 Scanner skips `termsFind` fallback entries that are not backed by an exact primary source for the consumed substring.
|
|
||||||
- [x] #2 Repro line no longer yields bogus trailing fragments such as `があ`.
|
|
||||||
- [x] #3 Regression coverage added for the scan-token path.
|
|
||||||
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Final Summary
|
|
||||||
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
|
||||||
|
|
||||||
Removed the scan-token helper fallback that previously emitted a token from the first returned headword even when Yomitan did not report an exact primary source for the consumed substring. Added a focused regression test covering `だが それでも届かぬ高みがあった`, ensuring bogus `があ` fragmentation is skipped so the later `あった` exact match can still be tokenized and highlighted.
|
|
||||||
|
|
||||||
Verification:
|
|
||||||
|
|
||||||
- `bun test src/core/services/tokenizer/yomitan-parser-runtime.test.ts src/core/services/tokenizer.test.ts --timeout 20000`
|
|
||||||
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:END -->
|
|
||||||
@@ -1,43 +0,0 @@
|
|||||||
---
|
|
||||||
id: TASK-108
|
|
||||||
title: 'Exclude single kana tokens from frequency highlighting'
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2026-03-07 01:18'
|
|
||||||
updated_date: '2026-03-07 01:22'
|
|
||||||
labels: []
|
|
||||||
dependencies: []
|
|
||||||
priority: medium
|
|
||||||
ordinal: 9008
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
|
|
||||||
Suppress frequency highlighting for single-character hiragana or katakana tokens. Scope is frequency-only: known/N+1/JLPT behavior stays unchanged.
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
|
|
||||||
- [x] #1 Single-character hiragana tokens do not retain `frequencyRank`.
|
|
||||||
- [x] #2 Single-character katakana tokens do not retain `frequencyRank`.
|
|
||||||
- [x] #3 Regression coverage exists at annotation-stage and tokenizer levels.
|
|
||||||
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Final Summary
|
|
||||||
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
|
||||||
|
|
||||||
Added a frequency-only suppression rule for single-character kana tokens based on token `surface`, so bogus merged fragments like `た` and standalone one-character kana no longer keep `frequencyRank`. Regression coverage now exists both in the annotation stage and in the tokenizer path, while multi-character tokens and N+1/JLPT behavior remain unchanged.
|
|
||||||
|
|
||||||
Verification:
|
|
||||||
|
|
||||||
- `bun test src/core/services/tokenizer/annotation-stage.test.ts --timeout 20000`
|
|
||||||
- `bun test src/core/services/tokenizer.test.ts --timeout 20000`
|
|
||||||
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:END -->
|
|
||||||
@@ -1,51 +0,0 @@
|
|||||||
---
|
|
||||||
id: TASK-110
|
|
||||||
title: Replace vendored Yomitan with submodule-built Chrome artifact workflow
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2026-03-07 11:05'
|
|
||||||
updated_date: '2026-03-07 11:22'
|
|
||||||
labels:
|
|
||||||
- yomitan
|
|
||||||
- build
|
|
||||||
- release
|
|
||||||
dependencies: []
|
|
||||||
priority: high
|
|
||||||
ordinal: 9010
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
|
|
||||||
Replace the checked-in `vendor/yomitan` release tree with a `subminer-yomitan` git submodule. Build Yomitan from source, extract the Chromium zip artifact into a stable local build directory, and make SubMiner dev/runtime/tests/release packaging load that extracted extension instead of the source tree or vendored files.
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
|
|
||||||
- [x] #1 Repo tracks Yomitan as a git submodule instead of committed extension files under `vendor/yomitan`.
|
|
||||||
- [x] #2 SubMiner has a reproducible build/extract step that produces a local Chromium extension directory from `subminer-yomitan`.
|
|
||||||
- [x] #3 Dev/runtime/tests resolve the extracted build output as the default Yomitan extension path.
|
|
||||||
- [x] #4 Release packaging includes the extracted Chromium extension files instead of the old vendored tree.
|
|
||||||
- [x] #5 Docs and verification commands reflect the new workflow.
|
|
||||||
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Final Summary
|
|
||||||
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
|
||||||
|
|
||||||
Replaced the checked-in `vendor/yomitan` extension tree with a `vendor/subminer-yomitan` git submodule and added a reproducible `bun run build:yomitan` workflow that builds `yomitan-chrome.zip`, extracts it into `build/yomitan`, and reuses a source-state stamp to skip redundant rebuilds. Runtime path resolution, helper CLIs, Yomitan integration tests, packaging, CI cache keys, and README source-build notes now all target that generated artifact instead of the old vendored files.
|
|
||||||
|
|
||||||
Verification:
|
|
||||||
|
|
||||||
- `bun run build:yomitan`
|
|
||||||
- `bun test src/core/services/yomitan-extension-paths.test.ts src/core/services/yomitan-structured-content-generator.test.ts src/yomitan-translator-sort.test.ts`
|
|
||||||
- `bun run typecheck`
|
|
||||||
- `bun run build`
|
|
||||||
- `bun run test:core:src`
|
|
||||||
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:END -->
|
|
||||||
@@ -1,71 +0,0 @@
|
|||||||
---
|
|
||||||
id: TASK-111
|
|
||||||
title: Fix subtitle-cycle OSD labels for J keybindings
|
|
||||||
status: Done
|
|
||||||
assignee:
|
|
||||||
- Codex
|
|
||||||
created_date: '2026-03-07 23:45'
|
|
||||||
updated_date: '2026-03-08 00:06'
|
|
||||||
labels: []
|
|
||||||
dependencies: []
|
|
||||||
references:
|
|
||||||
- /Users/sudacode/projects/japanese/SubMiner/src/core/services/ipc-command.ts
|
|
||||||
- /Users/sudacode/projects/japanese/SubMiner/src/core/services/mpv.ts
|
|
||||||
- >-
|
|
||||||
/Users/sudacode/projects/japanese/SubMiner/src/core/services/ipc-command.test.ts
|
|
||||||
- >-
|
|
||||||
/Users/sudacode/projects/japanese/SubMiner/src/core/services/mpv-control.test.ts
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
When cycling subtitle tracks with the default J/Shift+J keybindings, the mpv OSD currently shows raw template text like `${sid}` instead of a resolved subtitle label. Update the keybinding OSD behavior so users see the active subtitle selection clearly when cycling tracks, and ensure placeholder-based OSD messages sent through the mpv client API render correctly.
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 Pressing the primary subtitle cycle keybinding shows a resolved subtitle label on the OSD instead of a raw `${sid}` placeholder.
|
|
||||||
- [x] #2 Pressing the secondary subtitle cycle keybinding shows a resolved subtitle label on the OSD instead of a raw `${secondary-sid}` placeholder.
|
|
||||||
- [x] #3 Proxy OSD messages that rely on mpv property expansion render resolved values when sent through the mpv client API.
|
|
||||||
- [x] #4 Regression tests cover the subtitle-cycle OSD behavior and the placeholder-expansion OSD path.
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Plan
|
|
||||||
|
|
||||||
<!-- SECTION:PLAN:BEGIN -->
|
|
||||||
1. Add focused failing tests for subtitle-cycle OSD labels and mpv placeholder-expansion behavior.
|
|
||||||
2. Update the IPC mpv command handler to resolve primary and secondary subtitle track labels from mpv `track-list` data after cycling subtitle tracks.
|
|
||||||
3. Update the mpv OSD runtime path so placeholder-based `show-text` messages sent through the client API opt into property expansion.
|
|
||||||
4. Run focused tests, then the relevant core test lane, and record results in the task notes.
|
|
||||||
<!-- SECTION:PLAN:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
Initial triage: `ipc-command.ts` emits raw `${sid}`/`${secondary-sid}` placeholder strings, and `showMpvOsdRuntime` sends `show-text` via mpv client API without enabling property expansion.
|
|
||||||
|
|
||||||
User approved implementation plan on 2026-03-07.
|
|
||||||
|
|
||||||
Implementation: proxy mpv command OSD now supports an async resolver so subtitle track cycling can show human-readable labels instead of raw `${sid}` placeholders.
|
|
||||||
|
|
||||||
Implementation: `showMpvOsdRuntime` now prefixes placeholder-based messages with mpv client-api `expand-properties`, which fixes raw `${...}` OSD output for subtitle delay/position messages.
|
|
||||||
|
|
||||||
Testing: `bun test src/core/services/ipc-command.test.ts src/core/services/mpv-control.test.ts src/main/runtime/mpv-proxy-osd.test.ts src/main/runtime/ipc-mpv-command-main-deps.test.ts src/main/runtime/ipc-bridge-actions.test.ts src/main/runtime/ipc-bridge-actions-main-deps.test.ts src/main/runtime/composers/ipc-runtime-composer.test.ts` passed.
|
|
||||||
|
|
||||||
Testing: `bun x tsc --noEmit` passed.
|
|
||||||
|
|
||||||
Testing: `bun run test:core:src` passed (423 pass, 6 skip, 0 fail).
|
|
||||||
|
|
||||||
Docs: no update required because no checked-in docs or help text describe the J/Shift+J OSD output behavior.
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
||||||
## Final Summary
|
|
||||||
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
|
||||||
Fixed subtitle-cycle OSD handling for the default J/Shift+J keybindings. The IPC mpv command path now supports resolving proxy OSD text asynchronously, and the main-runtime resolver reads mpv `track-list` state so primary and secondary subtitle cycling show human-readable track labels instead of raw `${sid}` / `${secondary-sid}` placeholders.
|
|
||||||
|
|
||||||
Also fixed the lower-level mpv OSD transport so placeholder-based `show-text` messages sent through the client API opt into `expand-properties`. That preserves existing template-based OSD messages like subtitle delay and subtitle position without leaking the raw `${...}` syntax.
|
|
||||||
|
|
||||||
Added regression coverage for the async proxy OSD path, the placeholder-expansion `showMpvOsdRuntime` path, and the runtime subtitle-track label resolver. Verification run: `bun x tsc --noEmit`; focused mpv/IPC tests; and the maintained `bun run test:core:src` lane (423 pass, 6 skip, 0 fail).
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:END -->
|
|
||||||
@@ -1,61 +0,0 @@
|
|||||||
---
|
|
||||||
id: TASK-112
|
|
||||||
title: Address Claude review items on PR 15
|
|
||||||
status: Done
|
|
||||||
assignee:
|
|
||||||
- codex
|
|
||||||
created_date: '2026-03-08 00:11'
|
|
||||||
updated_date: '2026-03-08 00:12'
|
|
||||||
labels:
|
|
||||||
- pr-review
|
|
||||||
- ci
|
|
||||||
dependencies: []
|
|
||||||
references:
|
|
||||||
- .github/workflows/release.yml
|
|
||||||
- .github/workflows/ci.yml
|
|
||||||
- .gitmodules
|
|
||||||
- >-
|
|
||||||
backlog/tasks/task-101 -
|
|
||||||
Index-AniList-character-alternative-names-in-the-character-dictionary.md
|
|
||||||
priority: medium
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Review Claude's PR feedback on PR #15, implement only the technically valid fixes on the current branch, and document which comments are non-actionable or already acceptable.
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 Validated Claude's concrete PR review items against current branch state and repo conventions
|
|
||||||
- [x] #2 Implemented the accepted fixes with regression coverage or verification where applicable
|
|
||||||
- [x] #3 Documented which review items are non-blocking or intentionally left unchanged
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Plan
|
|
||||||
|
|
||||||
<!-- SECTION:PLAN:BEGIN -->
|
|
||||||
1. Validate each Claude review item against current branch files and repo workflow.
|
|
||||||
2. Patch release quality-gate to match CI ordering and add explicit typecheck.
|
|
||||||
3. Remove duplicate .gitmodules stanza and normalize the TASK-101 reference path through Backlog MCP.
|
|
||||||
4. Run relevant verification for workflow/config metadata changes and record which review items remain non-actionable.
|
|
||||||
<!-- SECTION:PLAN:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
User asked to address Claude's PR comments on PR #15 and assess whether any action items remain. Treat review suggestions skeptically; only fix validated defects.
|
|
||||||
|
|
||||||
Validated Claude's five review items. Fixed release workflow ordering/typecheck, removed the duplicate .gitmodules entry, and normalized TASK-101 references to repo-relative paths via Backlog MCP.
|
|
||||||
|
|
||||||
Left the vendor/subminer-yomitan branch-pin suggestion unchanged. The committed submodule SHA already controls reproducibility; adding a branch would only affect update ergonomics and was not required to address a concrete defect.
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
||||||
## Final Summary
|
|
||||||
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
|
||||||
Validated Claude's PR #15 review summary against the current branch and applied the actionable fixes. In `.github/workflows/release.yml`, the release `quality-gate` job now restores the dependency cache before installation, no longer installs twice, and runs `bun run typecheck` before the fast test suite to match CI expectations. In `.gitmodules`, removed the duplicate `vendor/yomitan-jlpt-vocab` stanza with the conflicting duplicate path. Through Backlog MCP, updated `TASK-101` references from an absolute local path to repo-relative paths so the task metadata is portable across contributors.
|
|
||||||
|
|
||||||
Verification: `git diff --check`, `git config -f .gitmodules --get-regexp '^submodule\..*\.path$'`, `bun run typecheck`, and `bun run test:fast` all passed. `bun run format:check` still fails on many pre-existing unrelated files already present on the branch, including multiple backlog task files and existing source/docs files; this review patch did not attempt a repo-wide formatting sweep.
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:END -->
|
|
||||||
@@ -1,59 +0,0 @@
|
|||||||
---
|
|
||||||
id: TASK-113
|
|
||||||
title: Scope make pretty to maintained source files
|
|
||||||
status: Done
|
|
||||||
assignee:
|
|
||||||
- codex
|
|
||||||
created_date: '2026-03-08 00:20'
|
|
||||||
updated_date: '2026-03-08 00:22'
|
|
||||||
labels:
|
|
||||||
- tooling
|
|
||||||
- formatting
|
|
||||||
dependencies: []
|
|
||||||
references:
|
|
||||||
- Makefile
|
|
||||||
- package.json
|
|
||||||
priority: medium
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Change the `make pretty` workflow so it formats only the maintained source/config files we intentionally keep under Prettier, instead of sweeping backlog/docs/generated content across the whole repository.
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 `make pretty` formats only the approved maintained source/config paths
|
|
||||||
- [x] #2 The allowlist is reusable for check/write flows instead of duplicating path logic
|
|
||||||
- [x] #3 Verification shows the scoped formatting command targets the intended files without touching backlog or vendored content
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Plan
|
|
||||||
|
|
||||||
<!-- SECTION:PLAN:BEGIN -->
|
|
||||||
1. Inspect current Prettier config/ignore behavior and keep the broad repo-wide format command unchanged.
|
|
||||||
2. Add a reusable scoped Prettier script that targets maintained source/config paths only.
|
|
||||||
3. Update `make pretty` to call the scoped script.
|
|
||||||
4. Verify the scoped command resolves only intended files and does not traverse backlog or vendor paths.
|
|
||||||
<!-- SECTION:PLAN:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
User approved the allowlist approach: keep repo-wide `format` intact, make `make pretty` use a maintained-path formatter scope.
|
|
||||||
|
|
||||||
Added `scripts/prettier-scope.sh` as the single allowlist for scoped Prettier paths and wired `format:src` / `format:check:src` to it.
|
|
||||||
|
|
||||||
Updated `make pretty` to call `bun run format:src`. Verified with `make -n pretty` and shell tracing that the helper only targets the maintained allowlist and does not traverse `backlog/` or `vendor/`.
|
|
||||||
|
|
||||||
Excluded `Makefile` and `.prettierignore` from the allowlist after verification showed Prettier cannot infer parsers for them.
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
||||||
## Final Summary
|
|
||||||
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
|
||||||
Scoped the repo's day-to-day formatting entry point without changing the existing broad repo-wide Prettier scripts. Added `scripts/prettier-scope.sh` as the shared allowlist for maintained source/config paths (`.github`, `build`, `launcher`, `scripts`, `src`, plus selected root JSON config files), added `format:src` and `format:check:src` in `package.json`, and updated `make pretty` to run the scoped formatter.
|
|
||||||
|
|
||||||
Verification: `make -n pretty` now resolves to `bun run format:src`. `bash -n scripts/prettier-scope.sh` passed, and shell-traced `bash -x scripts/prettier-scope.sh --check` confirmed the exact allowlist passed to Prettier. `bun run format:check:src` fails only because existing files inside the allowed source scope are not currently formatted; it no longer touches `backlog/` or `vendor/`.
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:END -->
|
|
||||||
@@ -1,62 +0,0 @@
|
|||||||
---
|
|
||||||
id: TASK-114
|
|
||||||
title: Fix failing CI checks on PR 15
|
|
||||||
status: Done
|
|
||||||
assignee:
|
|
||||||
- codex
|
|
||||||
created_date: '2026-03-08 00:34'
|
|
||||||
updated_date: '2026-03-08 00:37'
|
|
||||||
labels:
|
|
||||||
- ci
|
|
||||||
- test
|
|
||||||
dependencies: []
|
|
||||||
references:
|
|
||||||
- src/renderer/subtitle-render.test.ts
|
|
||||||
- src/renderer/style.css
|
|
||||||
- .github/workflows/ci.yml
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Investigate the failing GitHub Actions CI run for PR #15 on branch `yomitan-fork`, fix the underlying test or code regression, and verify the affected local test/CI lane passes.
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 Identified the concrete failing CI job and captured the relevant failure context
|
|
||||||
- [x] #2 Implemented the minimal code or test change needed to resolve the CI failure
|
|
||||||
- [x] #3 Verified the affected local test target and the broader fast CI test lane pass
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Plan
|
|
||||||
|
|
||||||
<!-- SECTION:PLAN:BEGIN -->
|
|
||||||
1. Inspect the failing GitHub Actions run and confirm the exact failing test/assertion.
|
|
||||||
2. Reproduce the failing renderer stylesheet test locally and compare the assertion against current CSS.
|
|
||||||
3. Apply the minimal test or stylesheet fix needed to restore the intended hover/selection behavior.
|
|
||||||
4. Re-run the targeted renderer test, then re-run `bun run test` to verify the fast CI lane is green.
|
|
||||||
<!-- SECTION:PLAN:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
GitHub Actions run 22810400921 failed in job build-test-audit, step `Test suite (source)`, with a single failing test: `JLPT CSS rules use underline-only styling in renderer stylesheet` in src/renderer/subtitle-render.test.ts.
|
|
||||||
|
|
||||||
Reproduced the failing test locally with `bun test src/renderer/subtitle-render.test.ts`. The failure was a brittle stylesheet assertion, not a renderer behavior regression.
|
|
||||||
|
|
||||||
Updated the renderer stylesheet test helper to split selectors safely across `:is(...)` commas and normalize multiline selector whitespace, then switched the failing hover/JLPT assertions to inspect extracted rule blocks instead of matching the entire CSS file text.
|
|
||||||
|
|
||||||
Verification passed with `bun test src/renderer/subtitle-render.test.ts` and `bun run test`.
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
||||||
## Final Summary
|
|
||||||
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
|
||||||
Investigated GitHub Actions CI run `22810400921` for PR #15 and confirmed the only failing job was `build-test-audit`, step `Test suite (source)`, with a single failure in `src/renderer/subtitle-render.test.ts` (`JLPT CSS rules use underline-only styling in renderer stylesheet`).
|
|
||||||
|
|
||||||
The renderer CSS itself was still correct; the regression was in the test helper. `extractClassBlock` was splitting selector lists on every comma, which breaks selectors containing `:is(...)`, and the affected assertions fell back to brittle whole-file regex matching against a multiline selector. Fixed the test by teaching the helper to split selectors only at top-level commas, normalizing selector whitespace around multiline `:not(...)` / `:is(...)` clauses, and asserting on extracted rule blocks for the plain-word hover and JLPT-only hover/selection rules.
|
|
||||||
|
|
||||||
Verification: `bun test src/renderer/subtitle-render.test.ts` passed, and `bun run test` passed end to end (the same fast lane that failed in CI).
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:END -->
|
|
||||||
@@ -1,59 +0,0 @@
|
|||||||
---
|
|
||||||
id: TASK-115
|
|
||||||
title: Refresh subminer-docs contributor docs for current repo workflow
|
|
||||||
status: Done
|
|
||||||
assignee:
|
|
||||||
- codex
|
|
||||||
created_date: '2026-03-08 00:40'
|
|
||||||
updated_date: '2026-03-08 00:42'
|
|
||||||
labels:
|
|
||||||
- docs
|
|
||||||
dependencies: []
|
|
||||||
references:
|
|
||||||
- ../subminer-docs/development.md
|
|
||||||
- ../subminer-docs/README.md
|
|
||||||
- Makefile
|
|
||||||
- package.json
|
|
||||||
priority: medium
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Update the sibling `subminer-docs` repo so contributor/development docs match the current SubMiner repo workflow after the docs split and recent tooling changes, including removing stale in-repo docs build steps and documenting the scoped formatting command.
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 Contributor docs in `subminer-docs` no longer reference stale in-repo docs build commands for the app repo
|
|
||||||
- [x] #2 Contributor docs mention the current scoped formatting workflow (`make pretty` / `format:src`) where relevant
|
|
||||||
- [x] #3 Removed stale or no-longer-needed instructions that no longer match the current repo layout
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Plan
|
|
||||||
|
|
||||||
<!-- SECTION:PLAN:BEGIN -->
|
|
||||||
1. Inspect `subminer-docs` for contributor/development instructions that drifted after the docs repo split and recent tooling changes.
|
|
||||||
2. Update contributor docs to remove stale app-repo docs commands and document the current scoped formatting workflow.
|
|
||||||
3. Verify the modified docs page and build the docs site from the sibling docs repo when local dependencies are available.
|
|
||||||
<!-- SECTION:PLAN:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
Detected concrete doc drift in `subminer-docs/development.md`: stale in-repo docs build commands and no mention of the scoped `make pretty` formatter.
|
|
||||||
|
|
||||||
Updated `../subminer-docs/development.md` to remove stale app-repo docs build steps from the local gate, document `make pretty` / `format:check:src`, and point docs-site work to the sibling docs repo explicitly.
|
|
||||||
|
|
||||||
Installed docs repo dependencies locally with `bun install` and verified the docs site with `bun run docs:build` in `../subminer-docs`.
|
|
||||||
|
|
||||||
Did not change `../subminer-docs/README.md`; it was already accurate for the docs repo itself.
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
||||||
## Final Summary
|
|
||||||
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
|
||||||
Refreshed the contributor/development docs in the sibling `subminer-docs` repo to match the current SubMiner workflow. In `development.md`, removed the stale app-repo `bun run docs:build` step from the local CI-equivalent gate, added an explicit note to run docs builds from `../subminer-docs` when docs change, documented the scoped formatting workflow (`make pretty` and `bun run format:check:src`), and replaced the old in-repo `make docs*` instructions with the correct sibling-repo `bun run docs:*` commands. Also updated the Makefile reference to include `make pretty` and removed the obsolete `make docs-dev` entry.
|
|
||||||
|
|
||||||
Verification: installed docs repo dependencies with `bun install` in `../subminer-docs` and ran `bun run docs:build` successfully. Left `README.md` unchanged because it was already accurate for the standalone docs repo.
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:END -->
|
|
||||||
@@ -1,53 +0,0 @@
|
|||||||
---
|
|
||||||
id: TASK-116
|
|
||||||
title: Audit branch commits for remaining subminer-docs updates
|
|
||||||
status: Done
|
|
||||||
assignee:
|
|
||||||
- codex
|
|
||||||
created_date: '2026-03-08 00:46'
|
|
||||||
updated_date: '2026-03-08 00:48'
|
|
||||||
labels:
|
|
||||||
- docs
|
|
||||||
dependencies: []
|
|
||||||
references:
|
|
||||||
- ../subminer-docs/installation.md
|
|
||||||
- ../subminer-docs/troubleshooting.md
|
|
||||||
- src/core/services/yomitan-extension-paths.ts
|
|
||||||
- scripts/build-yomitan.mjs
|
|
||||||
priority: medium
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Review recent `yomitan-fork` commits against the sibling `subminer-docs` repo, identify any concrete documentation drift that remains after the earlier contributor-doc updates, and patch the docs for behavior/tooling changes that are now outdated or misleading.
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 Reviewed recent branch commits for user-facing or contributor-facing changes that may require docs updates
|
|
||||||
- [x] #2 Updated `subminer-docs` pages where branch changes introduced concrete doc drift
|
|
||||||
- [x] #3 Verified the docs site still builds after the updates
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Plan
|
|
||||||
|
|
||||||
<!-- SECTION:PLAN:BEGIN -->
|
|
||||||
1. Review branch commit themes against `subminer-docs` and identify only concrete drift introduced by recent workflow/runtime changes.
|
|
||||||
2. Patch docs for the Yomitan submodule build workflow, updated source-build prerequisites, and current runtime Yomitan search paths/manual fallback path.
|
|
||||||
3. Rebuild the docs site to verify the updated pages render cleanly.
|
|
||||||
<!-- SECTION:PLAN:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
Concrete remaining drift after commit audit: installation/development docs still understate the Node/npm + submodule requirements for the Yomitan build flow, and troubleshooting still points at obsolete `vendor/yomitan` / `extensions/yomitan` paths.
|
|
||||||
|
|
||||||
Audited branch commits against subminer-docs coverage. Existing docs already cover first-run setup, texthooker startup/annotated websocket config, AniList merged character dictionaries, configurable collapsible sections, and subtitle name highlighting. Patched remaining drift around source-build prerequisites and Yomitan build/install paths in installation.md, development.md, and troubleshooting.md. Verified with `bun run docs:build` in ../subminer-docs.
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
||||||
## Final Summary
|
|
||||||
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
|
||||||
Audited branch commits for missing documentation updates in ../subminer-docs. Updated installation, development, and troubleshooting docs to match the current Yomitan submodule build flow, source-build prerequisites, and runtime extension search/manual fallback paths. Confirmed other recent branch features were already documented and rebuilt the docs site successfully.
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:END -->
|
|
||||||
@@ -3,10 +3,10 @@ id: TASK-87
|
|||||||
title: >-
|
title: >-
|
||||||
Codebase health: harden verification and retire dead architecture identified
|
Codebase health: harden verification and retire dead architecture identified
|
||||||
in the March 2026 review
|
in the March 2026 review
|
||||||
status: In Progress
|
status: To Do
|
||||||
assignee: []
|
assignee: []
|
||||||
created_date: '2026-03-06 03:19'
|
created_date: '2026-03-06 03:19'
|
||||||
updated_date: '2026-03-06 11:11'
|
updated_date: '2026-03-06 03:20'
|
||||||
labels:
|
labels:
|
||||||
- tech-debt
|
- tech-debt
|
||||||
- tests
|
- tests
|
||||||
@@ -22,7 +22,6 @@ references:
|
|||||||
- src/translators/index.ts
|
- src/translators/index.ts
|
||||||
- src/subsync/engines.ts
|
- src/subsync/engines.ts
|
||||||
- src/subtitle/pipeline.ts
|
- src/subtitle/pipeline.ts
|
||||||
- backlog/tasks/task-87.5 - Dead-architecture-cleanup-delete-unused-registry-and-pipeline-modules-that-are-off-the-live-path.md
|
|
||||||
documentation:
|
documentation:
|
||||||
- docs/reports/2026-02-22-task-100-dead-code-report.md
|
- docs/reports/2026-02-22-task-100-dead-code-report.md
|
||||||
priority: high
|
priority: high
|
||||||
@@ -70,10 +69,3 @@ Shared review context to restate in child tasks:
|
|||||||
- src/main.ts trips many noUnusedLocals/noUnusedParameters diagnostics.
|
- src/main.ts trips many noUnusedLocals/noUnusedParameters diagnostics.
|
||||||
- src/translators/index.ts, src/subsync/engines.ts, src/subtitle/pipeline.ts, src/tokenizers/index.ts, and src/token-mergers/index.ts appeared unreferenced during review and must be re-verified before deletion.
|
- src/translators/index.ts, src/subsync/engines.ts, src/subtitle/pipeline.ts, src/tokenizers/index.ts, and src/token-mergers/index.ts appeared unreferenced during review and must be re-verified before deletion.
|
||||||
<!-- SECTION:PLAN:END -->
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
## Progress Notes
|
|
||||||
|
|
||||||
- `TASK-87.5` is complete. The isolated dead registry/pipeline modules were re-verified as off the maintained runtime path and removed.
|
|
||||||
- Live subtitle tokenization now owns the zero-width separator normalization that previously only existed in the dead subtitle pipeline path, so the cleanup did not drop that behavior.
|
|
||||||
- Verification completed for the cleanup slice with `bun test src/core/services/tokenizer.test.ts`, `bun test src/dead-architecture-cleanup.test.ts`, `bun test src/core/services/subsync.test.ts src/subsync/utils.test.ts`, `bun run tsc`, and `bun run test:src`.
|
|
||||||
- Remaining parent-task scope still includes the broader verification hardening, `src/main.ts` dead-symbol cleanup, and `src/anki-integration.ts` decomposition work tracked by the other child tasks.
|
|
||||||
|
|||||||
@@ -3,10 +3,10 @@ id: TASK-87.4
|
|||||||
title: >-
|
title: >-
|
||||||
Runtime composition root: remove dead symbols and tighten module boundaries in
|
Runtime composition root: remove dead symbols and tighten module boundaries in
|
||||||
src/main.ts
|
src/main.ts
|
||||||
status: Done
|
status: To Do
|
||||||
assignee: []
|
assignee: []
|
||||||
created_date: '2026-03-06 03:19'
|
created_date: '2026-03-06 03:19'
|
||||||
updated_date: '2026-03-06 18:10'
|
updated_date: '2026-03-06 03:21'
|
||||||
labels:
|
labels:
|
||||||
- tech-debt
|
- tech-debt
|
||||||
- runtime
|
- runtime
|
||||||
@@ -36,10 +36,10 @@ A noUnusedLocals/noUnusedParameters compile pass reports a large concentration o
|
|||||||
|
|
||||||
<!-- AC:BEGIN -->
|
<!-- AC:BEGIN -->
|
||||||
|
|
||||||
- [x] #1 src/main.ts no longer emits dead-symbol diagnostics under a noUnusedLocals/noUnusedParameters compile pass for the areas touched by this cleanup.
|
- [ ] #1 src/main.ts no longer emits dead-symbol diagnostics under a noUnusedLocals/noUnusedParameters compile pass for the areas touched by this cleanup.
|
||||||
- [x] #2 Unused imports, destructured values, and stale locals identified in the current composition root are removed or relocated without behavior changes.
|
- [ ] #2 Unused imports, destructured values, and stale locals identified in the current composition root are removed or relocated without behavior changes.
|
||||||
- [x] #3 The resulting composition root has clearer ownership boundaries for at least one runtime slice that is currently buried in the monolith.
|
- [ ] #3 The resulting composition root has clearer ownership boundaries for at least one runtime slice that is currently buried in the monolith.
|
||||||
- [x] #4 Relevant runtime and startup verification commands pass after the cleanup, and any command changes are documented if needed.
|
- [ ] #4 Relevant runtime and startup verification commands pass after the cleanup, and any command changes are documented if needed.
|
||||||
<!-- AC:END -->
|
<!-- AC:END -->
|
||||||
|
|
||||||
## Implementation Plan
|
## Implementation Plan
|
||||||
@@ -51,13 +51,3 @@ A noUnusedLocals/noUnusedParameters compile pass reports a large concentration o
|
|||||||
3. Keep changes behavior-preserving and avoid mixing unrelated cleanup outside src/main.ts unless required to compile.
|
3. Keep changes behavior-preserving and avoid mixing unrelated cleanup outside src/main.ts unless required to compile.
|
||||||
4. Verify with the updated runtime/startup test commands from TASK-87.1 plus a noUnused compile pass.
|
4. Verify with the updated runtime/startup test commands from TASK-87.1 plus a noUnused compile pass.
|
||||||
<!-- SECTION:PLAN:END -->
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
## Completion Notes
|
|
||||||
|
|
||||||
- Removed the dead import/destructure backlog from `src/main.ts` and deleted stale wrapper seams that no longer owned runtime behavior after the composer/runtime extractions.
|
|
||||||
- Tightened module boundaries so the composition root depends on the composed/public runtime surfaces it actually uses instead of retaining unused lower-level domain factory symbols.
|
|
||||||
- Cleared the remaining strict `noUnusedLocals`/`noUnusedParameters` failures in nearby touched files required for a clean repo-wide pass: `launcher/commands/playback-command.ts`, `src/anki-integration.ts`, `src/anki-integration/field-grouping-workflow.ts`, `src/core/services/tokenizer/yomitan-parser-runtime.test.ts`, and `src/main/runtime/composers/composer-contracts.type-test.ts`.
|
|
||||||
- Verification:
|
|
||||||
- `bunx tsc --noEmit -p tsconfig.typecheck.json --noUnusedLocals --noUnusedParameters --pretty false`
|
|
||||||
- `bun run test:fast`
|
|
||||||
- Commit: `e659b5d` (`refactor(runtime): remove dead symbols from composition roots`)
|
|
||||||
|
|||||||
@@ -3,10 +3,10 @@ id: TASK-87.5
|
|||||||
title: >-
|
title: >-
|
||||||
Dead architecture cleanup: delete unused registry and pipeline modules that
|
Dead architecture cleanup: delete unused registry and pipeline modules that
|
||||||
are off the live path
|
are off the live path
|
||||||
status: Done
|
status: To Do
|
||||||
assignee: []
|
assignee: []
|
||||||
created_date: '2026-03-06 03:20'
|
created_date: '2026-03-06 03:20'
|
||||||
updated_date: '2026-03-06 11:05'
|
updated_date: '2026-03-06 03:21'
|
||||||
labels:
|
labels:
|
||||||
- tech-debt
|
- tech-debt
|
||||||
- dead-code
|
- dead-code
|
||||||
@@ -40,10 +40,10 @@ The review found several modules that appear self-contained but unused from the
|
|||||||
|
|
||||||
<!-- AC:BEGIN -->
|
<!-- AC:BEGIN -->
|
||||||
|
|
||||||
- [x] #1 Each candidate module identified in the review is either removed as dead code or justified and reconnected to a real supported execution path.
|
- [ ] #1 Each candidate module identified in the review is either removed as dead code or justified and reconnected to a real supported execution path.
|
||||||
- [x] #2 Any stale exports, imports, or tests associated with the removed or consolidated modules are cleaned up so the codebase has a single obvious path for the affected behavior.
|
- [ ] #2 Any stale exports, imports, or tests associated with the removed or consolidated modules are cleaned up so the codebase has a single obvious path for the affected behavior.
|
||||||
- [x] #3 The cleanup does not regress live tokenization or subtitle sync behavior and the relevant verification commands remain green.
|
- [ ] #3 The cleanup does not regress live tokenization or subtitle sync behavior and the relevant verification commands remain green.
|
||||||
- [x] #4 Contributor-facing documentation or internal notes no longer imply that removed duplicate architecture is part of the current design.
|
- [ ] #4 Contributor-facing documentation or internal notes no longer imply that removed duplicate architecture is part of the current design.
|
||||||
<!-- AC:END -->
|
<!-- AC:END -->
|
||||||
|
|
||||||
## Implementation Plan
|
## Implementation Plan
|
||||||
@@ -55,10 +55,3 @@ The review found several modules that appear self-contained but unused from the
|
|||||||
3. Pay special attention to subtitle sync and tokenization surfaces, since duplicate architecture exists near active code.
|
3. Pay special attention to subtitle sync and tokenization surfaces, since duplicate architecture exists near active code.
|
||||||
4. Verify the relevant tokenization and subsync commands/tests still pass and update any stale docs or notes.
|
4. Verify the relevant tokenization and subsync commands/tests still pass and update any stale docs or notes.
|
||||||
<!-- SECTION:PLAN:END -->
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
- Traced imports from `src/main.ts`, `src/main/runtime/**`, `src/core/services/subsync-runner.ts`, and `src/core/services/tokenizer.ts`; confirmed the candidate registry/pipeline modules were isolated from the maintained runtime path.
|
|
||||||
- Deleted dead modules: `src/translators/index.ts`, `src/subsync/engines.ts`, `src/subtitle/pipeline.ts`, `src/subtitle/stages/{merge,normalize,tokenize}.ts`, `src/subtitle/stages/normalize.test.ts`, `src/tokenizers/index.ts`, and `src/token-mergers/index.ts`.
|
|
||||||
- Moved the useful zero-width separator normalization into the live tokenizer path in `src/core/services/tokenizer.ts` and added regression coverage plus a repository-level dead-architecture guard in `src/dead-architecture-cleanup.test.ts`.
|
|
||||||
- Verified with `bun test src/core/services/tokenizer.test.ts`, `bun test src/dead-architecture-cleanup.test.ts`, `bun test src/core/services/subsync.test.ts src/subsync/utils.test.ts`, `bun run tsc`, and `bun run test:src`.
|
|
||||||
|
|||||||
@@ -3,10 +3,10 @@ id: TASK-87.6
|
|||||||
title: >-
|
title: >-
|
||||||
Anki integration maintainability: continue decomposing the oversized
|
Anki integration maintainability: continue decomposing the oversized
|
||||||
orchestration layer
|
orchestration layer
|
||||||
status: Done
|
status: To Do
|
||||||
assignee: []
|
assignee: []
|
||||||
created_date: '2026-03-06 03:20'
|
created_date: '2026-03-06 03:20'
|
||||||
updated_date: '2026-03-06 09:23'
|
updated_date: '2026-03-06 03:21'
|
||||||
labels:
|
labels:
|
||||||
- tech-debt
|
- tech-debt
|
||||||
- anki
|
- anki
|
||||||
@@ -40,10 +40,10 @@ src/anki-integration.ts remains an oversized orchestration file even after earli
|
|||||||
|
|
||||||
<!-- AC:BEGIN -->
|
<!-- AC:BEGIN -->
|
||||||
|
|
||||||
- [x] #1 The responsibilities currently concentrated in src/anki-integration.ts are split into clearer modules or services with narrow ownership boundaries.
|
- [ ] #1 The responsibilities currently concentrated in src/anki-integration.ts are split into clearer modules or services with narrow ownership boundaries.
|
||||||
- [x] #2 The resulting orchestration surface is materially smaller and easier to review, with at least one mixed-responsibility cluster extracted behind a well-named interface.
|
- [ ] #2 The resulting orchestration surface is materially smaller and easier to review, with at least one mixed-responsibility cluster extracted behind a well-named interface.
|
||||||
- [x] #3 Existing Anki integration behavior remains covered by automated verification, including note update, field grouping, and proxy-related flows that the refactor touches.
|
- [ ] #3 Existing Anki integration behavior remains covered by automated verification, including note update, field grouping, and proxy-related flows that the refactor touches.
|
||||||
- [x] #4 Any developer-facing docs or notes needed to understand the new structure are updated in the same task.
|
- [ ] #4 Any developer-facing docs or notes needed to understand the new structure are updated in the same task.
|
||||||
<!-- AC:END -->
|
<!-- AC:END -->
|
||||||
|
|
||||||
## Implementation Plan
|
## Implementation Plan
|
||||||
|
|||||||
@@ -1,39 +0,0 @@
|
|||||||
---
|
|
||||||
id: TASK-97
|
|
||||||
title: Add configurable character-name token highlighting
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2026-03-06 10:15'
|
|
||||||
updated_date: '2026-03-06 10:15'
|
|
||||||
labels:
|
|
||||||
- subtitle
|
|
||||||
- dictionary
|
|
||||||
- renderer
|
|
||||||
dependencies: []
|
|
||||||
references:
|
|
||||||
- /home/sudacode/projects/japanese/SubMiner/src/core/services/tokenizer.ts
|
|
||||||
- >-
|
|
||||||
/home/sudacode/projects/japanese/SubMiner/src/core/services/tokenizer/yomitan-parser-runtime.ts
|
|
||||||
- /home/sudacode/projects/japanese/SubMiner/src/renderer/subtitle-render.ts
|
|
||||||
priority: medium
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Color subtitle tokens that match entries from the SubMiner character dictionary, with a configurable default color and a config toggle that disables both rendering and name-match detection work.
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 Tokens matched from the SubMiner character dictionary receive dedicated renderer styling.
|
|
||||||
- [x] #2 `subtitleStyle.nameMatchEnabled` disables name-match detection work when false.
|
|
||||||
- [x] #3 `subtitleStyle.nameMatchColor` overrides the default `#f5bde6`.
|
|
||||||
- [x] #4 Regression coverage verifies config parsing, tokenizer propagation, scanner gating, and renderer class/CSS behavior.
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Final Summary
|
|
||||||
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
|
||||||
Added configurable character-name token highlighting with default color `#f5bde6` and config gate `subtitleStyle.nameMatchEnabled`. When enabled, left-to-right Yomitan scanning tags tokens whose winning dictionary entry comes from the SubMiner character dictionary; when disabled, the tokenizer skips that metadata work and the renderer suppresses name-match styling. Added focused regression tests for config parsing, main-deps wiring, Yomitan scan gating, token propagation, renderer classes, and CSS behavior.
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:END -->
|
|
||||||
@@ -1,60 +0,0 @@
|
|||||||
---
|
|
||||||
id: TASK-98
|
|
||||||
title: Gate subtitle character-name highlighting on character dictionary enablement
|
|
||||||
status: Done
|
|
||||||
assignee:
|
|
||||||
- codex
|
|
||||||
created_date: '2026-03-07 00:54'
|
|
||||||
updated_date: '2026-03-07 00:56'
|
|
||||||
labels:
|
|
||||||
- subtitle
|
|
||||||
- character-dictionary
|
|
||||||
dependencies: []
|
|
||||||
references:
|
|
||||||
- /Users/sudacode/projects/japanese/SubMiner/src/main.ts
|
|
||||||
- /Users/sudacode/projects/japanese/SubMiner/src/core/services/tokenizer.ts
|
|
||||||
- >-
|
|
||||||
/Users/sudacode/projects/japanese/SubMiner/src/config/definitions/defaults-subtitle.ts
|
|
||||||
priority: medium
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Ensure subtitle tokenization and other annotations continue to work, but character-name lookup/highlighting is disabled whenever the AniList character dictionary feature is disabled. This avoids unnecessary name-match processing when the backing dictionary is unavailable.
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 When anilist.characterDictionary.enabled is false, subtitle tokenization does not request character-name match metadata or highlight character names.
|
|
||||||
- [x] #2 When anilist.characterDictionary.enabled is true and subtitleStyle.nameMatchEnabled is true, existing character-name matching behavior remains enabled.
|
|
||||||
- [x] #3 Subtitle tokenization, JLPT, frequency, and other non-name annotation behavior remain unchanged when character dictionaries are disabled.
|
|
||||||
- [x] #4 Automated tests cover the runtime gating behavior.
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Plan
|
|
||||||
|
|
||||||
<!-- SECTION:PLAN:BEGIN -->
|
|
||||||
1. Add a failing test in `src/main/runtime/subtitle-tokenization-main-deps.test.ts` proving name-match enablement resolves to false when `anilist.characterDictionary.enabled` is false even if `subtitleStyle.nameMatchEnabled` is true.
|
|
||||||
2. Update `src/main/runtime/subtitle-tokenization-main-deps.ts` and `src/main.ts` so subtitle tokenization only enables name matching when both the subtitle setting and the character dictionary setting are enabled.
|
|
||||||
3. Run focused Bun tests for the updated runtime deps and subtitle processing seams.
|
|
||||||
4. If verification stays green, check off acceptance criteria and record the result.
|
|
||||||
|
|
||||||
Implementation plan saved in `docs/plans/2026-03-06-character-name-gating.md`.
|
|
||||||
<!-- SECTION:PLAN:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
Created plan doc `docs/plans/2026-03-06-character-name-gating.md` after user approved the narrow runtime-gating approach. Proceeding with TDD from the subtitle tokenization main-deps seam.
|
|
||||||
|
|
||||||
Implemented the gate at the subtitle tokenization runtime-deps boundary so `getNameMatchEnabled` is false unless both `subtitleStyle.nameMatchEnabled` and `anilist.characterDictionary.enabled` are true.
|
|
||||||
|
|
||||||
Verification: `bun test src/main/runtime/subtitle-tokenization-main-deps.test.ts`, `bun test src/core/services/subtitle-processing-controller.test.ts`, `bun run typecheck`.
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
||||||
## Final Summary
|
|
||||||
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
|
||||||
Character-name lookup/highlighting is now suppressed when the AniList character dictionary is disabled, while subtitle tokenization and other annotation paths remain active. Added focused runtime-deps coverage and wired the main runtime to pass the character-dictionary enabled flag into subtitle tokenization.
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:END -->
|
|
||||||
@@ -1,38 +0,0 @@
|
|||||||
---
|
|
||||||
id: TASK-99
|
|
||||||
title: Add configurable character dictionary collapsible section open states
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2026-03-07 00:00'
|
|
||||||
updated_date: '2026-03-07 00:00'
|
|
||||||
labels:
|
|
||||||
- dictionary
|
|
||||||
- config
|
|
||||||
references:
|
|
||||||
- /home/sudacode/projects/japanese/SubMiner/src/main/character-dictionary-runtime.ts
|
|
||||||
- /home/sudacode/projects/japanese/SubMiner/src/config/resolve/integrations.ts
|
|
||||||
- /home/sudacode/projects/japanese/SubMiner/src/config/definitions/defaults-integrations.ts
|
|
||||||
priority: medium
|
|
||||||
dependencies: []
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Add per-section config for character dictionary collapsible glossary sections so Description, Character Information, and Voiced by can each default open or closed independently. Default all sections closed.
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 Config supports `anilist.characterDictionary.collapsibleSections.description`.
|
|
||||||
- [x] #2 Config supports `anilist.characterDictionary.collapsibleSections.characterInformation`.
|
|
||||||
- [x] #3 Config supports `anilist.characterDictionary.collapsibleSections.voicedBy`.
|
|
||||||
- [x] #4 Default config keeps all generated character dictionary collapsible sections closed.
|
|
||||||
- [x] #5 Regression coverage verifies config parsing/warnings and generated glossary `details.open` behavior.
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Final Summary
|
|
||||||
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
|
||||||
Added per-section open-state config under `anilist.characterDictionary.collapsibleSections` for `description`, `characterInformation`, and `voicedBy`, all defaulting to `false`. Wired the glossary generator to read those settings so generated `details.open` matches config, and added regression coverage for defaults, parsing/warnings, registry exposure, and runtime glossary output.
|
|
||||||
<!-- SECTION:FINAL_SUMMARY:END -->
|
|
||||||
@@ -5,7 +5,6 @@
|
|||||||
* Copy to $XDG_CONFIG_HOME/SubMiner/config.jsonc (or ~/.config/SubMiner/config.jsonc) and edit as needed.
|
* Copy to $XDG_CONFIG_HOME/SubMiner/config.jsonc (or ~/.config/SubMiner/config.jsonc) and edit as needed.
|
||||||
*/
|
*/
|
||||||
{
|
{
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
// Overlay Auto-Start
|
// Overlay Auto-Start
|
||||||
// When overlay connects to mpv, automatically show overlay and hide mpv subtitles.
|
// When overlay connects to mpv, automatically show overlay and hide mpv subtitles.
|
||||||
@@ -14,12 +13,11 @@
|
|||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
// Texthooker Server
|
// Texthooker Server
|
||||||
// Configure texthooker startup launch and browser opening behavior.
|
// Control whether browser opens automatically for texthooker.
|
||||||
// ==========================================
|
// ==========================================
|
||||||
"texthooker": {
|
"texthooker": {
|
||||||
"launchAtStartup": true, // Launch texthooker server automatically when SubMiner starts. Values: true | false
|
"openBrowser": true, // Open browser setting. Values: true | false
|
||||||
"openBrowser": true // Open browser setting. Values: true | false
|
}, // Control whether browser opens automatically for texthooker.
|
||||||
}, // Configure texthooker startup launch and browser opening behavior.
|
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
// WebSocket Server
|
// WebSocket Server
|
||||||
@@ -28,41 +26,17 @@
|
|||||||
// ==========================================
|
// ==========================================
|
||||||
"websocket": {
|
"websocket": {
|
||||||
"enabled": "auto", // Built-in subtitle websocket server mode. Values: auto | true | false
|
"enabled": "auto", // Built-in subtitle websocket server mode. Values: auto | true | false
|
||||||
"port": 6677 // Built-in subtitle websocket server port.
|
"port": 6677, // Built-in subtitle websocket server port.
|
||||||
}, // Built-in WebSocket server broadcasts subtitle text to connected clients.
|
}, // Built-in WebSocket server broadcasts subtitle text to connected clients.
|
||||||
|
|
||||||
// ==========================================
|
|
||||||
// Annotation WebSocket
|
|
||||||
// Dedicated annotated subtitle websocket for bundled texthooker and token-aware clients.
|
|
||||||
// Independent from websocket.auto and defaults to port 6678.
|
|
||||||
// ==========================================
|
|
||||||
"annotationWebsocket": {
|
|
||||||
"enabled": true, // Annotated subtitle websocket server enabled state. Values: true | false
|
|
||||||
"port": 6678 // Annotated subtitle websocket server port.
|
|
||||||
}, // Dedicated annotated subtitle websocket for bundled texthooker and token-aware clients.
|
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
// Logging
|
// Logging
|
||||||
// Controls logging verbosity.
|
// Controls logging verbosity.
|
||||||
// Set to debug for full runtime diagnostics.
|
// Set to debug for full runtime diagnostics.
|
||||||
// ==========================================
|
// ==========================================
|
||||||
"logging": {
|
"logging": {
|
||||||
"level": "info" // Minimum log level for runtime logging. Values: debug | info | warn | error
|
"level": "info", // Minimum log level for runtime logging. Values: debug | info | warn | error
|
||||||
}, // Controls logging verbosity.
|
}, // Controls logging verbosity. Keep this as an object; do not replace with a bare string.
|
||||||
|
|
||||||
// ==========================================
|
|
||||||
// Startup Warmups
|
|
||||||
// Background warmup controls for MeCab, Yomitan, dictionaries, and Jellyfin session.
|
|
||||||
// Disable individual warmups to defer load until first real usage.
|
|
||||||
// lowPowerMode defers all warmups except Yomitan extension.
|
|
||||||
// ==========================================
|
|
||||||
"startupWarmups": {
|
|
||||||
"lowPowerMode": false, // Defer startup warmups except Yomitan extension. Values: true | false
|
|
||||||
"mecab": true, // Warm up MeCab tokenizer at startup. Values: true | false
|
|
||||||
"yomitanExtension": true, // Warm up Yomitan extension at startup. Values: true | false
|
|
||||||
"subtitleDictionaries": true, // Warm up subtitle dictionaries at startup. Values: true | false
|
|
||||||
"jellyfinRemoteSession": true // Warm up Jellyfin remote session at startup. Values: true | false
|
|
||||||
}, // Background warmup controls for MeCab, Yomitan, dictionaries, and Jellyfin session.
|
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
// Keyboard Shortcuts
|
// Keyboard Shortcuts
|
||||||
@@ -82,7 +56,7 @@
|
|||||||
"toggleSecondarySub": "CommandOrControl+Shift+V", // Toggle secondary sub setting.
|
"toggleSecondarySub": "CommandOrControl+Shift+V", // Toggle secondary sub setting.
|
||||||
"markAudioCard": "CommandOrControl+Shift+A", // Mark audio card setting.
|
"markAudioCard": "CommandOrControl+Shift+A", // Mark audio card setting.
|
||||||
"openRuntimeOptions": "CommandOrControl+Shift+O", // Open runtime options setting.
|
"openRuntimeOptions": "CommandOrControl+Shift+O", // Open runtime options setting.
|
||||||
"openJimaku": "Ctrl+Shift+J" // Open jimaku setting.
|
"openJimaku": "Ctrl+Shift+J", // Open jimaku setting.
|
||||||
}, // Overlay keyboard shortcuts. Set a shortcut to null to disable.
|
}, // Overlay keyboard shortcuts. Set a shortcut to null to disable.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -102,7 +76,7 @@
|
|||||||
"secondarySub": {
|
"secondarySub": {
|
||||||
"secondarySubLanguages": [], // Secondary sub languages setting.
|
"secondarySubLanguages": [], // Secondary sub languages setting.
|
||||||
"autoLoadSecondarySub": false, // Auto load secondary sub setting. Values: true | false
|
"autoLoadSecondarySub": false, // Auto load secondary sub setting. Values: true | false
|
||||||
"defaultMode": "hover" // Default mode setting.
|
"defaultMode": "hover", // Default mode setting.
|
||||||
}, // Dual subtitle track options.
|
}, // Dual subtitle track options.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -114,7 +88,7 @@
|
|||||||
"alass_path": "", // Alass path setting.
|
"alass_path": "", // Alass path setting.
|
||||||
"ffsubsync_path": "", // Ffsubsync path setting.
|
"ffsubsync_path": "", // Ffsubsync path setting.
|
||||||
"ffmpeg_path": "", // Ffmpeg path setting.
|
"ffmpeg_path": "", // Ffmpeg path setting.
|
||||||
"replace": true // Replace the active subtitle file when sync completes. Values: true | false
|
"replace": true, // Replace active subtitle file when synchronization succeeds.
|
||||||
}, // Subsync engine and executable paths.
|
}, // Subsync engine and executable paths.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -122,7 +96,7 @@
|
|||||||
// Initial vertical subtitle position from the bottom.
|
// Initial vertical subtitle position from the bottom.
|
||||||
// ==========================================
|
// ==========================================
|
||||||
"subtitlePosition": {
|
"subtitlePosition": {
|
||||||
"yPercent": 10 // Y percent setting.
|
"yPercent": 10, // Y percent setting.
|
||||||
}, // Initial vertical subtitle position from the bottom.
|
}, // Initial vertical subtitle position from the bottom.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -134,11 +108,8 @@
|
|||||||
"enableJlpt": false, // Enable JLPT vocabulary level underlines. When disabled, JLPT tagging lookup and underlines are skipped. Values: true | false
|
"enableJlpt": false, // Enable JLPT vocabulary level underlines. When disabled, JLPT tagging lookup and underlines are skipped. Values: true | false
|
||||||
"preserveLineBreaks": false, // Preserve line breaks in visible overlay subtitle rendering. When false, line breaks are flattened to spaces for a single-line flow. Values: true | false
|
"preserveLineBreaks": false, // Preserve line breaks in visible overlay subtitle rendering. When false, line breaks are flattened to spaces for a single-line flow. Values: true | false
|
||||||
"autoPauseVideoOnHover": true, // Automatically pause mpv playback while hovering subtitle text, then resume on leave. Values: true | false
|
"autoPauseVideoOnHover": true, // Automatically pause mpv playback while hovering subtitle text, then resume on leave. Values: true | false
|
||||||
"autoPauseVideoOnYomitanPopup": false, // Automatically pause mpv playback while Yomitan popup is open, then resume when popup closes. Values: true | false
|
|
||||||
"hoverTokenColor": "#f4dbd6", // Hex color used for hovered subtitle token highlight in mpv.
|
"hoverTokenColor": "#f4dbd6", // Hex color used for hovered subtitle token highlight in mpv.
|
||||||
"hoverTokenBackgroundColor": "rgba(54, 58, 79, 0.84)", // CSS color used for hovered subtitle token background highlight in mpv.
|
"hoverTokenBackgroundColor": "rgba(54, 58, 79, 0.84)", // CSS color used for hovered subtitle token background highlight in mpv.
|
||||||
"nameMatchEnabled": true, // Enable subtitle token coloring for matches from the SubMiner character dictionary. Values: true | false
|
|
||||||
"nameMatchColor": "#f5bde6", // Hex color used when a subtitle token matches an entry from the SubMiner character dictionary.
|
|
||||||
"fontFamily": "M PLUS 1 Medium, Source Han Sans JP, Noto Sans CJK JP", // Font family setting.
|
"fontFamily": "M PLUS 1 Medium, Source Han Sans JP, Noto Sans CJK JP", // Font family setting.
|
||||||
"fontSize": 35, // Font size setting.
|
"fontSize": 35, // Font size setting.
|
||||||
"fontColor": "#cad3f5", // Font color setting.
|
"fontColor": "#cad3f5", // Font color setting.
|
||||||
@@ -159,22 +130,16 @@
|
|||||||
"N2": "#f5a97f", // N2 setting.
|
"N2": "#f5a97f", // N2 setting.
|
||||||
"N3": "#f9e2af", // N3 setting.
|
"N3": "#f9e2af", // N3 setting.
|
||||||
"N4": "#a6e3a1", // N4 setting.
|
"N4": "#a6e3a1", // N4 setting.
|
||||||
"N5": "#8aadf4" // N5 setting.
|
"N5": "#8aadf4", // N5 setting.
|
||||||
}, // Jlpt colors setting.
|
}, // Jlpt colors setting.
|
||||||
"frequencyDictionary": {
|
"frequencyDictionary": {
|
||||||
"enabled": false, // Enable frequency-dictionary-based highlighting based on token rank. Values: true | false
|
"enabled": false, // Enable frequency-dictionary-based highlighting based on token rank. Values: true | false
|
||||||
"sourcePath": "", // Optional absolute path to a frequency dictionary directory. If empty, built-in discovery search paths are used.
|
"sourcePath": "", // Optional absolute path to a frequency dictionary directory. If empty, SubMiner searches installed/default frequency-dictionary locations.
|
||||||
"topX": 1000, // Only color tokens with frequency rank <= topX (default: 1000).
|
"topX": 1000, // Only color tokens with frequency rank <= topX (default: 1000).
|
||||||
"mode": "single", // single: use one color for all matching tokens. banded: use color ramp by frequency band. Values: single | banded
|
"mode": "single", // single: use one color for all matching tokens. banded: use color ramp by frequency band. Values: single | banded
|
||||||
"matchMode": "headword", // headword: frequency lookup uses dictionary form. surface: lookup uses subtitle-visible token text. Values: headword | surface
|
"matchMode": "headword", // Frequency lookup text selection mode. Values: headword | surface
|
||||||
"singleColor": "#f5a97f", // Color used when frequencyDictionary.mode is `single`.
|
"singleColor": "#f5a97f", // Color used when frequencyDictionary.mode is `single`.
|
||||||
"bandedColors": [
|
"bandedColors": ["#ed8796", "#f5a97f", "#f9e2af", "#a6e3a1", "#8aadf4"], // Five colors used for rank bands when mode is `banded` (from most common to least within topX).
|
||||||
"#ed8796",
|
|
||||||
"#f5a97f",
|
|
||||||
"#f9e2af",
|
|
||||||
"#8bd5ca",
|
|
||||||
"#8aadf4"
|
|
||||||
] // Five colors used for rank bands when mode is `banded` (from most common to least within topX).
|
|
||||||
}, // Frequency dictionary setting.
|
}, // Frequency dictionary setting.
|
||||||
"secondary": {
|
"secondary": {
|
||||||
"fontFamily": "Inter, Noto Sans, Helvetica Neue, sans-serif", // Font family setting.
|
"fontFamily": "Inter, Noto Sans, Helvetica Neue, sans-serif", // Font family setting.
|
||||||
@@ -189,8 +154,8 @@
|
|||||||
"backgroundColor": "transparent", // Background color setting.
|
"backgroundColor": "transparent", // Background color setting.
|
||||||
"backdropFilter": "blur(6px)", // Backdrop filter setting.
|
"backdropFilter": "blur(6px)", // Backdrop filter setting.
|
||||||
"fontWeight": "normal", // Font weight setting.
|
"fontWeight": "normal", // Font weight setting.
|
||||||
"fontStyle": "normal" // Font style setting.
|
"fontStyle": "normal", // Font style setting.
|
||||||
} // Secondary setting.
|
}, // Secondary setting.
|
||||||
}, // Primary and secondary subtitle styling.
|
}, // Primary and secondary subtitle styling.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -204,20 +169,18 @@
|
|||||||
"url": "http://127.0.0.1:8765", // Url setting.
|
"url": "http://127.0.0.1:8765", // Url setting.
|
||||||
"pollingRate": 3000, // Polling interval in milliseconds.
|
"pollingRate": 3000, // Polling interval in milliseconds.
|
||||||
"proxy": {
|
"proxy": {
|
||||||
"enabled": true, // Enable local AnkiConnect-compatible proxy for push-based auto-enrichment. Values: true | false
|
"enabled": false, // Enable local AnkiConnect-compatible proxy for push-based auto-enrichment. Values: true | false
|
||||||
"host": "127.0.0.1", // Bind host for local AnkiConnect proxy.
|
"host": "127.0.0.1", // Bind host for local AnkiConnect proxy.
|
||||||
"port": 8766, // Bind port for local AnkiConnect proxy.
|
"port": 8766, // Bind port for local AnkiConnect proxy.
|
||||||
"upstreamUrl": "http://127.0.0.1:8765" // Upstream AnkiConnect URL proxied by local AnkiConnect proxy.
|
"upstreamUrl": "http://127.0.0.1:8765", // Upstream AnkiConnect URL proxied by local AnkiConnect proxy.
|
||||||
}, // Proxy setting.
|
}, // Proxy setting.
|
||||||
"tags": [
|
"tags": ["SubMiner"], // Tags to add to cards mined or updated by SubMiner. Provide an empty array to disable automatic tagging.
|
||||||
"SubMiner"
|
|
||||||
], // Tags to add to cards mined or updated by SubMiner. Provide an empty array to disable automatic tagging.
|
|
||||||
"fields": {
|
"fields": {
|
||||||
"audio": "ExpressionAudio", // Audio setting.
|
"audio": "ExpressionAudio", // Audio setting.
|
||||||
"image": "Picture", // Image setting.
|
"image": "Picture", // Image setting.
|
||||||
"sentence": "Sentence", // Sentence setting.
|
"sentence": "Sentence", // Sentence setting.
|
||||||
"miscInfo": "MiscInfo", // Misc info setting.
|
"miscInfo": "MiscInfo", // Misc info setting.
|
||||||
"translation": "SelectionText" // Translation setting.
|
"translation": "SelectionText", // Translation setting.
|
||||||
}, // Fields setting.
|
}, // Fields setting.
|
||||||
"ai": {
|
"ai": {
|
||||||
"enabled": false, // Enabled setting. Values: true | false
|
"enabled": false, // Enabled setting. Values: true | false
|
||||||
@@ -226,7 +189,7 @@
|
|||||||
"model": "openai/gpt-4o-mini", // Model setting.
|
"model": "openai/gpt-4o-mini", // Model setting.
|
||||||
"baseUrl": "https://openrouter.ai/api", // Base url setting.
|
"baseUrl": "https://openrouter.ai/api", // Base url setting.
|
||||||
"targetLanguage": "English", // Target language setting.
|
"targetLanguage": "English", // Target language setting.
|
||||||
"systemPrompt": "You are a translation engine. Return only the translated text with no explanations." // System prompt setting.
|
"systemPrompt": "You are a translation engine. Return only the translated text with no explanations.", // System prompt setting.
|
||||||
}, // Ai setting.
|
}, // Ai setting.
|
||||||
"media": {
|
"media": {
|
||||||
"generateAudio": true, // Generate audio setting. Values: true | false
|
"generateAudio": true, // Generate audio setting. Values: true | false
|
||||||
@@ -239,7 +202,7 @@
|
|||||||
"animatedCrf": 35, // Animated crf setting.
|
"animatedCrf": 35, // Animated crf setting.
|
||||||
"audioPadding": 0.5, // Audio padding setting.
|
"audioPadding": 0.5, // Audio padding setting.
|
||||||
"fallbackDuration": 3, // Fallback duration setting.
|
"fallbackDuration": 3, // Fallback duration setting.
|
||||||
"maxMediaDuration": 30 // Max media duration setting.
|
"maxMediaDuration": 30, // Max media duration setting.
|
||||||
}, // Media setting.
|
}, // Media setting.
|
||||||
"behavior": {
|
"behavior": {
|
||||||
"overwriteAudio": true, // Overwrite audio setting. Values: true | false
|
"overwriteAudio": true, // Overwrite audio setting. Values: true | false
|
||||||
@@ -247,7 +210,7 @@
|
|||||||
"mediaInsertMode": "append", // Media insert mode setting.
|
"mediaInsertMode": "append", // Media insert mode setting.
|
||||||
"highlightWord": true, // Highlight word setting. Values: true | false
|
"highlightWord": true, // Highlight word setting. Values: true | false
|
||||||
"notificationType": "osd", // Notification type setting.
|
"notificationType": "osd", // Notification type setting.
|
||||||
"autoUpdateNewCards": true // Automatically update newly added cards. Values: true | false
|
"autoUpdateNewCards": true, // Automatically update newly added cards. Values: true | false
|
||||||
}, // Behavior setting.
|
}, // Behavior setting.
|
||||||
"nPlusOne": {
|
"nPlusOne": {
|
||||||
"highlightEnabled": false, // Enable fast local highlighting for words already known in Anki. Values: true | false
|
"highlightEnabled": false, // Enable fast local highlighting for words already known in Anki. Values: true | false
|
||||||
@@ -256,20 +219,20 @@
|
|||||||
"decks": [], // Decks used for N+1 known-word cache scope. Supports one or more deck names.
|
"decks": [], // Decks used for N+1 known-word cache scope. Supports one or more deck names.
|
||||||
"minSentenceWords": 3, // Minimum sentence word count required for N+1 targeting (default: 3).
|
"minSentenceWords": 3, // Minimum sentence word count required for N+1 targeting (default: 3).
|
||||||
"nPlusOne": "#c6a0f6", // Color used for the single N+1 target token highlight.
|
"nPlusOne": "#c6a0f6", // Color used for the single N+1 target token highlight.
|
||||||
"knownWord": "#a6da95" // Color used for legacy known-word highlights.
|
"knownWord": "#a6da95", // Color used for legacy known-word highlights.
|
||||||
}, // N plus one setting.
|
}, // N plus one setting.
|
||||||
"metadata": {
|
"metadata": {
|
||||||
"pattern": "[SubMiner] %f (%t)" // Pattern setting.
|
"pattern": "[SubMiner] %f (%t)", // Pattern setting.
|
||||||
}, // Metadata setting.
|
}, // Metadata setting.
|
||||||
"isLapis": {
|
"isLapis": {
|
||||||
"enabled": false, // Enabled setting. Values: true | false
|
"enabled": false, // Enabled setting. Values: true | false
|
||||||
"sentenceCardModel": "Japanese sentences" // Sentence card model setting.
|
"sentenceCardModel": "Japanese sentences", // Sentence card model setting.
|
||||||
}, // Is lapis setting.
|
}, // Is lapis setting.
|
||||||
"isKiku": {
|
"isKiku": {
|
||||||
"enabled": false, // Enabled setting. Values: true | false
|
"enabled": false, // Enabled setting. Values: true | false
|
||||||
"fieldGrouping": "disabled", // Kiku duplicate-card field grouping mode. Values: auto | manual | disabled
|
"fieldGrouping": "disabled", // Kiku duplicate-card field grouping mode. Values: auto | manual | disabled
|
||||||
"deleteDuplicateInAuto": true // Delete duplicate in auto setting. Values: true | false
|
"deleteDuplicateInAuto": true, // Delete duplicate in auto setting. Values: true | false
|
||||||
} // Is kiku setting.
|
}, // Is kiku setting.
|
||||||
}, // Automatic Anki updates and media generation options.
|
}, // Automatic Anki updates and media generation options.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -279,7 +242,7 @@
|
|||||||
"jimaku": {
|
"jimaku": {
|
||||||
"apiBaseUrl": "https://jimaku.cc", // Api base url setting.
|
"apiBaseUrl": "https://jimaku.cc", // Api base url setting.
|
||||||
"languagePreference": "ja", // Preferred language used in Jimaku search. Values: ja | en | none
|
"languagePreference": "ja", // Preferred language used in Jimaku search. Values: ja | en | none
|
||||||
"maxEntryResults": 10 // Maximum Jimaku search results returned.
|
"maxEntryResults": 10, // Maximum Jimaku search results returned.
|
||||||
}, // Jimaku API configuration and defaults.
|
}, // Jimaku API configuration and defaults.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -290,33 +253,16 @@
|
|||||||
"mode": "automatic", // YouTube subtitle generation mode for the launcher script. Values: automatic | preprocess | off
|
"mode": "automatic", // YouTube subtitle generation mode for the launcher script. Values: automatic | preprocess | off
|
||||||
"whisperBin": "", // Path to whisper.cpp CLI used as fallback transcription engine.
|
"whisperBin": "", // Path to whisper.cpp CLI used as fallback transcription engine.
|
||||||
"whisperModel": "", // Path to whisper model used for fallback transcription.
|
"whisperModel": "", // Path to whisper model used for fallback transcription.
|
||||||
"primarySubLanguages": [
|
"primarySubLanguages": ["ja", "jpn"], // Comma-separated primary subtitle language priority used by the launcher.
|
||||||
"ja",
|
|
||||||
"jpn"
|
|
||||||
] // Comma-separated primary subtitle language priority used by the launcher.
|
|
||||||
}, // Defaults for subminer YouTube subtitle extraction/transcription mode.
|
}, // Defaults for subminer YouTube subtitle extraction/transcription mode.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
// Anilist
|
// Anilist
|
||||||
// Anilist API credentials and update behavior.
|
// Anilist API credentials and update behavior.
|
||||||
// Includes optional auto-sync for a merged MRU-based character dictionary in bundled Yomitan.
|
|
||||||
// Character dictionaries are keyed by AniList media ID (no season/franchise merge).
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
"anilist": {
|
"anilist": {
|
||||||
"enabled": false, // Enable AniList post-watch progress updates. Values: true | false
|
"enabled": false, // Enable AniList post-watch progress updates. Values: true | false
|
||||||
"accessToken": "", // Optional explicit AniList access token override; leave empty to use locally stored token from setup.
|
"accessToken": "", // Optional explicit AniList access token override; leave empty to use locally stored token from setup.
|
||||||
"characterDictionary": {
|
|
||||||
"enabled": false, // Enable automatic Yomitan character dictionary sync for currently watched AniList media. Values: true | false
|
|
||||||
"refreshTtlHours": 168, // Legacy setting; merged character dictionary retention is now usage-based and this value is ignored.
|
|
||||||
"maxLoaded": 3, // Maximum number of most-recently-used anime snapshots included in the merged Yomitan character dictionary.
|
|
||||||
"evictionPolicy": "delete", // Legacy setting; merged character dictionary eviction is usage-based and this value is ignored. Values: disable | delete
|
|
||||||
"profileScope": "all", // Yomitan profile scope for dictionary enable/disable updates. Values: all | active
|
|
||||||
"collapsibleSections": {
|
|
||||||
"description": false, // Open the Description section by default in character dictionary glossary entries. Values: true | false
|
|
||||||
"characterInformation": false, // Open the Character Information section by default in character dictionary glossary entries. Values: true | false
|
|
||||||
"voicedBy": false // Open the Voiced by section by default in character dictionary glossary entries. Values: true | false
|
|
||||||
} // Collapsible sections setting.
|
|
||||||
} // Character dictionary setting.
|
|
||||||
}, // Anilist API credentials and update behavior.
|
}, // Anilist API credentials and update behavior.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -340,16 +286,8 @@
|
|||||||
"pullPictures": false, // Enable Jellyfin poster/icon fetching for launcher menus. Values: true | false
|
"pullPictures": false, // Enable Jellyfin poster/icon fetching for launcher menus. Values: true | false
|
||||||
"iconCacheDir": "/tmp/subminer-jellyfin-icons", // Directory used by launcher for cached Jellyfin poster icons.
|
"iconCacheDir": "/tmp/subminer-jellyfin-icons", // Directory used by launcher for cached Jellyfin poster icons.
|
||||||
"directPlayPreferred": true, // Try direct play before server-managed transcoding when possible. Values: true | false
|
"directPlayPreferred": true, // Try direct play before server-managed transcoding when possible. Values: true | false
|
||||||
"directPlayContainers": [
|
"directPlayContainers": ["mkv", "mp4", "webm", "mov", "flac", "mp3", "aac"], // Container allowlist for direct play decisions.
|
||||||
"mkv",
|
"transcodeVideoCodec": "h264", // Preferred transcode video codec when direct play is unavailable.
|
||||||
"mp4",
|
|
||||||
"webm",
|
|
||||||
"mov",
|
|
||||||
"flac",
|
|
||||||
"mp3",
|
|
||||||
"aac"
|
|
||||||
], // Container allowlist for direct play decisions.
|
|
||||||
"transcodeVideoCodec": "h264" // Preferred transcode video codec when direct play is unavailable.
|
|
||||||
}, // Optional Jellyfin integration for auth, browsing, and playback launch.
|
}, // Optional Jellyfin integration for auth, browsing, and playback launch.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -360,7 +298,7 @@
|
|||||||
"discordPresence": {
|
"discordPresence": {
|
||||||
"enabled": false, // Enable optional Discord Rich Presence updates. Values: true | false
|
"enabled": false, // Enable optional Discord Rich Presence updates. Values: true | false
|
||||||
"updateIntervalMs": 3000, // Minimum interval between presence payload updates.
|
"updateIntervalMs": 3000, // Minimum interval between presence payload updates.
|
||||||
"debounceMs": 750 // Debounce delay used to collapse bursty presence updates.
|
"debounceMs": 750, // Debounce delay used to collapse bursty presence updates.
|
||||||
}, // Optional Discord Rich Presence activity card updates for current playback/study session.
|
}, // Optional Discord Rich Presence activity card updates for current playback/study session.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -382,7 +320,7 @@
|
|||||||
"telemetryDays": 30, // Telemetry retention window in days.
|
"telemetryDays": 30, // Telemetry retention window in days.
|
||||||
"dailyRollupsDays": 365, // Daily rollup retention window in days.
|
"dailyRollupsDays": 365, // Daily rollup retention window in days.
|
||||||
"monthlyRollupsDays": 1825, // Monthly rollup retention window in days.
|
"monthlyRollupsDays": 1825, // Monthly rollup retention window in days.
|
||||||
"vacuumIntervalDays": 7 // Minimum days between VACUUM runs.
|
"vacuumIntervalDays": 7, // Minimum days between VACUUM runs.
|
||||||
} // Retention setting.
|
}, // Retention setting.
|
||||||
} // Enable/disable immersion tracking.
|
}, // Enable/disable immersion tracking.
|
||||||
}
|
}
|
||||||
|
|||||||
155
docs/plans/2026-03-06-immersion-sqlite-verification.md
Normal file
155
docs/plans/2026-03-06-immersion-sqlite-verification.md
Normal file
@@ -0,0 +1,155 @@
|
|||||||
|
# Immersion SQLite Verification Implementation Plan
|
||||||
|
|
||||||
|
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
|
||||||
|
|
||||||
|
**Goal:** Make the SQLite-backed immersion tracking persistence tests visible in the repo's verification surface and reproducible through at least one documented automated command.
|
||||||
|
|
||||||
|
**Architecture:** Keep the existing Bun fast lane intact for routine local verification, but add an explicit SQLite verification lane that runs the database-backed immersion tests under a runtime with `node:sqlite` support. Surface unsupported-runtime behavior clearly in the source tests and contributor docs so skipped or omitted coverage is no longer mistaken for a fully green persistence lane.
|
||||||
|
|
||||||
|
**Tech Stack:** TypeScript, Bun scripts in `package.json`, Node's built-in `node:test` and `node:sqlite`, GitHub Actions workflows, Markdown docs in `README.md`.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Task 1: Audit and expose the SQLite-backed immersion test surface
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
|
||||||
|
- Modify: `src/core/services/immersion-tracker-service.test.ts`
|
||||||
|
- Modify: `src/core/services/immersion-tracker/storage-session.test.ts`
|
||||||
|
- Reference: `src/main/runtime/registry.test.ts`
|
||||||
|
|
||||||
|
**Step 1: Write the failing test**
|
||||||
|
|
||||||
|
Refactor the SQLite-gated immersion tests so missing `node:sqlite` support is reported with an explicit skip reason instead of a silent top-level `test.skip` alias.
|
||||||
|
|
||||||
|
**Step 2: Run test to verify it fails**
|
||||||
|
|
||||||
|
Run: `bun test src/core/services/immersion-tracker-service.test.ts src/core/services/immersion-tracker/storage-session.test.ts`
|
||||||
|
Expected: the current output shows generic skips or hides the storage-session suite from normal scripted verification, which is too opaque for contributors.
|
||||||
|
|
||||||
|
**Step 3: Write minimal implementation**
|
||||||
|
|
||||||
|
Mirror the `src/main/runtime/registry.test.ts` pattern: add a helper that either loads `DatabaseSync` or skips with a message like `requires node:sqlite support in this runtime`, then wrap each SQLite-backed test through that helper.
|
||||||
|
|
||||||
|
**Step 4: Run test to verify it passes**
|
||||||
|
|
||||||
|
Run: `bun test src/core/services/immersion-tracker-service.test.ts src/core/services/immersion-tracker/storage-session.test.ts`
|
||||||
|
Expected: PASS, with explicit skip messages in unsupported runtimes.
|
||||||
|
|
||||||
|
### Task 2: Add a reproducible SQLite verification command
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
|
||||||
|
- Modify: `package.json`
|
||||||
|
- Reference: `src/core/services/immersion-tracker-service.test.ts`
|
||||||
|
- Reference: `src/core/services/immersion-tracker/storage-session.test.ts`
|
||||||
|
|
||||||
|
**Step 1: Write the failing test**
|
||||||
|
|
||||||
|
Add a dedicated script contract for the SQLite-backed immersion verification lane so both persistence-heavy suites are intentionally grouped and runnable together.
|
||||||
|
|
||||||
|
**Step 2: Run test to verify it fails**
|
||||||
|
|
||||||
|
Run: `bun run test:immersion:sqlite`
|
||||||
|
Expected: FAIL because no such reproducible lane exists yet.
|
||||||
|
|
||||||
|
**Step 3: Write minimal implementation**
|
||||||
|
|
||||||
|
Update `package.json` with explicit scripts for the SQLite lane. Prefer a command shape that actually executes the built JS tests under Node with `node:sqlite` support, for example:
|
||||||
|
|
||||||
|
- `test:immersion:sqlite:dist`: `node --test dist/core/services/immersion-tracker-service.test.js dist/core/services/immersion-tracker/storage-session.test.js`
|
||||||
|
- `test:immersion:sqlite`: `bun run build && bun run test:immersion:sqlite:dist`
|
||||||
|
|
||||||
|
If build cost or runtime behavior requires a small adjustment, keep the core contract the same: one documented command must run both SQLite-backed immersion suites end-to-end.
|
||||||
|
|
||||||
|
**Step 4: Run test to verify it passes**
|
||||||
|
|
||||||
|
Run: `bun run test:immersion:sqlite`
|
||||||
|
Expected: PASS in a Node runtime with `node:sqlite`, executing both persistence suites without Bun-only skips.
|
||||||
|
|
||||||
|
### Task 3: Wire the SQLite lane into automated verification
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
|
||||||
|
- Modify: `.github/workflows/ci.yml`
|
||||||
|
- Modify: `.github/workflows/release.yml`
|
||||||
|
- Reference: `package.json`
|
||||||
|
|
||||||
|
**Step 1: Write the failing test**
|
||||||
|
|
||||||
|
Add the new SQLite immersion lane to the repo's automated verification so contributors and CI can rely on a real persistence check rather than the Bun fast lane alone.
|
||||||
|
|
||||||
|
**Step 2: Run test to verify it fails**
|
||||||
|
|
||||||
|
Run: `bun run test:immersion:sqlite`
|
||||||
|
Expected: local command may pass, but CI/release workflows still omit the lane entirely.
|
||||||
|
|
||||||
|
**Step 3: Write minimal implementation**
|
||||||
|
|
||||||
|
Update both workflows to provision a Node version with `node:sqlite` support before the SQLite lane runs, then execute `bun run test:immersion:sqlite` in the quality gate after the bundle build produces `dist/**` test files.
|
||||||
|
|
||||||
|
**Step 4: Run test to verify it passes**
|
||||||
|
|
||||||
|
Run: `bun run test:immersion:sqlite`
|
||||||
|
Expected: PASS locally, and workflow definitions clearly show the SQLite lane as part of automated verification.
|
||||||
|
|
||||||
|
### Task 4: Document contributor-facing prerequisites and commands
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
|
||||||
|
- Modify: `README.md`
|
||||||
|
- Reference: `package.json`
|
||||||
|
- Reference: `.github/workflows/ci.yml`
|
||||||
|
|
||||||
|
**Step 1: Write the failing test**
|
||||||
|
|
||||||
|
Extend the verification docs so contributors can discover the SQLite lane, know why the Bun source lane may skip those cases, and understand which command reproduces the persistence coverage.
|
||||||
|
|
||||||
|
**Step 2: Run test to verify it fails**
|
||||||
|
|
||||||
|
Run: `grep -n "test:immersion:sqlite" README.md`
|
||||||
|
Expected: FAIL because the dedicated immersion SQLite lane is undocumented.
|
||||||
|
|
||||||
|
**Step 3: Write minimal implementation**
|
||||||
|
|
||||||
|
Update `README.md` to document:
|
||||||
|
|
||||||
|
- the Bun fast/default lane versus the SQLite persistence lane
|
||||||
|
- the `node:sqlite` prerequisite for the reproducible command
|
||||||
|
- that the dedicated lane covers session persistence/finalization behavior beyond seam tests
|
||||||
|
|
||||||
|
**Step 4: Run test to verify it passes**
|
||||||
|
|
||||||
|
Run: `grep -n "test:immersion:sqlite" README.md && grep -n "node:sqlite" README.md`
|
||||||
|
Expected: PASS, with clear contributor guidance.
|
||||||
|
|
||||||
|
### Task 5: Verify persistence coverage end-to-end
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
|
||||||
|
- Test: `src/core/services/immersion-tracker-service.test.ts`
|
||||||
|
- Test: `src/core/services/immersion-tracker/storage-session.test.ts`
|
||||||
|
- Reference: `README.md`
|
||||||
|
- Reference: `package.json`
|
||||||
|
|
||||||
|
**Step 1: Write the failing test**
|
||||||
|
|
||||||
|
Prove the final lane exercises real DB-backed persistence/finalization paths, not just the seam tests.
|
||||||
|
|
||||||
|
**Step 2: Run test to verify it fails**
|
||||||
|
|
||||||
|
Run: `bun run test:immersion:sqlite`
|
||||||
|
Expected: before implementation, the command does not exist or does not cover both SQLite-backed suites.
|
||||||
|
|
||||||
|
**Step 3: Write minimal implementation**
|
||||||
|
|
||||||
|
Keep the dedicated lane pointed at both existing SQLite-backed test files so it covers representative finalization and persistence behavior such as:
|
||||||
|
|
||||||
|
- `destroy finalizes active session and persists final telemetry`
|
||||||
|
- `start/finalize session updates ended_at and status`
|
||||||
|
- `executeQueuedWrite inserts event and telemetry rows`
|
||||||
|
|
||||||
|
**Step 4: Run test to verify it passes**
|
||||||
|
|
||||||
|
Run: `bun run test:immersion:sqlite`
|
||||||
|
Expected: PASS, with those DB-backed persistence/finalization cases executing successfully under Node.
|
||||||
92
docs/plans/2026-03-06-merged-character-dictionary.md
Normal file
92
docs/plans/2026-03-06-merged-character-dictionary.md
Normal file
@@ -0,0 +1,92 @@
|
|||||||
|
# Merged Character Dictionary Implementation Plan
|
||||||
|
|
||||||
|
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
|
||||||
|
|
||||||
|
**Goal:** Replace per-anime character dictionary imports with one merged Yomitan dictionary driven by MRU usage retention.
|
||||||
|
|
||||||
|
**Architecture:** Persist normalized per-media character dictionary snapshots locally, maintain MRU retained media ids in auto-sync state, and rebuild a single merged Yomitan zip only when the retained set changes. Keep external AniList fetches only for media without a local snapshot; normal revisits stay local.
|
||||||
|
|
||||||
|
**Tech Stack:** TypeScript, Bun test, Node fs/path, existing Yomitan zip generation helpers.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Task 1: Lock in merged auto-sync behavior
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
- Modify: `src/main/runtime/character-dictionary-auto-sync.test.ts`
|
||||||
|
- Test: `src/main/runtime/character-dictionary-auto-sync.test.ts`
|
||||||
|
|
||||||
|
**Step 1: Write the failing test**
|
||||||
|
|
||||||
|
Add tests for:
|
||||||
|
- single merged dictionary title/import replacing per-media imports
|
||||||
|
- MRU reorder causing rebuild only when order changes
|
||||||
|
- unchanged revisit skipping rebuild/import
|
||||||
|
- capped retained set evicting least-recently-used media
|
||||||
|
|
||||||
|
**Step 2: Run test to verify it fails**
|
||||||
|
|
||||||
|
Run: `bun test src/main/runtime/character-dictionary-auto-sync.test.ts`
|
||||||
|
Expected: FAIL on old per-media import assumptions / missing merged behavior
|
||||||
|
|
||||||
|
**Step 3: Write minimal implementation**
|
||||||
|
|
||||||
|
Update auto-sync runtime to track retained media ids and merged revision/hash, call merged zip builder, and replace one imported Yomitan dictionary.
|
||||||
|
|
||||||
|
**Step 4: Run test to verify it passes**
|
||||||
|
|
||||||
|
Run: `bun test src/main/runtime/character-dictionary-auto-sync.test.ts`
|
||||||
|
Expected: PASS
|
||||||
|
|
||||||
|
### Task 2: Add snapshot + merged-zip runtime support
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
- Modify: `src/main/character-dictionary-runtime.ts`
|
||||||
|
- Modify: `src/main/character-dictionary-runtime.test.ts`
|
||||||
|
- Test: `src/main/character-dictionary-runtime.test.ts`
|
||||||
|
|
||||||
|
**Step 1: Write the failing test**
|
||||||
|
|
||||||
|
Add tests for:
|
||||||
|
- saving/loading normalized per-media snapshots without per-media zip cache
|
||||||
|
- building merged zip from retained media snapshots with stable dictionary title
|
||||||
|
- preserving images/terms from multiple media in merged output
|
||||||
|
|
||||||
|
**Step 2: Run test to verify it fails**
|
||||||
|
|
||||||
|
Run: `bun test src/main/character-dictionary-runtime.test.ts`
|
||||||
|
Expected: FAIL because snapshot/merged APIs do not exist yet
|
||||||
|
|
||||||
|
**Step 3: Write minimal implementation**
|
||||||
|
|
||||||
|
Refactor dictionary runtime to expose snapshot generation/loading and merged zip building from stored metadata/images.
|
||||||
|
|
||||||
|
**Step 4: Run test to verify it passes**
|
||||||
|
|
||||||
|
Run: `bun test src/main/character-dictionary-runtime.test.ts`
|
||||||
|
Expected: PASS
|
||||||
|
|
||||||
|
### Task 3: Wire app/runtime config and docs
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
- Modify: `src/main.ts`
|
||||||
|
- Modify: `src/config/definitions/options-integrations.ts`
|
||||||
|
- Modify: `README.md`
|
||||||
|
|
||||||
|
**Step 1: Write the failing test**
|
||||||
|
|
||||||
|
Add or update tests if needed for new dependency wiring / docs-adjacent config description expectations.
|
||||||
|
|
||||||
|
**Step 2: Run test to verify it fails**
|
||||||
|
|
||||||
|
Run: `bun test src/main/runtime/character-dictionary-auto-sync.test.ts src/main/character-dictionary-runtime.test.ts`
|
||||||
|
Expected: FAIL until wiring matches merged flow
|
||||||
|
|
||||||
|
**Step 3: Write minimal implementation**
|
||||||
|
|
||||||
|
Swap app wiring to new snapshot + merged build API, update config/docs text from TTL semantics to usage-based merged retention.
|
||||||
|
|
||||||
|
**Step 4: Run test to verify it passes**
|
||||||
|
|
||||||
|
Run: `bun test src/main/runtime/character-dictionary-auto-sync.test.ts src/main/character-dictionary-runtime.test.ts && bun run tsc --noEmit`
|
||||||
|
Expected: PASS
|
||||||
121
docs/plans/2026-03-06-subtitle-sync-verification.md
Normal file
121
docs/plans/2026-03-06-subtitle-sync-verification.md
Normal file
@@ -0,0 +1,121 @@
|
|||||||
|
# Subtitle Sync Verification Implementation Plan
|
||||||
|
|
||||||
|
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
|
||||||
|
|
||||||
|
**Goal:** Replace the no-op `test:subtitle` lane with real automated subtitle-sync verification that reuses the maintained subsync tests and documents the real contributor workflow.
|
||||||
|
|
||||||
|
**Architecture:** Repoint the subtitle verification command at the existing source-level subsync tests instead of inventing a second hidden suite. Add one focused ffsubsync failure-path test so the subtitle lane explicitly covers both engines plus a non-happy path, then update contributor docs to describe the dedicated subtitle lane and how it relates to `test:core`.
|
||||||
|
|
||||||
|
**Tech Stack:** TypeScript, Bun test, Node test/assert, npm package scripts, Markdown docs.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Task 1: Lock subtitle lane to real subsync tests
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
|
||||||
|
- Modify: `package.json`
|
||||||
|
|
||||||
|
**Step 1: Write the failing test**
|
||||||
|
|
||||||
|
Define the intended command shape first: `test:subtitle:src` should run `src/core/services/subsync.test.ts` and `src/subsync/utils.test.ts`, `test:subtitle` should invoke that real source lane, and no placeholder echo should remain.
|
||||||
|
|
||||||
|
**Step 2: Run test to verify it fails**
|
||||||
|
|
||||||
|
Run: `bun run test:subtitle`
|
||||||
|
Expected: It performs a build and prints `Subtitle tests are currently not configured`, proving the lane is still a no-op.
|
||||||
|
|
||||||
|
**Step 3: Write minimal implementation**
|
||||||
|
|
||||||
|
Update `package.json` so:
|
||||||
|
|
||||||
|
- `test:subtitle:src` runs `bun test src/core/services/subsync.test.ts src/subsync/utils.test.ts`
|
||||||
|
- `test:subtitle` runs the new source lane directly
|
||||||
|
- `test:subtitle:dist` is removed if it is no longer the real verification path
|
||||||
|
|
||||||
|
**Step 4: Run test to verify it passes**
|
||||||
|
|
||||||
|
Run: `bun run test:subtitle`
|
||||||
|
Expected: PASS with Bun executing the real subtitle-sync test files.
|
||||||
|
|
||||||
|
### Task 2: Add explicit ffsubsync non-happy-path coverage
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
|
||||||
|
- Modify: `src/core/services/subsync.test.ts`
|
||||||
|
- Test: `src/core/services/subsync.test.ts`
|
||||||
|
|
||||||
|
**Step 1: Write the failing test**
|
||||||
|
|
||||||
|
Add a test that runs `runSubsyncManual({ engine: 'ffsubsync' })` with a stub ffsubsync executable that exits non-zero and writes stderr, then assert:
|
||||||
|
|
||||||
|
- `result.ok === false`
|
||||||
|
- `result.message` starts with `ffsubsync synchronization failed`
|
||||||
|
- the failure message includes command details surfaced to the user
|
||||||
|
|
||||||
|
**Step 2: Run test to verify it fails**
|
||||||
|
|
||||||
|
Run: `bun test src/core/services/subsync.test.ts`
|
||||||
|
Expected: FAIL because ffsubsync failure propagation is not asserted yet.
|
||||||
|
|
||||||
|
**Step 3: Write minimal implementation**
|
||||||
|
|
||||||
|
Keep production code unchanged unless the new test exposes a real bug. If needed, tighten failure assertions or message propagation in `src/core/services/subsync.ts` without changing successful behavior.
|
||||||
|
|
||||||
|
**Step 4: Run test to verify it passes**
|
||||||
|
|
||||||
|
Run: `bun test src/core/services/subsync.test.ts`
|
||||||
|
Expected: PASS with both alass and ffsubsync paths covered, including a non-happy path.
|
||||||
|
|
||||||
|
### Task 3: Make contributor docs match the real verification path
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
|
||||||
|
- Modify: `README.md`
|
||||||
|
- Modify: `package.json`
|
||||||
|
|
||||||
|
**Step 1: Write the failing test**
|
||||||
|
|
||||||
|
Use the repository state as the failure signal: README currently advertises subtitle sync as a feature but does not tell contributors that `bun run test:subtitle` is the real verification lane.
|
||||||
|
|
||||||
|
**Step 2: Run test to verify it fails**
|
||||||
|
|
||||||
|
Run: `bun run test:subtitle && bun test src/subsync/utils.test.ts`
|
||||||
|
Expected: Tests pass, but docs still do not explain the lane; this is the remaining acceptance-criteria gap.
|
||||||
|
|
||||||
|
**Step 3: Write minimal implementation**
|
||||||
|
|
||||||
|
Update `README.md` with a short contributor-facing verification note that:
|
||||||
|
|
||||||
|
- points to `bun run test:subtitle` for subtitle-sync coverage
|
||||||
|
- states that the lane reuses the maintained subsync tests already included in broader core coverage
|
||||||
|
- avoids implying there is a separate hidden subtitle test harness beyond those tests
|
||||||
|
|
||||||
|
**Step 4: Run test to verify it passes**
|
||||||
|
|
||||||
|
Run: `bun run test:subtitle`
|
||||||
|
Expected: PASS, with docs and scripts now aligned around the same subtitle verification strategy.
|
||||||
|
|
||||||
|
### Task 4: Verify matrix integration stays clean
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
|
||||||
|
- Modify: `package.json` (only if Task 1/3 exposed cleanup needs)
|
||||||
|
|
||||||
|
**Step 1: Write the failing test**
|
||||||
|
|
||||||
|
Treat duplication as the failure condition: confirm the dedicated subtitle lane reuses the same maintained files already present in `test:core:src` rather than creating a second divergent suite.
|
||||||
|
|
||||||
|
**Step 2: Run test to verify it fails**
|
||||||
|
|
||||||
|
Run: `bun run test:subtitle && bun run test:core:src`
|
||||||
|
Expected: If file lists diverge unexpectedly, this review step exposes it before handoff.
|
||||||
|
|
||||||
|
**Step 3: Write minimal implementation**
|
||||||
|
|
||||||
|
If needed, do the smallest script cleanup necessary so subtitle coverage remains explicit without hiding or duplicating existing core coverage.
|
||||||
|
|
||||||
|
**Step 4: Run test to verify it passes**
|
||||||
|
|
||||||
|
Run: `bun run test:subtitle && bun run test:core:src`
|
||||||
|
Expected: PASS, confirming the dedicated lane and the broader core suite agree on subtitle coverage.
|
||||||
169
docs/plans/2026-03-06-testing-workflow-test-matrix.md
Normal file
169
docs/plans/2026-03-06-testing-workflow-test-matrix.md
Normal file
@@ -0,0 +1,169 @@
|
|||||||
|
# Testing Workflow Test Matrix Implementation Plan
|
||||||
|
|
||||||
|
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
|
||||||
|
|
||||||
|
**Goal:** Make the standard test commands reflect the maintained test surface so newly added tests are discovered automatically or intentionally documented outside the default lane.
|
||||||
|
|
||||||
|
**Architecture:** Replace the current hand-maintained file allowlists in `package.json` with directory-based Bun test lanes that map to maintained test surfaces. Keep the default developer lane fast, move slower or environment-specific checks into explicit commands, and document the resulting matrix in `README.md` so contributors know exactly which command to run.
|
||||||
|
|
||||||
|
**Tech Stack:** TypeScript, Bun test, npm-style package scripts in `package.json`, Markdown docs in `README.md`.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Task 1: Lock in the desired script matrix with failing tests/audit checks
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
|
||||||
|
- Modify: `package.json`
|
||||||
|
- Test: `package.json`
|
||||||
|
- Reference: `src/main-entry-runtime.test.ts`
|
||||||
|
- Reference: `src/anki-integration/anki-connect-proxy.test.ts`
|
||||||
|
- Reference: `src/main/runtime/registry.test.ts`
|
||||||
|
|
||||||
|
**Step 1: Write the failing test**
|
||||||
|
|
||||||
|
Add a new script structure in `package.json` expectations by editing the script map so these lanes exist conceptually:
|
||||||
|
|
||||||
|
- `test:fast` for default fast verification
|
||||||
|
- `test:full` for the maintained source test surface
|
||||||
|
- `test:env` for environment-specific checks
|
||||||
|
|
||||||
|
The fast lane should stay selective and intentional. The full lane should use directory-based discovery rather than file-by-file allowlists, with representative coverage from:
|
||||||
|
|
||||||
|
- `src/main-entry-runtime.test.ts`
|
||||||
|
- `src/anki-integration/**/*.test.ts`
|
||||||
|
- `src/main/**/*.test.ts`
|
||||||
|
- `launcher/**/*.test.ts`
|
||||||
|
|
||||||
|
**Step 2: Run test to verify it fails**
|
||||||
|
|
||||||
|
Run: `bun run test:full`
|
||||||
|
Expected: FAIL because `test:full` does not exist yet, and previously omitted maintained tests are still outside the standard matrix.
|
||||||
|
|
||||||
|
**Step 3: Write minimal implementation**
|
||||||
|
|
||||||
|
Update `package.json` scripts so:
|
||||||
|
|
||||||
|
- `test` points at `test:fast`
|
||||||
|
- `test:fast` runs the fast default lane only
|
||||||
|
- `test:full` runs directory-based maintained suites instead of file allowlists
|
||||||
|
- `test:env` runs environment-specific verification (for example launcher/plugin and sqlite-gated suites)
|
||||||
|
- subsystem scripts use stable path globs or directory arguments so new tests are discovered automatically
|
||||||
|
|
||||||
|
Prefer commands like these, adjusted only as needed for Bun behavior in this repo:
|
||||||
|
|
||||||
|
- `bun test src/config/**/*.test.ts`
|
||||||
|
- `bun test src/{cli,core,renderer,subtitle,subsync,main,anki-integration}/*.test.ts ...` only if Bun cannot take the broader directory directly
|
||||||
|
- `bun test launcher/**/*.test.ts`
|
||||||
|
|
||||||
|
Do not keep large hand-maintained file enumerations for maintained unit/integration lanes.
|
||||||
|
|
||||||
|
**Step 4: Run test to verify it passes**
|
||||||
|
|
||||||
|
Run: `bun run test:full`
|
||||||
|
Expected: PASS, including automated execution of representative tests that were previously omitted from the standard matrix.
|
||||||
|
|
||||||
|
### Task 2: Separate environment-specific verification from the maintained default/full lanes
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
|
||||||
|
- Modify: `package.json`
|
||||||
|
- Test: `src/main/runtime/registry.test.ts`
|
||||||
|
- Test: `launcher/smoke.e2e.test.ts`
|
||||||
|
- Test: `src/core/services/immersion-tracker-service.test.ts`
|
||||||
|
|
||||||
|
**Step 1: Write the failing test**
|
||||||
|
|
||||||
|
Refine the package scripts so environment-specific checks are explicitly grouped outside the default fast lane. Treat these as the primary environment-specific examples unless repo behavior proves a better split during execution:
|
||||||
|
|
||||||
|
- launcher smoke/plugin checks that rely on local process or Lua execution
|
||||||
|
- sqlite-dependent checks that may skip when `node:sqlite` is unavailable
|
||||||
|
|
||||||
|
**Step 2: Run test to verify it fails**
|
||||||
|
|
||||||
|
Run: `bun run test:env`
|
||||||
|
Expected: FAIL because the environment-specific lane is not defined yet.
|
||||||
|
|
||||||
|
**Step 3: Write minimal implementation**
|
||||||
|
|
||||||
|
Add explicit environment-specific scripts in `package.json`, such as:
|
||||||
|
|
||||||
|
- a launcher/plugin lane that runs `launcher/smoke.e2e.test.ts` plus `lua scripts/test-plugin-start-gate.lua`
|
||||||
|
- a sqlite lane for tests that require `node:sqlite` support or otherwise need environment notes
|
||||||
|
- an aggregate `test:env` command that runs all environment-specific lanes
|
||||||
|
|
||||||
|
Keep these lanes documented and reproducible rather than silently excluded.
|
||||||
|
|
||||||
|
**Step 4: Run test to verify it passes**
|
||||||
|
|
||||||
|
Run: `bun run test:env`
|
||||||
|
Expected: PASS in supported environments, or clear documented skip behavior where the tests themselves intentionally gate on missing runtime support.
|
||||||
|
|
||||||
|
### Task 3: Document contributor-facing test commands and matrix
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
|
||||||
|
- Modify: `README.md`
|
||||||
|
- Reference: `package.json`
|
||||||
|
|
||||||
|
**Step 1: Write the failing test**
|
||||||
|
|
||||||
|
Add a contributor-focused testing section requirement in `README.md` expectations:
|
||||||
|
|
||||||
|
- fast verification command
|
||||||
|
- full verification command
|
||||||
|
- environment-specific verification command
|
||||||
|
- plain-language explanation of which suites each lane covers and why
|
||||||
|
|
||||||
|
**Step 2: Run test to verify it fails**
|
||||||
|
|
||||||
|
Run: `grep -n "Testing" README.md`
|
||||||
|
Expected: no contributor testing matrix section exists yet.
|
||||||
|
|
||||||
|
**Step 3: Write minimal implementation**
|
||||||
|
|
||||||
|
Update `README.md` with a concise `Testing` section that documents:
|
||||||
|
|
||||||
|
- `bun run test` / `bun run test:fast` for fast local verification
|
||||||
|
- `bun run test:full` for the maintained source test surface
|
||||||
|
- `bun run test:env` for environment-specific verification
|
||||||
|
- any important notes about sqlite-gated tests and launcher/plugin checks
|
||||||
|
|
||||||
|
Keep the matrix concrete and reproducible.
|
||||||
|
|
||||||
|
**Step 4: Run test to verify it passes**
|
||||||
|
|
||||||
|
Run: `grep -n "Testing" README.md && grep -n "test:full" README.md && grep -n "test:env" README.md`
|
||||||
|
Expected: PASS with the new contributor-facing matrix present.
|
||||||
|
|
||||||
|
### Task 4: Verify representative omitted suites now belong to automated lanes
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
|
||||||
|
- Test: `src/main-entry-runtime.test.ts`
|
||||||
|
- Test: `src/anki-integration/anki-connect-proxy.test.ts`
|
||||||
|
- Test: `src/main/runtime/registry.test.ts`
|
||||||
|
- Reference: `package.json`
|
||||||
|
- Reference: `README.md`
|
||||||
|
|
||||||
|
**Step 1: Write the failing test**
|
||||||
|
|
||||||
|
Use targeted command checks to prove these previously omitted surfaces are now in the matrix:
|
||||||
|
|
||||||
|
- entry/runtime: `src/main-entry-runtime.test.ts`
|
||||||
|
- Anki integration: `src/anki-integration/anki-connect-proxy.test.ts`
|
||||||
|
- main runtime: `src/main/runtime/registry.test.ts`
|
||||||
|
|
||||||
|
**Step 2: Run test to verify it fails**
|
||||||
|
|
||||||
|
Run: `bun run test:full src/main-entry-runtime.test.ts`
|
||||||
|
Expected: either unsupported invocation or evidence that the current matrix still does not include these surfaces automatically.
|
||||||
|
|
||||||
|
**Step 3: Write minimal implementation**
|
||||||
|
|
||||||
|
Adjust the final script paths/globs until the full matrix includes those representative surfaces without file-by-file script maintenance.
|
||||||
|
|
||||||
|
**Step 4: Run test to verify it passes**
|
||||||
|
|
||||||
|
Run: `bun test src/main-entry-runtime.test.ts src/anki-integration/anki-connect-proxy.test.ts src/main/runtime/registry.test.ts && bun run test:fast && bun run test:full`
|
||||||
|
Expected: PASS, with at least one representative test from each required surface executing through the documented automated lanes.
|
||||||
@@ -1,6 +1,5 @@
|
|||||||
import fs from 'node:fs';
|
import fs from 'node:fs';
|
||||||
import os from 'node:os';
|
import path from 'node:path';
|
||||||
import { spawn } from 'node:child_process';
|
|
||||||
import { fail, log } from '../log.js';
|
import { fail, log } from '../log.js';
|
||||||
import { commandExists, isYoutubeTarget, realpathMaybe, resolvePathMaybe } from '../util.js';
|
import { commandExists, isYoutubeTarget, realpathMaybe, resolvePathMaybe } from '../util.js';
|
||||||
import { collectVideos, showFzfMenu, showRofiMenu } from '../picker.js';
|
import { collectVideos, showFzfMenu, showRofiMenu } from '../picker.js';
|
||||||
@@ -15,15 +14,6 @@ import {
|
|||||||
import { generateYoutubeSubtitles } from '../youtube.js';
|
import { generateYoutubeSubtitles } from '../youtube.js';
|
||||||
import type { Args } from '../types.js';
|
import type { Args } from '../types.js';
|
||||||
import type { LauncherCommandContext } from './context.js';
|
import type { LauncherCommandContext } from './context.js';
|
||||||
import { ensureLauncherSetupReady } from '../setup-gate.js';
|
|
||||||
import {
|
|
||||||
getDefaultConfigDir,
|
|
||||||
getSetupStatePath,
|
|
||||||
readSetupState,
|
|
||||||
} from '../../src/shared/setup-state.js';
|
|
||||||
|
|
||||||
const SETUP_WAIT_TIMEOUT_MS = 10 * 60 * 1000;
|
|
||||||
const SETUP_POLL_INTERVAL_MS = 500;
|
|
||||||
|
|
||||||
function checkDependencies(args: Args): void {
|
function checkDependencies(args: Args): void {
|
||||||
const missing: string[] = [];
|
const missing: string[] = [];
|
||||||
@@ -95,47 +85,12 @@ function registerCleanup(context: LauncherCommandContext): void {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
async function ensurePlaybackSetupReady(context: LauncherCommandContext): Promise<void> {
|
|
||||||
const { args, appPath } = context;
|
|
||||||
if (!appPath) return;
|
|
||||||
|
|
||||||
const configDir = getDefaultConfigDir({
|
|
||||||
xdgConfigHome: process.env.XDG_CONFIG_HOME,
|
|
||||||
homeDir: os.homedir(),
|
|
||||||
});
|
|
||||||
const statePath = getSetupStatePath(configDir);
|
|
||||||
const ready = await ensureLauncherSetupReady({
|
|
||||||
readSetupState: () => readSetupState(statePath),
|
|
||||||
launchSetupApp: () => {
|
|
||||||
const setupArgs = ['--background', '--setup'];
|
|
||||||
if (args.logLevel) {
|
|
||||||
setupArgs.push('--log-level', args.logLevel);
|
|
||||||
}
|
|
||||||
const child = spawn(appPath, setupArgs, {
|
|
||||||
detached: true,
|
|
||||||
stdio: 'ignore',
|
|
||||||
});
|
|
||||||
child.unref();
|
|
||||||
},
|
|
||||||
sleep: (ms) => new Promise((resolve) => setTimeout(resolve, ms)),
|
|
||||||
now: () => Date.now(),
|
|
||||||
timeoutMs: SETUP_WAIT_TIMEOUT_MS,
|
|
||||||
pollIntervalMs: SETUP_POLL_INTERVAL_MS,
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!ready) {
|
|
||||||
fail('SubMiner setup is incomplete. Complete setup in the app, then retry playback.');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function runPlaybackCommand(context: LauncherCommandContext): Promise<void> {
|
export async function runPlaybackCommand(context: LauncherCommandContext): Promise<void> {
|
||||||
const { args, appPath, scriptPath, mpvSocketPath, pluginRuntimeConfig, processAdapter } = context;
|
const { args, appPath, scriptPath, mpvSocketPath, pluginRuntimeConfig, processAdapter } = context;
|
||||||
if (!appPath) {
|
if (!appPath) {
|
||||||
fail('SubMiner AppImage not found. Install to ~/.local/bin/ or set SUBMINER_APPIMAGE_PATH.');
|
fail('SubMiner AppImage not found. Install to ~/.local/bin/ or set SUBMINER_APPIMAGE_PATH.');
|
||||||
}
|
}
|
||||||
|
|
||||||
await ensurePlaybackSetupReady(context);
|
|
||||||
|
|
||||||
if (!args.target) {
|
if (!args.target) {
|
||||||
checkPickerDependencies(args);
|
checkPickerDependencies(args);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,107 +0,0 @@
|
|||||||
import test from 'node:test';
|
|
||||||
import assert from 'node:assert/strict';
|
|
||||||
import { ensureLauncherSetupReady, waitForSetupCompletion } from './setup-gate';
|
|
||||||
import type { SetupState } from '../src/shared/setup-state';
|
|
||||||
|
|
||||||
test('waitForSetupCompletion resolves completed and cancelled states', async () => {
|
|
||||||
const sequence: Array<SetupState | null> = [
|
|
||||||
null,
|
|
||||||
{
|
|
||||||
version: 1,
|
|
||||||
status: 'in_progress',
|
|
||||||
completedAt: null,
|
|
||||||
completionSource: null,
|
|
||||||
lastSeenYomitanDictionaryCount: 0,
|
|
||||||
pluginInstallStatus: 'unknown',
|
|
||||||
pluginInstallPathSummary: null,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
version: 1,
|
|
||||||
status: 'completed',
|
|
||||||
completedAt: '2026-03-07T00:00:00.000Z',
|
|
||||||
completionSource: 'user',
|
|
||||||
lastSeenYomitanDictionaryCount: 1,
|
|
||||||
pluginInstallStatus: 'skipped',
|
|
||||||
pluginInstallPathSummary: null,
|
|
||||||
},
|
|
||||||
];
|
|
||||||
|
|
||||||
const result = await waitForSetupCompletion({
|
|
||||||
readSetupState: () => sequence.shift() ?? null,
|
|
||||||
sleep: async () => undefined,
|
|
||||||
now: (() => {
|
|
||||||
let value = 0;
|
|
||||||
return () => (value += 100);
|
|
||||||
})(),
|
|
||||||
timeoutMs: 5_000,
|
|
||||||
pollIntervalMs: 100,
|
|
||||||
});
|
|
||||||
|
|
||||||
assert.equal(result, 'completed');
|
|
||||||
});
|
|
||||||
|
|
||||||
test('ensureLauncherSetupReady launches setup app and resumes only after completion', async () => {
|
|
||||||
const calls: string[] = [];
|
|
||||||
let reads = 0;
|
|
||||||
|
|
||||||
const ready = await ensureLauncherSetupReady({
|
|
||||||
readSetupState: () => {
|
|
||||||
reads += 1;
|
|
||||||
if (reads === 1) return null;
|
|
||||||
if (reads === 2) {
|
|
||||||
return {
|
|
||||||
version: 1,
|
|
||||||
status: 'in_progress',
|
|
||||||
completedAt: null,
|
|
||||||
completionSource: null,
|
|
||||||
lastSeenYomitanDictionaryCount: 0,
|
|
||||||
pluginInstallStatus: 'unknown',
|
|
||||||
pluginInstallPathSummary: null,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
version: 1,
|
|
||||||
status: 'completed',
|
|
||||||
completedAt: '2026-03-07T00:00:00.000Z',
|
|
||||||
completionSource: 'user',
|
|
||||||
lastSeenYomitanDictionaryCount: 1,
|
|
||||||
pluginInstallStatus: 'installed',
|
|
||||||
pluginInstallPathSummary: '/tmp/mpv',
|
|
||||||
};
|
|
||||||
},
|
|
||||||
launchSetupApp: () => {
|
|
||||||
calls.push('launch');
|
|
||||||
},
|
|
||||||
sleep: async () => undefined,
|
|
||||||
now: (() => {
|
|
||||||
let value = 0;
|
|
||||||
return () => (value += 100);
|
|
||||||
})(),
|
|
||||||
timeoutMs: 5_000,
|
|
||||||
pollIntervalMs: 100,
|
|
||||||
});
|
|
||||||
|
|
||||||
assert.equal(ready, true);
|
|
||||||
assert.deepEqual(calls, ['launch']);
|
|
||||||
});
|
|
||||||
|
|
||||||
test('ensureLauncherSetupReady fails on timeout/cancelled state', async () => {
|
|
||||||
const result = await ensureLauncherSetupReady({
|
|
||||||
readSetupState: () => ({
|
|
||||||
version: 1,
|
|
||||||
status: 'cancelled',
|
|
||||||
completedAt: null,
|
|
||||||
completionSource: null,
|
|
||||||
lastSeenYomitanDictionaryCount: 0,
|
|
||||||
pluginInstallStatus: 'unknown',
|
|
||||||
pluginInstallPathSummary: null,
|
|
||||||
}),
|
|
||||||
launchSetupApp: () => undefined,
|
|
||||||
sleep: async () => undefined,
|
|
||||||
now: () => 0,
|
|
||||||
timeoutMs: 5_000,
|
|
||||||
pollIntervalMs: 100,
|
|
||||||
});
|
|
||||||
|
|
||||||
assert.equal(result, false);
|
|
||||||
});
|
|
||||||
@@ -1,41 +0,0 @@
|
|||||||
import { isSetupCompleted, type SetupState } from '../src/shared/setup-state.js';
|
|
||||||
|
|
||||||
export async function waitForSetupCompletion(deps: {
|
|
||||||
readSetupState: () => SetupState | null;
|
|
||||||
sleep: (ms: number) => Promise<void>;
|
|
||||||
now: () => number;
|
|
||||||
timeoutMs: number;
|
|
||||||
pollIntervalMs: number;
|
|
||||||
}): Promise<'completed' | 'cancelled' | 'timeout'> {
|
|
||||||
const deadline = deps.now() + deps.timeoutMs;
|
|
||||||
|
|
||||||
while (deps.now() <= deadline) {
|
|
||||||
const state = deps.readSetupState();
|
|
||||||
if (isSetupCompleted(state)) {
|
|
||||||
return 'completed';
|
|
||||||
}
|
|
||||||
if (state?.status === 'cancelled') {
|
|
||||||
return 'cancelled';
|
|
||||||
}
|
|
||||||
await deps.sleep(deps.pollIntervalMs);
|
|
||||||
}
|
|
||||||
|
|
||||||
return 'timeout';
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function ensureLauncherSetupReady(deps: {
|
|
||||||
readSetupState: () => SetupState | null;
|
|
||||||
launchSetupApp: () => void;
|
|
||||||
sleep: (ms: number) => Promise<void>;
|
|
||||||
now: () => number;
|
|
||||||
timeoutMs: number;
|
|
||||||
pollIntervalMs: number;
|
|
||||||
}): Promise<boolean> {
|
|
||||||
if (isSetupCompleted(deps.readSetupState())) {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
deps.launchSetupApp();
|
|
||||||
const result = await waitForSetupCompletion(deps);
|
|
||||||
return result === 'completed';
|
|
||||||
}
|
|
||||||
@@ -4,13 +4,6 @@ import fs from 'node:fs';
|
|||||||
import os from 'node:os';
|
import os from 'node:os';
|
||||||
import path from 'node:path';
|
import path from 'node:path';
|
||||||
import { spawn, spawnSync } from 'node:child_process';
|
import { spawn, spawnSync } from 'node:child_process';
|
||||||
import {
|
|
||||||
createDefaultSetupState,
|
|
||||||
getDefaultConfigDir,
|
|
||||||
getSetupStatePath,
|
|
||||||
readSetupState,
|
|
||||||
writeSetupState,
|
|
||||||
} from '../src/shared/setup-state.js';
|
|
||||||
|
|
||||||
type RunResult = {
|
type RunResult = {
|
||||||
status: number | null;
|
status: number | null;
|
||||||
@@ -32,9 +25,6 @@ type SmokeCase = {
|
|||||||
mpvOverlayLogPath: string;
|
mpvOverlayLogPath: string;
|
||||||
};
|
};
|
||||||
|
|
||||||
const LAUNCHER_RUN_TIMEOUT_MS = 25000;
|
|
||||||
const LONG_SMOKE_TEST_TIMEOUT_MS = 30000;
|
|
||||||
|
|
||||||
function writeExecutable(filePath: string, body: string): void {
|
function writeExecutable(filePath: string, body: string): void {
|
||||||
fs.writeFileSync(filePath, body);
|
fs.writeFileSync(filePath, body);
|
||||||
fs.chmodSync(filePath, 0o755);
|
fs.chmodSync(filePath, 0o755);
|
||||||
@@ -65,13 +55,6 @@ function createSmokeCase(name: string): SmokeCase {
|
|||||||
`socket_path=${socketPath}\n`,
|
`socket_path=${socketPath}\n`,
|
||||||
);
|
);
|
||||||
|
|
||||||
const configDir = getDefaultConfigDir({ xdgConfigHome, homeDir });
|
|
||||||
const setupState = createDefaultSetupState();
|
|
||||||
setupState.status = 'completed';
|
|
||||||
setupState.completedAt = '2026-03-07T00:00:00.000Z';
|
|
||||||
setupState.completionSource = 'user';
|
|
||||||
writeSetupState(getSetupStatePath(configDir), setupState);
|
|
||||||
|
|
||||||
const fakeMpvLogPath = path.join(artifactsDir, 'fake-mpv.log');
|
const fakeMpvLogPath = path.join(artifactsDir, 'fake-mpv.log');
|
||||||
const fakeAppLogPath = path.join(artifactsDir, 'fake-app.log');
|
const fakeAppLogPath = path.join(artifactsDir, 'fake-app.log');
|
||||||
const fakeAppStartLogPath = path.join(artifactsDir, 'fake-app-start.log');
|
const fakeAppStartLogPath = path.join(artifactsDir, 'fake-app-start.log');
|
||||||
@@ -179,7 +162,7 @@ function runLauncher(
|
|||||||
{
|
{
|
||||||
env,
|
env,
|
||||||
encoding: 'utf8',
|
encoding: 'utf8',
|
||||||
timeout: LAUNCHER_RUN_TIMEOUT_MS,
|
timeout: 15000,
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
|
||||||
@@ -238,22 +221,6 @@ async function waitForJsonLines(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
test('launcher smoke fixture seeds completed setup state', () => {
|
|
||||||
const smokeCase = createSmokeCase('setup-state');
|
|
||||||
try {
|
|
||||||
const configDir = getDefaultConfigDir({
|
|
||||||
xdgConfigHome: smokeCase.xdgConfigHome,
|
|
||||||
homeDir: smokeCase.homeDir,
|
|
||||||
});
|
|
||||||
const statePath = getSetupStatePath(configDir);
|
|
||||||
|
|
||||||
assert.equal(readSetupState(statePath)?.status, 'completed');
|
|
||||||
} finally {
|
|
||||||
fs.rmSync(smokeCase.root, { recursive: true, force: true });
|
|
||||||
fs.rmSync(smokeCase.socketDir, { recursive: true, force: true });
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
test('launcher mpv status returns ready when socket is connectable', async () => {
|
test('launcher mpv status returns ready when socket is connectable', async () => {
|
||||||
await withSmokeCase('mpv-status', async (smokeCase) => {
|
await withSmokeCase('mpv-status', async (smokeCase) => {
|
||||||
const env = makeTestEnv(smokeCase);
|
const env = makeTestEnv(smokeCase);
|
||||||
@@ -296,7 +263,7 @@ test('launcher mpv status returns ready when socket is connectable', async () =>
|
|||||||
|
|
||||||
test(
|
test(
|
||||||
'launcher start-overlay run forwards socket/backend and stops overlay after mpv exits',
|
'launcher start-overlay run forwards socket/backend and stops overlay after mpv exits',
|
||||||
{ timeout: LONG_SMOKE_TEST_TIMEOUT_MS },
|
{ timeout: 20000 },
|
||||||
async () => {
|
async () => {
|
||||||
await withSmokeCase('overlay-start-stop', async (smokeCase) => {
|
await withSmokeCase('overlay-start-stop', async (smokeCase) => {
|
||||||
const env = makeTestEnv(smokeCase);
|
const env = makeTestEnv(smokeCase);
|
||||||
@@ -355,7 +322,7 @@ test(
|
|||||||
|
|
||||||
test(
|
test(
|
||||||
'launcher starts mpv paused when plugin auto-start visible overlay gate is enabled',
|
'launcher starts mpv paused when plugin auto-start visible overlay gate is enabled',
|
||||||
{ timeout: LONG_SMOKE_TEST_TIMEOUT_MS },
|
{ timeout: 20000 },
|
||||||
async () => {
|
async () => {
|
||||||
await withSmokeCase('autoplay-ready-gate', async (smokeCase) => {
|
await withSmokeCase('autoplay-ready-gate', async (smokeCase) => {
|
||||||
fs.writeFileSync(
|
fs.writeFileSync(
|
||||||
|
|||||||
29
package.json
29
package.json
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "subminer",
|
"name": "subminer",
|
||||||
"version": "0.4.0",
|
"version": "0.3.0",
|
||||||
"description": "All-in-one sentence mining overlay with AnkiConnect and dictionary integration",
|
"description": "All-in-one sentence mining overlay with AnkiConnect and dictionary integration",
|
||||||
"packageManager": "bun@1.3.5",
|
"packageManager": "bun@1.3.5",
|
||||||
"main": "dist/main-entry.js",
|
"main": "dist/main-entry.js",
|
||||||
@@ -8,24 +8,21 @@
|
|||||||
"typecheck": "tsc --noEmit -p tsconfig.typecheck.json",
|
"typecheck": "tsc --noEmit -p tsconfig.typecheck.json",
|
||||||
"typecheck:watch": "tsc --watch --preserveWatchOutput -p tsconfig.typecheck.json",
|
"typecheck:watch": "tsc --watch --preserveWatchOutput -p tsconfig.typecheck.json",
|
||||||
"get-frequency": "bun run scripts/get_frequency.ts --pretty --color-top-x 10000 --yomitan-user-data ~/.config/SubMiner --colorized-line",
|
"get-frequency": "bun run scripts/get_frequency.ts --pretty --color-top-x 10000 --yomitan-user-data ~/.config/SubMiner --colorized-line",
|
||||||
"get-frequency:electron": "bun run build:yomitan && bun build scripts/get_frequency.ts --format=cjs --target=node --outfile dist/scripts/get_frequency.js --external electron && electron dist/scripts/get_frequency.js --pretty --color-top-x 10000 --yomitan-user-data ~/.config/SubMiner --colorized-line",
|
"get-frequency:electron": "bun build scripts/get_frequency.ts --format=cjs --target=node --outfile dist/scripts/get_frequency.js --external electron && electron dist/scripts/get_frequency.js --pretty --color-top-x 10000 --yomitan-user-data ~/.config/SubMiner --colorized-line",
|
||||||
"test-yomitan-parser": "bun run scripts/test-yomitan-parser.ts",
|
"test-yomitan-parser": "bun run scripts/test-yomitan-parser.ts",
|
||||||
"test-yomitan-parser:electron": "bun run build:yomitan && bun build scripts/test-yomitan-parser.ts --format=cjs --target=node --outfile dist/scripts/test-yomitan-parser.js --external electron && electron dist/scripts/test-yomitan-parser.js",
|
"test-yomitan-parser:electron": "bun build scripts/test-yomitan-parser.ts --format=cjs --target=node --outfile dist/scripts/test-yomitan-parser.js --external electron && electron dist/scripts/test-yomitan-parser.js",
|
||||||
"build:yomitan": "node scripts/build-yomitan.mjs",
|
"build": "tsc -p tsconfig.json && bun run build:renderer && cp src/renderer/index.html src/renderer/style.css dist/renderer/ && cp -r src/renderer/fonts dist/renderer/ && bash scripts/build-macos-helper.sh",
|
||||||
"build": "bun run build:yomitan && tsc -p tsconfig.json && bun run build:renderer && cp src/renderer/index.html src/renderer/style.css dist/renderer/ && cp -r src/renderer/fonts dist/renderer/ && bash scripts/build-macos-helper.sh",
|
|
||||||
"build:renderer": "esbuild src/renderer/renderer.ts --bundle --platform=browser --format=esm --target=es2022 --outfile=dist/renderer/renderer.js --sourcemap",
|
"build:renderer": "esbuild src/renderer/renderer.ts --bundle --platform=browser --format=esm --target=es2022 --outfile=dist/renderer/renderer.js --sourcemap",
|
||||||
"format": "prettier --write .",
|
"format": "prettier --write .",
|
||||||
"format:check": "prettier --check .",
|
"format:check": "prettier --check .",
|
||||||
"format:src": "bash scripts/prettier-scope.sh --write",
|
"test:config:src": "bun test src/config/config.test.ts src/config/path-resolution.test.ts src/config/resolve/anki-connect.test.ts src/config/resolve/subtitle-style.test.ts src/config/resolve/jellyfin.test.ts src/config/definitions/domain-registry.test.ts",
|
||||||
"format:check:src": "bash scripts/prettier-scope.sh --check",
|
"test:config:dist": "bun test dist/config/config.test.js dist/config/path-resolution.test.js dist/config/resolve/anki-connect.test.js dist/config/resolve/subtitle-style.test.js dist/config/resolve/jellyfin.test.js dist/config/definitions/domain-registry.test.js",
|
||||||
"test:config:src": "bun test src/config/config.test.ts src/config/path-resolution.test.ts src/config/resolve/anki-connect.test.ts src/config/resolve/subtitle-style.test.ts src/config/resolve/jellyfin.test.ts src/config/definitions/domain-registry.test.ts src/generate-config-example.test.ts",
|
|
||||||
"test:config:dist": "bun test dist/config/config.test.js dist/config/path-resolution.test.js dist/config/resolve/anki-connect.test.js dist/config/resolve/subtitle-style.test.js dist/config/resolve/jellyfin.test.js dist/config/definitions/domain-registry.test.js dist/generate-config-example.test.js",
|
|
||||||
"test:config:smoke:dist": "bun test dist/config/path-resolution.test.js",
|
"test:config:smoke:dist": "bun test dist/config/path-resolution.test.js",
|
||||||
"test:plugin:src": "lua scripts/test-plugin-start-gate.lua",
|
"test:plugin:src": "lua scripts/test-plugin-start-gate.lua",
|
||||||
"test:launcher:smoke:src": "bun test launcher/smoke.e2e.test.ts",
|
"test:launcher:smoke:src": "bun test launcher/smoke.e2e.test.ts",
|
||||||
"test:launcher:src": "bun test launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/smoke.e2e.test.ts && bun run test:plugin:src",
|
"test:launcher:src": "bun test launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/smoke.e2e.test.ts && bun run test:plugin:src",
|
||||||
"test:core:src": "bun test src/cli/args.test.ts src/cli/help.test.ts src/shared/setup-state.test.ts src/core/services/cli-command.test.ts src/core/services/field-grouping-overlay.test.ts src/core/services/numeric-shortcut-session.test.ts src/core/services/secondary-subtitle.test.ts src/core/services/mpv-render-metrics.test.ts src/core/services/overlay-content-measurement.test.ts src/core/services/mpv-control.test.ts src/core/services/mpv.test.ts src/core/services/runtime-options-ipc.test.ts src/core/services/runtime-config.test.ts src/core/services/yomitan-extension-paths.test.ts src/core/services/config-hot-reload.test.ts src/core/services/discord-presence.test.ts src/core/services/tokenizer.test.ts src/core/services/tokenizer/annotation-stage.test.ts src/core/services/tokenizer/parser-selection-stage.test.ts src/core/services/tokenizer/parser-enrichment-stage.test.ts src/core/services/subsync.test.ts src/core/services/overlay-bridge.test.ts src/core/services/overlay-shortcut-handler.test.ts src/core/services/mining.test.ts src/core/services/anki-jimaku.test.ts src/core/services/jimaku-download-path.test.ts src/core/services/jellyfin.test.ts src/core/services/jellyfin-remote.test.ts src/core/services/immersion-tracker-service.test.ts src/core/services/overlay-runtime-init.test.ts src/core/services/app-ready.test.ts src/core/services/startup-bootstrap.test.ts src/core/services/subtitle-processing-controller.test.ts src/core/services/anilist/anilist-update-queue.test.ts src/core/utils/shortcut-config.test.ts src/main/runtime/first-run-setup-plugin.test.ts src/main/runtime/first-run-setup-service.test.ts src/main/runtime/first-run-setup-window.test.ts src/main/runtime/tray-runtime.test.ts src/main/runtime/tray-main-actions.test.ts src/main/runtime/tray-main-deps.test.ts src/main/runtime/tray-runtime-handlers.test.ts src/main/runtime/cli-command-context-main-deps.test.ts src/main/runtime/app-ready-main-deps.test.ts src/renderer/error-recovery.test.ts 
src/renderer/subtitle-render.test.ts src/renderer/handlers/mouse.test.ts src/renderer/handlers/keyboard.test.ts src/renderer/modals/jimaku.test.ts src/subsync/utils.test.ts src/main/anilist-url-guard.test.ts src/window-trackers/x11-tracker.test.ts launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/setup-gate.test.ts",
|
"test:core:src": "bun test src/cli/args.test.ts src/cli/help.test.ts src/core/services/cli-command.test.ts src/core/services/field-grouping-overlay.test.ts src/core/services/numeric-shortcut-session.test.ts src/core/services/secondary-subtitle.test.ts src/core/services/mpv-render-metrics.test.ts src/core/services/overlay-content-measurement.test.ts src/core/services/mpv-control.test.ts src/core/services/mpv.test.ts src/core/services/runtime-options-ipc.test.ts src/core/services/runtime-config.test.ts src/core/services/config-hot-reload.test.ts src/core/services/discord-presence.test.ts src/core/services/tokenizer.test.ts src/core/services/tokenizer/annotation-stage.test.ts src/core/services/tokenizer/parser-selection-stage.test.ts src/core/services/tokenizer/parser-enrichment-stage.test.ts src/core/services/subsync.test.ts src/core/services/overlay-bridge.test.ts src/core/services/overlay-shortcut-handler.test.ts src/core/services/mining.test.ts src/core/services/anki-jimaku.test.ts src/core/services/jimaku-download-path.test.ts src/core/services/jellyfin.test.ts src/core/services/jellyfin-remote.test.ts src/core/services/immersion-tracker-service.test.ts src/core/services/overlay-runtime-init.test.ts src/core/services/app-ready.test.ts src/core/services/startup-bootstrap.test.ts src/core/services/subtitle-processing-controller.test.ts src/core/services/anilist/anilist-update-queue.test.ts src/core/utils/shortcut-config.test.ts src/renderer/error-recovery.test.ts src/renderer/subtitle-render.test.ts src/renderer/handlers/mouse.test.ts src/renderer/handlers/keyboard.test.ts src/renderer/modals/jimaku.test.ts src/subsync/utils.test.ts src/main/anilist-url-guard.test.ts src/window-trackers/x11-tracker.test.ts launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts",
|
||||||
"test:core:dist": "bun test dist/cli/args.test.js dist/cli/help.test.js dist/core/services/cli-command.test.js dist/core/services/ipc.test.js dist/core/services/anki-jimaku-ipc.test.js dist/core/services/field-grouping-overlay.test.js dist/core/services/numeric-shortcut-session.test.js dist/core/services/secondary-subtitle.test.js dist/core/services/mpv-render-metrics.test.js dist/core/services/overlay-content-measurement.test.js dist/core/services/mpv-control.test.js dist/core/services/mpv.test.js dist/core/services/runtime-options-ipc.test.js dist/core/services/runtime-config.test.js dist/core/services/yomitan-extension-paths.test.js dist/core/services/config-hot-reload.test.js dist/core/services/discord-presence.test.js dist/core/services/tokenizer.test.js dist/core/services/tokenizer/annotation-stage.test.js dist/core/services/tokenizer/parser-selection-stage.test.js dist/core/services/tokenizer/parser-enrichment-stage.test.js dist/core/services/subsync.test.js dist/core/services/overlay-bridge.test.js dist/core/services/overlay-manager.test.js dist/core/services/overlay-shortcut-handler.test.js dist/core/services/mining.test.js dist/core/services/anki-jimaku.test.js dist/core/services/jimaku-download-path.test.js dist/core/services/jellyfin.test.js dist/core/services/jellyfin-remote.test.js dist/core/services/immersion-tracker-service.test.js dist/core/services/overlay-runtime-init.test.js dist/core/services/app-ready.test.js dist/core/services/startup-bootstrap.test.js dist/core/services/subtitle-processing-controller.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/anilist/anilist-update-queue.test.js dist/renderer/error-recovery.test.js dist/renderer/subtitle-render.test.js dist/renderer/handlers/mouse.test.js dist/renderer/handlers/keyboard.test.js dist/renderer/modals/jimaku.test.js dist/subsync/utils.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/x11-tracker.test.js",
|
"test:core:dist": "bun test dist/cli/args.test.js dist/cli/help.test.js dist/core/services/cli-command.test.js dist/core/services/ipc.test.js dist/core/services/anki-jimaku-ipc.test.js dist/core/services/field-grouping-overlay.test.js dist/core/services/numeric-shortcut-session.test.js dist/core/services/secondary-subtitle.test.js dist/core/services/mpv-render-metrics.test.js dist/core/services/overlay-content-measurement.test.js dist/core/services/mpv-control.test.js dist/core/services/mpv.test.js dist/core/services/runtime-options-ipc.test.js dist/core/services/runtime-config.test.js dist/core/services/config-hot-reload.test.js dist/core/services/discord-presence.test.js dist/core/services/tokenizer.test.js dist/core/services/tokenizer/annotation-stage.test.js dist/core/services/tokenizer/parser-selection-stage.test.js dist/core/services/tokenizer/parser-enrichment-stage.test.js dist/core/services/subsync.test.js dist/core/services/overlay-bridge.test.js dist/core/services/overlay-manager.test.js dist/core/services/overlay-shortcut-handler.test.js dist/core/services/mining.test.js dist/core/services/anki-jimaku.test.js dist/core/services/jimaku-download-path.test.js dist/core/services/jellyfin.test.js dist/core/services/jellyfin-remote.test.js dist/core/services/immersion-tracker-service.test.js dist/core/services/overlay-runtime-init.test.js dist/core/services/app-ready.test.js dist/core/services/startup-bootstrap.test.js dist/core/services/subtitle-processing-controller.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/anilist/anilist-update-queue.test.js dist/renderer/error-recovery.test.js dist/renderer/subtitle-render.test.js dist/renderer/handlers/mouse.test.js dist/renderer/handlers/keyboard.test.js dist/renderer/modals/jimaku.test.js dist/subsync/utils.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/x11-tracker.test.js",
|
||||||
"test:core:smoke:dist": "bun test dist/cli/help.test.js dist/core/services/runtime-config.test.js dist/core/services/ipc.test.js dist/core/services/overlay-manager.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/startup-bootstrap.test.js dist/renderer/error-recovery.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/x11-tracker.test.js",
|
"test:core:smoke:dist": "bun test dist/cli/help.test.js dist/core/services/runtime-config.test.js dist/core/services/ipc.test.js dist/core/services/overlay-manager.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/startup-bootstrap.test.js dist/renderer/error-recovery.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/x11-tracker.test.js",
|
||||||
"test:smoke:dist": "bun run test:config:smoke:dist && bun run test:core:smoke:dist",
|
"test:smoke:dist": "bun run test:config:smoke:dist && bun run test:core:smoke:dist",
|
||||||
"test:subtitle:src": "bun test src/core/services/subsync.test.ts src/subsync/utils.test.ts",
|
"test:subtitle:src": "bun test src/core/services/subsync.test.ts src/subsync/utils.test.ts",
|
||||||
@@ -120,7 +117,7 @@
|
|||||||
],
|
],
|
||||||
"extraResources": [
|
"extraResources": [
|
||||||
{
|
{
|
||||||
"from": "build/yomitan",
|
"from": "vendor/yomitan",
|
||||||
"to": "yomitan"
|
"to": "yomitan"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -131,14 +128,6 @@
|
|||||||
"from": "assets",
|
"from": "assets",
|
||||||
"to": "assets"
|
"to": "assets"
|
||||||
},
|
},
|
||||||
{
|
|
||||||
"from": "plugin/subminer",
|
|
||||||
"to": "plugin/subminer"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"from": "plugin/subminer.conf",
|
|
||||||
"to": "plugin/subminer.conf"
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"from": "dist/scripts/get-mpv-window-macos",
|
"from": "dist/scripts/get-mpv-window-macos",
|
||||||
"to": "scripts/get-mpv-window-macos"
|
"to": "scripts/get-mpv-window-macos"
|
||||||
|
|||||||
@@ -1,144 +0,0 @@
|
|||||||
import fs from 'node:fs';
|
|
||||||
import os from 'node:os';
|
|
||||||
import path from 'node:path';
|
|
||||||
import { createHash } from 'node:crypto';
|
|
||||||
import { execFileSync } from 'node:child_process';
|
|
||||||
import { fileURLToPath } from 'node:url';
|
|
||||||
|
|
||||||
const dirname = path.dirname(fileURLToPath(import.meta.url));
|
|
||||||
const repoRoot = path.resolve(dirname, '..');
|
|
||||||
const submoduleDir = path.join(repoRoot, 'vendor', 'subminer-yomitan');
|
|
||||||
const submodulePackagePath = path.join(submoduleDir, 'package.json');
|
|
||||||
const submodulePackageLockPath = path.join(submoduleDir, 'package-lock.json');
|
|
||||||
const buildOutputDir = path.join(repoRoot, 'build', 'yomitan');
|
|
||||||
const stampPath = path.join(buildOutputDir, '.subminer-build.json');
|
|
||||||
const zipPath = path.join(submoduleDir, 'builds', 'yomitan-chrome.zip');
|
|
||||||
const npmCommand = process.platform === 'win32' ? 'npm.cmd' : 'npm';
|
|
||||||
const dependencyStampPath = path.join(submoduleDir, 'node_modules', '.subminer-package-lock-hash');
|
|
||||||
|
|
||||||
function run(command, args, cwd) {
|
|
||||||
execFileSync(command, args, { cwd, stdio: 'inherit' });
|
|
||||||
}
|
|
||||||
|
|
||||||
function readCommand(command, args, cwd) {
|
|
||||||
return execFileSync(command, args, { cwd, encoding: 'utf8' }).trim();
|
|
||||||
}
|
|
||||||
|
|
||||||
function readStamp() {
|
|
||||||
try {
|
|
||||||
return JSON.parse(fs.readFileSync(stampPath, 'utf8'));
|
|
||||||
} catch {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function hashFile(filePath) {
|
|
||||||
const hash = createHash('sha256');
|
|
||||||
hash.update(fs.readFileSync(filePath));
|
|
||||||
return hash.digest('hex');
|
|
||||||
}
|
|
||||||
|
|
||||||
function ensureSubmodulePresent() {
|
|
||||||
if (!fs.existsSync(submodulePackagePath)) {
|
|
||||||
throw new Error(
|
|
||||||
'Missing vendor/subminer-yomitan submodule. Run `git submodule update --init --recursive`.',
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function getSourceState() {
|
|
||||||
const revision = readCommand('git', ['rev-parse', 'HEAD'], submoduleDir);
|
|
||||||
const dirty = readCommand('git', ['status', '--short', '--untracked-files=no'], submoduleDir);
|
|
||||||
return { revision, dirty };
|
|
||||||
}
|
|
||||||
|
|
||||||
function isBuildCurrent(force) {
|
|
||||||
if (force) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
if (!fs.existsSync(path.join(buildOutputDir, 'manifest.json'))) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
const stamp = readStamp();
|
|
||||||
if (!stamp) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
const currentState = getSourceState();
|
|
||||||
return stamp.revision === currentState.revision && stamp.dirty === currentState.dirty;
|
|
||||||
}
|
|
||||||
|
|
||||||
function ensureDependenciesInstalled() {
|
|
||||||
const nodeModulesDir = path.join(submoduleDir, 'node_modules');
|
|
||||||
const currentLockHash = hashFile(submodulePackageLockPath);
|
|
||||||
let installedLockHash = '';
|
|
||||||
try {
|
|
||||||
installedLockHash = fs.readFileSync(dependencyStampPath, 'utf8').trim();
|
|
||||||
} catch {}
|
|
||||||
|
|
||||||
if (!fs.existsSync(nodeModulesDir) || installedLockHash !== currentLockHash) {
|
|
||||||
run(npmCommand, ['ci'], submoduleDir);
|
|
||||||
fs.mkdirSync(nodeModulesDir, { recursive: true });
|
|
||||||
fs.writeFileSync(dependencyStampPath, `${currentLockHash}\n`, 'utf8');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function installAndBuild() {
|
|
||||||
ensureDependenciesInstalled();
|
|
||||||
run(npmCommand, ['run', 'build', '--', '--target', 'chrome'], submoduleDir);
|
|
||||||
}
|
|
||||||
|
|
||||||
function extractBuild() {
|
|
||||||
if (!fs.existsSync(zipPath)) {
|
|
||||||
throw new Error(`Expected Yomitan build artifact at ${zipPath}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-yomitan-'));
|
|
||||||
try {
|
|
||||||
run('unzip', ['-qo', zipPath, '-d', tempDir], repoRoot);
|
|
||||||
fs.rmSync(buildOutputDir, { recursive: true, force: true });
|
|
||||||
fs.mkdirSync(path.dirname(buildOutputDir), { recursive: true });
|
|
||||||
fs.cpSync(tempDir, buildOutputDir, { recursive: true });
|
|
||||||
if (!fs.existsSync(path.join(buildOutputDir, 'manifest.json'))) {
|
|
||||||
throw new Error(`Extracted Yomitan build missing manifest.json in ${buildOutputDir}`);
|
|
||||||
}
|
|
||||||
} finally {
|
|
||||||
fs.rmSync(tempDir, { recursive: true, force: true });
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function writeStamp() {
|
|
||||||
const state = getSourceState();
|
|
||||||
fs.writeFileSync(
|
|
||||||
stampPath,
|
|
||||||
`${JSON.stringify(
|
|
||||||
{
|
|
||||||
revision: state.revision,
|
|
||||||
dirty: state.dirty,
|
|
||||||
builtAt: new Date().toISOString(),
|
|
||||||
},
|
|
||||||
null,
|
|
||||||
2,
|
|
||||||
)}\n`,
|
|
||||||
'utf8',
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
function main() {
|
|
||||||
const force = process.argv.includes('--force');
|
|
||||||
ensureSubmodulePresent();
|
|
||||||
|
|
||||||
if (isBuildCurrent(force)) {
|
|
||||||
process.stdout.write(`Yomitan build current: ${buildOutputDir}\n`);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
process.stdout.write('Building Yomitan Chrome artifact...\n');
|
|
||||||
installAndBuild();
|
|
||||||
extractBuild();
|
|
||||||
writeStamp();
|
|
||||||
process.stdout.write(`Yomitan extracted to ${buildOutputDir}\n`);
|
|
||||||
}
|
|
||||||
|
|
||||||
main();
|
|
||||||
@@ -4,7 +4,6 @@ import process from 'node:process';
|
|||||||
|
|
||||||
import { createTokenizerDepsRuntime, tokenizeSubtitle } from '../src/core/services/tokenizer.js';
|
import { createTokenizerDepsRuntime, tokenizeSubtitle } from '../src/core/services/tokenizer.js';
|
||||||
import { createFrequencyDictionaryLookup } from '../src/core/services/frequency-dictionary.js';
|
import { createFrequencyDictionaryLookup } from '../src/core/services/frequency-dictionary.js';
|
||||||
import { resolveYomitanExtensionPath as resolveBuiltYomitanExtensionPath } from '../src/core/services/yomitan-extension-paths.js';
|
|
||||||
import { MecabTokenizer } from '../src/mecab-tokenizer.js';
|
import { MecabTokenizer } from '../src/mecab-tokenizer.js';
|
||||||
import type { MergedToken, FrequencyDictionaryLookup } from '../src/types.js';
|
import type { MergedToken, FrequencyDictionaryLookup } from '../src/types.js';
|
||||||
|
|
||||||
@@ -49,7 +48,7 @@ function parseCliArgs(argv: string[]): CliOptions {
|
|||||||
let colorBand1 = '#ed8796';
|
let colorBand1 = '#ed8796';
|
||||||
let colorBand2 = '#f5a97f';
|
let colorBand2 = '#f5a97f';
|
||||||
let colorBand3 = '#f9e2af';
|
let colorBand3 = '#f9e2af';
|
||||||
let colorBand4 = '#8bd5ca';
|
let colorBand4 = '#a6e3a1';
|
||||||
let colorBand5 = '#8aadf4';
|
let colorBand5 = '#8aadf4';
|
||||||
let colorKnown = '#a6da95';
|
let colorKnown = '#a6da95';
|
||||||
let colorNPlusOne = '#c6a0f6';
|
let colorNPlusOne = '#c6a0f6';
|
||||||
@@ -95,7 +94,7 @@ function parseCliArgs(argv: string[]): CliOptions {
|
|||||||
if (!next) {
|
if (!next) {
|
||||||
throw new Error('Missing value for --yomitan-extension');
|
throw new Error('Missing value for --yomitan-extension');
|
||||||
}
|
}
|
||||||
yomitanExtensionPath = next;
|
yomitanExtensionPath = path.resolve(next);
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -104,7 +103,7 @@ function parseCliArgs(argv: string[]): CliOptions {
|
|||||||
if (!next) {
|
if (!next) {
|
||||||
throw new Error('Missing value for --yomitan-user-data');
|
throw new Error('Missing value for --yomitan-user-data');
|
||||||
}
|
}
|
||||||
yomitanUserDataPath = next;
|
yomitanUserDataPath = path.resolve(next);
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -226,12 +225,12 @@ function parseCliArgs(argv: string[]): CliOptions {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (arg.startsWith('--yomitan-extension=')) {
|
if (arg.startsWith('--yomitan-extension=')) {
|
||||||
yomitanExtensionPath = arg.slice('--yomitan-extension='.length);
|
yomitanExtensionPath = path.resolve(arg.slice('--yomitan-extension='.length));
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (arg.startsWith('--yomitan-user-data=')) {
|
if (arg.startsWith('--yomitan-user-data=')) {
|
||||||
yomitanUserDataPath = arg.slice('--yomitan-user-data='.length);
|
yomitanUserDataPath = path.resolve(arg.slice('--yomitan-user-data='.length));
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -525,10 +524,7 @@ function destroyUnknownParserWindow(window: unknown): void {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async function createYomitanRuntimeState(
|
async function createYomitanRuntimeState(userDataPath: string): Promise<YomitanRuntimeState> {
|
||||||
userDataPath: string,
|
|
||||||
extensionPath?: string,
|
|
||||||
): Promise<YomitanRuntimeState> {
|
|
||||||
const state: YomitanRuntimeState = {
|
const state: YomitanRuntimeState = {
|
||||||
yomitanExt: null,
|
yomitanExt: null,
|
||||||
parserWindow: null,
|
parserWindow: null,
|
||||||
@@ -551,7 +547,6 @@ async function createYomitanRuntimeState(
|
|||||||
const loadYomitanExtension = (await import('../src/core/services/yomitan-extension-loader.js'))
|
const loadYomitanExtension = (await import('../src/core/services/yomitan-extension-loader.js'))
|
||||||
.loadYomitanExtension as (options: {
|
.loadYomitanExtension as (options: {
|
||||||
userDataPath: string;
|
userDataPath: string;
|
||||||
extensionPath?: string;
|
|
||||||
getYomitanParserWindow: () => unknown;
|
getYomitanParserWindow: () => unknown;
|
||||||
setYomitanParserWindow: (window: unknown) => void;
|
setYomitanParserWindow: (window: unknown) => void;
|
||||||
setYomitanParserReadyPromise: (promise: Promise<void> | null) => void;
|
setYomitanParserReadyPromise: (promise: Promise<void> | null) => void;
|
||||||
@@ -561,7 +556,6 @@ async function createYomitanRuntimeState(
|
|||||||
|
|
||||||
const extension = await loadYomitanExtension({
|
const extension = await loadYomitanExtension({
|
||||||
userDataPath,
|
userDataPath,
|
||||||
extensionPath,
|
|
||||||
getYomitanParserWindow: () => state.parserWindow,
|
getYomitanParserWindow: () => state.parserWindow,
|
||||||
setYomitanParserWindow: (window) => {
|
setYomitanParserWindow: (window) => {
|
||||||
state.parserWindow = window;
|
state.parserWindow = window;
|
||||||
@@ -595,16 +589,17 @@ async function createYomitanRuntimeStateWithSearch(
|
|||||||
userDataPath: string,
|
userDataPath: string,
|
||||||
extensionPath?: string,
|
extensionPath?: string,
|
||||||
): Promise<YomitanRuntimeState> {
|
): Promise<YomitanRuntimeState> {
|
||||||
const resolvedExtensionPath = resolveBuiltYomitanExtensionPath({
|
const preferredPath = extensionPath ? path.resolve(extensionPath) : undefined;
|
||||||
explicitPath: extensionPath,
|
const defaultVendorPath = path.resolve(process.cwd(), 'vendor', 'yomitan');
|
||||||
cwd: process.cwd(),
|
const candidates = [...(preferredPath ? [preferredPath] : []), defaultVendorPath];
|
||||||
});
|
|
||||||
const candidates = resolvedExtensionPath ? [resolvedExtensionPath] : [];
|
|
||||||
|
|
||||||
for (const candidate of candidates) {
|
for (const candidate of candidates) {
|
||||||
|
if (!candidate) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
try {
|
try {
|
||||||
if (fs.existsSync(path.join(candidate, 'manifest.json'))) {
|
if (fs.existsSync(path.join(candidate, 'manifest.json'))) {
|
||||||
const state = await createYomitanRuntimeState(userDataPath, candidate);
|
const state = await createYomitanRuntimeState(userDataPath);
|
||||||
if (state.available) {
|
if (state.available) {
|
||||||
return state;
|
return state;
|
||||||
}
|
}
|
||||||
@@ -618,7 +613,7 @@ async function createYomitanRuntimeStateWithSearch(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return createYomitanRuntimeState(userDataPath, resolvedExtensionPath ?? undefined);
|
return createYomitanRuntimeState(userDataPath);
|
||||||
}
|
}
|
||||||
|
|
||||||
async function getFrequencyLookup(dictionaryPath: string): Promise<FrequencyDictionaryLookup> {
|
async function getFrequencyLookup(dictionaryPath: string): Promise<FrequencyDictionaryLookup> {
|
||||||
|
|||||||
@@ -1,16 +1,261 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
|
#
|
||||||
|
# SubMiner - All-in-one sentence mining overlay
|
||||||
|
# Copyright (C) 2024 sudacode
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
#
|
||||||
|
# patch-yomitan.sh - Apply Electron compatibility patches to Yomitan
|
||||||
|
#
|
||||||
|
# This script applies the necessary patches to make Yomitan work in Electron
|
||||||
|
# after upgrading to a new version. Run this after extracting a fresh Yomitan release.
|
||||||
|
#
|
||||||
|
# Usage: ./patch-yomitan.sh [yomitan_dir]
|
||||||
|
# yomitan_dir: Path to the Yomitan directory (default: vendor/yomitan)
|
||||||
|
#
|
||||||
|
|
||||||
set -euo pipefail
|
set -e
|
||||||
|
|
||||||
cat <<'EOF'
|
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||||
patch-yomitan.sh is retired.
|
YOMITAN_DIR="${1:-$SCRIPT_DIR/../vendor/yomitan}"
|
||||||
|
|
||||||
SubMiner now uses the forked source submodule at vendor/subminer-yomitan and builds the
|
if [ ! -d "$YOMITAN_DIR" ]; then
|
||||||
Chromium extension artifact into build/yomitan.
|
echo "Error: Yomitan directory not found: $YOMITAN_DIR"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
Use:
|
echo "Patching Yomitan in: $YOMITAN_DIR"
|
||||||
git submodule update --init --recursive
|
|
||||||
bun run build:yomitan
|
|
||||||
|
|
||||||
If you need to change Electron compatibility behavior, patch the forked source repo and rebuild.
|
PERMISSIONS_UTIL="$YOMITAN_DIR/js/data/permissions-util.js"
|
||||||
EOF
|
|
||||||
|
if [ ! -f "$PERMISSIONS_UTIL" ]; then
|
||||||
|
echo "Error: permissions-util.js not found at $PERMISSIONS_UTIL"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Patching permissions-util.js..."
|
||||||
|
|
||||||
|
if grep -q "Electron workaround" "$PERMISSIONS_UTIL"; then
|
||||||
|
echo " - Already patched, skipping"
|
||||||
|
else
|
||||||
|
cat > "$PERMISSIONS_UTIL.tmp" << 'PATCH_EOF'
|
||||||
|
/*
|
||||||
|
* Copyright (C) 2023-2025 Yomitan Authors
|
||||||
|
* Copyright (C) 2021-2022 Yomichan Authors
|
||||||
|
*
|
||||||
|
* This program is free software: you can redistribute it and/or modify
|
||||||
|
* it under the terms of the GNU General Public License as published by
|
||||||
|
* the Free Software Foundation, either version 3 of the License, or
|
||||||
|
* (at your option) any later version.
|
||||||
|
*
|
||||||
|
* This program is distributed in the hope that it will be useful,
|
||||||
|
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
* GNU General Public License for more details.
|
||||||
|
*
|
||||||
|
* You should have received a copy of the GNU General Public License
|
||||||
|
* along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import {getFieldMarkers} from './anki-util.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This function returns whether an Anki field marker might require clipboard permissions.
|
||||||
|
* This is speculative and may not guarantee that the field marker actually does require the permission,
|
||||||
|
* as the custom handlebars template is not deeply inspected.
|
||||||
|
* @param {string} marker
|
||||||
|
* @returns {boolean}
|
||||||
|
*/
|
||||||
|
function ankiFieldMarkerMayUseClipboard(marker) {
|
||||||
|
switch (marker) {
|
||||||
|
case 'clipboard-image':
|
||||||
|
case 'clipboard-text':
|
||||||
|
return true;
|
||||||
|
default:
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @param {chrome.permissions.Permissions} permissions
|
||||||
|
* @returns {Promise<boolean>}
|
||||||
|
*/
|
||||||
|
export function hasPermissions(permissions) {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
chrome.permissions.contains(permissions, (result) => {
|
||||||
|
const e = chrome.runtime.lastError;
|
||||||
|
if (e) {
|
||||||
|
reject(new Error(e.message));
|
||||||
|
} else {
|
||||||
|
resolve(result);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @param {chrome.permissions.Permissions} permissions
|
||||||
|
* @param {boolean} shouldHave
|
||||||
|
* @returns {Promise<boolean>}
|
||||||
|
*/
|
||||||
|
export function setPermissionsGranted(permissions, shouldHave) {
|
||||||
|
return (
|
||||||
|
shouldHave ?
|
||||||
|
new Promise((resolve, reject) => {
|
||||||
|
chrome.permissions.request(permissions, (result) => {
|
||||||
|
const e = chrome.runtime.lastError;
|
||||||
|
if (e) {
|
||||||
|
reject(new Error(e.message));
|
||||||
|
} else {
|
||||||
|
resolve(result);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}) :
|
||||||
|
new Promise((resolve, reject) => {
|
||||||
|
chrome.permissions.remove(permissions, (result) => {
|
||||||
|
const e = chrome.runtime.lastError;
|
||||||
|
if (e) {
|
||||||
|
reject(new Error(e.message));
|
||||||
|
} else {
|
||||||
|
resolve(!result);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
})
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @returns {Promise<chrome.permissions.Permissions>}
|
||||||
|
*/
|
||||||
|
export function getAllPermissions() {
|
||||||
|
// Electron workaround - chrome.permissions.getAll() not available
|
||||||
|
return Promise.resolve({
|
||||||
|
origins: ["<all_urls>"],
|
||||||
|
permissions: ["clipboardWrite", "storage", "unlimitedStorage", "scripting", "contextMenus"]
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @param {string} fieldValue
|
||||||
|
* @returns {string[]}
|
||||||
|
*/
|
||||||
|
export function getRequiredPermissionsForAnkiFieldValue(fieldValue) {
|
||||||
|
const markers = getFieldMarkers(fieldValue);
|
||||||
|
for (const marker of markers) {
|
||||||
|
if (ankiFieldMarkerMayUseClipboard(marker)) {
|
||||||
|
return ['clipboardRead'];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @param {chrome.permissions.Permissions} permissions
|
||||||
|
* @param {import('settings').ProfileOptions} options
|
||||||
|
* @returns {boolean}
|
||||||
|
*/
|
||||||
|
export function hasRequiredPermissionsForOptions(permissions, options) {
|
||||||
|
const permissionsSet = new Set(permissions.permissions);
|
||||||
|
|
||||||
|
if (!permissionsSet.has('nativeMessaging') && (options.parsing.enableMecabParser || options.general.enableYomitanApi)) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!permissionsSet.has('clipboardRead')) {
|
||||||
|
if (options.clipboard.enableBackgroundMonitor || options.clipboard.enableSearchPageMonitor) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
const fieldsList = options.anki.cardFormats.map((cardFormat) => cardFormat.fields);
|
||||||
|
|
||||||
|
for (const fields of fieldsList) {
|
||||||
|
for (const {value: fieldValue} of Object.values(fields)) {
|
||||||
|
const markers = getFieldMarkers(fieldValue);
|
||||||
|
for (const marker of markers) {
|
||||||
|
if (ankiFieldMarkerMayUseClipboard(marker)) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
PATCH_EOF
|
||||||
|
|
||||||
|
mv "$PERMISSIONS_UTIL.tmp" "$PERMISSIONS_UTIL"
|
||||||
|
echo " - Patched successfully"
|
||||||
|
fi
|
||||||
|
|
||||||
|
OPTIONS_SCHEMA="$YOMITAN_DIR/data/schemas/options-schema.json"
|
||||||
|
|
||||||
|
if [ ! -f "$OPTIONS_SCHEMA" ]; then
|
||||||
|
echo "Error: options-schema.json not found at $OPTIONS_SCHEMA"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Patching options-schema.json..."
|
||||||
|
|
||||||
|
if grep -q '"selectText".*"default": true' "$OPTIONS_SCHEMA"; then
|
||||||
|
sed -i '/"selectText": {/,/"default":/{s/"default": true/"default": false/}' "$OPTIONS_SCHEMA"
|
||||||
|
echo " - Changed selectText default to false"
|
||||||
|
elif grep -q '"selectText".*"default": false' "$OPTIONS_SCHEMA"; then
|
||||||
|
echo " - selectText already set to false, skipping"
|
||||||
|
else
|
||||||
|
echo " - Warning: Could not find selectText setting"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if grep -q '"layoutAwareScan".*"default": true' "$OPTIONS_SCHEMA"; then
|
||||||
|
sed -i '/"layoutAwareScan": {/,/"default":/{s/"default": true/"default": false/}' "$OPTIONS_SCHEMA"
|
||||||
|
echo " - Changed layoutAwareScan default to false"
|
||||||
|
elif grep -q '"layoutAwareScan".*"default": false' "$OPTIONS_SCHEMA"; then
|
||||||
|
echo " - layoutAwareScan already set to false, skipping"
|
||||||
|
else
|
||||||
|
echo " - Warning: Could not find layoutAwareScan setting"
|
||||||
|
fi
|
||||||
|
|
||||||
|
POPUP_JS="$YOMITAN_DIR/js/app/popup.js"
|
||||||
|
|
||||||
|
if [ ! -f "$POPUP_JS" ]; then
|
||||||
|
echo "Error: popup.js not found at $POPUP_JS"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Patching popup.js..."
|
||||||
|
|
||||||
|
if grep -q "yomitan-popup-shown" "$POPUP_JS"; then
|
||||||
|
echo " - Already patched, skipping"
|
||||||
|
else
|
||||||
|
# Add the visibility event dispatch after the existing _onVisibleChange code
|
||||||
|
# We need to add it after: void this._invokeSafe('displayVisibilityChanged', {value});
|
||||||
|
sed -i "/void this._invokeSafe('displayVisibilityChanged', {value});/a\\
|
||||||
|
\\
|
||||||
|
// Dispatch custom events for popup visibility (Electron integration)\\
|
||||||
|
if (value) {\\
|
||||||
|
window.dispatchEvent(new CustomEvent('yomitan-popup-shown'));\\
|
||||||
|
} else {\\
|
||||||
|
window.dispatchEvent(new CustomEvent('yomitan-popup-hidden'));\\
|
||||||
|
}" "$POPUP_JS"
|
||||||
|
echo " - Added visibility events"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "Yomitan patching complete!"
|
||||||
|
echo ""
|
||||||
|
echo "Changes applied:"
|
||||||
|
echo " 1. permissions-util.js: Hardcoded permissions (Electron workaround)"
|
||||||
|
echo " 2. options-schema.json: selectText=false, layoutAwareScan=false"
|
||||||
|
echo " 3. popup.js: Added yomitan-popup-shown/hidden events"
|
||||||
|
echo ""
|
||||||
|
echo "To verify: Run 'bun run dev' and check for 'Yomitan extension loaded successfully'"
|
||||||
|
|||||||
@@ -1,20 +0,0 @@
|
|||||||
#!/usr/bin/env bash
|
|
||||||
set -euo pipefail
|
|
||||||
|
|
||||||
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
|
|
||||||
cd "$ROOT_DIR"
|
|
||||||
|
|
||||||
paths=(
|
|
||||||
"package.json"
|
|
||||||
"tsconfig.json"
|
|
||||||
"tsconfig.renderer.json"
|
|
||||||
"tsconfig.typecheck.json"
|
|
||||||
".prettierrc.json"
|
|
||||||
".github"
|
|
||||||
"build"
|
|
||||||
"launcher"
|
|
||||||
"scripts"
|
|
||||||
"src"
|
|
||||||
)
|
|
||||||
|
|
||||||
exec bunx prettier "$@" "${paths[@]}"
|
|
||||||
@@ -4,7 +4,6 @@ import path from 'node:path';
|
|||||||
import process from 'node:process';
|
import process from 'node:process';
|
||||||
|
|
||||||
import { createTokenizerDepsRuntime, tokenizeSubtitle } from '../src/core/services/tokenizer.js';
|
import { createTokenizerDepsRuntime, tokenizeSubtitle } from '../src/core/services/tokenizer.js';
|
||||||
import { resolveYomitanExtensionPath as resolveBuiltYomitanExtensionPath } from '../src/core/services/yomitan-extension-paths.js';
|
|
||||||
import { MecabTokenizer } from '../src/mecab-tokenizer.js';
|
import { MecabTokenizer } from '../src/mecab-tokenizer.js';
|
||||||
import type { MergedToken } from '../src/types.js';
|
import type { MergedToken } from '../src/types.js';
|
||||||
|
|
||||||
@@ -113,12 +112,12 @@ function parseCliArgs(argv: string[]): CliOptions {
|
|||||||
if (!next) {
|
if (!next) {
|
||||||
throw new Error('Missing value for --yomitan-extension');
|
throw new Error('Missing value for --yomitan-extension');
|
||||||
}
|
}
|
||||||
yomitanExtensionPath = next;
|
yomitanExtensionPath = path.resolve(next);
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (arg.startsWith('--yomitan-extension=')) {
|
if (arg.startsWith('--yomitan-extension=')) {
|
||||||
yomitanExtensionPath = arg.slice('--yomitan-extension='.length);
|
yomitanExtensionPath = path.resolve(arg.slice('--yomitan-extension='.length));
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -127,12 +126,12 @@ function parseCliArgs(argv: string[]): CliOptions {
|
|||||||
if (!next) {
|
if (!next) {
|
||||||
throw new Error('Missing value for --yomitan-user-data');
|
throw new Error('Missing value for --yomitan-user-data');
|
||||||
}
|
}
|
||||||
yomitanUserDataPath = next;
|
yomitanUserDataPath = path.resolve(next);
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (arg.startsWith('--yomitan-user-data=')) {
|
if (arg.startsWith('--yomitan-user-data=')) {
|
||||||
yomitanUserDataPath = arg.slice('--yomitan-user-data='.length);
|
yomitanUserDataPath = path.resolve(arg.slice('--yomitan-user-data='.length));
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -373,10 +372,21 @@ function findSelectedCandidateIndexes(
|
|||||||
}
|
}
|
||||||
|
|
||||||
function resolveYomitanExtensionPath(explicitPath?: string): string | null {
|
function resolveYomitanExtensionPath(explicitPath?: string): string | null {
|
||||||
return resolveBuiltYomitanExtensionPath({
|
const candidates = [
|
||||||
explicitPath,
|
explicitPath ? path.resolve(explicitPath) : null,
|
||||||
cwd: process.cwd(),
|
path.resolve(process.cwd(), 'vendor', 'yomitan'),
|
||||||
});
|
];
|
||||||
|
|
||||||
|
for (const candidate of candidates) {
|
||||||
|
if (!candidate) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (fs.existsSync(path.join(candidate, 'manifest.json'))) {
|
||||||
|
return candidate;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
async function setupYomitanRuntime(options: CliOptions): Promise<YomitanRuntimeState> {
|
async function setupYomitanRuntime(options: CliOptions): Promise<YomitanRuntimeState> {
|
||||||
@@ -410,7 +420,7 @@ async function setupYomitanRuntime(options: CliOptions): Promise<YomitanRuntimeS
|
|||||||
|
|
||||||
const extensionPath = resolveYomitanExtensionPath(options.yomitanExtensionPath);
|
const extensionPath = resolveYomitanExtensionPath(options.yomitanExtensionPath);
|
||||||
if (!extensionPath) {
|
if (!extensionPath) {
|
||||||
state.note = 'no built Yomitan extension directory found; run `bun run build:yomitan`';
|
state.note = 'no Yomitan extension directory found';
|
||||||
return state;
|
return state;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -222,11 +222,9 @@ test('AnkiIntegration does not allocate proxy server when proxy transport is dis
|
|||||||
);
|
);
|
||||||
|
|
||||||
const privateState = integration as unknown as {
|
const privateState = integration as unknown as {
|
||||||
runtime: {
|
|
||||||
proxyServer: unknown | null;
|
proxyServer: unknown | null;
|
||||||
};
|
};
|
||||||
};
|
assert.equal(privateState.proxyServer, null);
|
||||||
assert.equal(privateState.runtime.proxyServer, null);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
test('FieldGroupingMergeCollaborator synchronizes ExpressionAudio from merged SentenceAudio', async () => {
|
test('FieldGroupingMergeCollaborator synchronizes ExpressionAudio from merged SentenceAudio', async () => {
|
||||||
|
|||||||
@@ -48,7 +48,6 @@ import { FieldGroupingService } from './anki-integration/field-grouping';
|
|||||||
import { FieldGroupingMergeCollaborator } from './anki-integration/field-grouping-merge';
|
import { FieldGroupingMergeCollaborator } from './anki-integration/field-grouping-merge';
|
||||||
import { NoteUpdateWorkflow } from './anki-integration/note-update-workflow';
|
import { NoteUpdateWorkflow } from './anki-integration/note-update-workflow';
|
||||||
import { FieldGroupingWorkflow } from './anki-integration/field-grouping-workflow';
|
import { FieldGroupingWorkflow } from './anki-integration/field-grouping-workflow';
|
||||||
import { AnkiIntegrationRuntime, normalizeAnkiIntegrationConfig } from './anki-integration/runtime';
|
|
||||||
|
|
||||||
const log = createLogger('anki').child('integration');
|
const log = createLogger('anki').child('integration');
|
||||||
|
|
||||||
@@ -114,6 +113,8 @@ export class AnkiIntegration {
|
|||||||
private timingTracker: SubtitleTimingTracker;
|
private timingTracker: SubtitleTimingTracker;
|
||||||
private config: AnkiConnectConfig;
|
private config: AnkiConnectConfig;
|
||||||
private pollingRunner!: PollingRunner;
|
private pollingRunner!: PollingRunner;
|
||||||
|
private proxyServer: AnkiConnectProxyServer | null = null;
|
||||||
|
private started = false;
|
||||||
private previousNoteIds = new Set<number>();
|
private previousNoteIds = new Set<number>();
|
||||||
private mpvClient: MpvClient;
|
private mpvClient: MpvClient;
|
||||||
private osdCallback: ((text: string) => void) | null = null;
|
private osdCallback: ((text: string) => void) | null = null;
|
||||||
@@ -134,7 +135,6 @@ export class AnkiIntegration {
|
|||||||
private fieldGroupingService: FieldGroupingService;
|
private fieldGroupingService: FieldGroupingService;
|
||||||
private noteUpdateWorkflow: NoteUpdateWorkflow;
|
private noteUpdateWorkflow: NoteUpdateWorkflow;
|
||||||
private fieldGroupingWorkflow: FieldGroupingWorkflow;
|
private fieldGroupingWorkflow: FieldGroupingWorkflow;
|
||||||
private runtime: AnkiIntegrationRuntime;
|
|
||||||
|
|
||||||
constructor(
|
constructor(
|
||||||
config: AnkiConnectConfig,
|
config: AnkiConnectConfig,
|
||||||
@@ -148,7 +148,7 @@ export class AnkiIntegration {
|
|||||||
}) => Promise<KikuFieldGroupingChoice>,
|
}) => Promise<KikuFieldGroupingChoice>,
|
||||||
knownWordCacheStatePath?: string,
|
knownWordCacheStatePath?: string,
|
||||||
) {
|
) {
|
||||||
this.config = normalizeAnkiIntegrationConfig(config);
|
this.config = this.normalizeConfig(config);
|
||||||
this.client = new AnkiConnectClient(this.config.url!);
|
this.client = new AnkiConnectClient(this.config.url!);
|
||||||
this.mediaGenerator = new MediaGenerator();
|
this.mediaGenerator = new MediaGenerator();
|
||||||
this.timingTracker = timingTracker;
|
this.timingTracker = timingTracker;
|
||||||
@@ -163,7 +163,6 @@ export class AnkiIntegration {
|
|||||||
this.fieldGroupingService = this.createFieldGroupingService();
|
this.fieldGroupingService = this.createFieldGroupingService();
|
||||||
this.noteUpdateWorkflow = this.createNoteUpdateWorkflow();
|
this.noteUpdateWorkflow = this.createNoteUpdateWorkflow();
|
||||||
this.fieldGroupingWorkflow = this.createFieldGroupingWorkflow();
|
this.fieldGroupingWorkflow = this.createFieldGroupingWorkflow();
|
||||||
this.runtime = this.createRuntime(config);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private createFieldGroupingMergeCollaborator(): FieldGroupingMergeCollaborator {
|
private createFieldGroupingMergeCollaborator(): FieldGroupingMergeCollaborator {
|
||||||
@@ -183,6 +182,75 @@ export class AnkiIntegration {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private normalizeConfig(config: AnkiConnectConfig): AnkiConnectConfig {
|
||||||
|
const resolvedUrl =
|
||||||
|
typeof config.url === 'string' && config.url.trim().length > 0
|
||||||
|
? config.url.trim()
|
||||||
|
: DEFAULT_ANKI_CONNECT_CONFIG.url;
|
||||||
|
const proxySource =
|
||||||
|
config.proxy && typeof config.proxy === 'object'
|
||||||
|
? (config.proxy as NonNullable<AnkiConnectConfig['proxy']>)
|
||||||
|
: {};
|
||||||
|
const normalizedProxyPort =
|
||||||
|
typeof proxySource.port === 'number' &&
|
||||||
|
Number.isInteger(proxySource.port) &&
|
||||||
|
proxySource.port >= 1 &&
|
||||||
|
proxySource.port <= 65535
|
||||||
|
? proxySource.port
|
||||||
|
: DEFAULT_ANKI_CONNECT_CONFIG.proxy?.port;
|
||||||
|
const normalizedProxyHost =
|
||||||
|
typeof proxySource.host === 'string' && proxySource.host.trim().length > 0
|
||||||
|
? proxySource.host.trim()
|
||||||
|
: DEFAULT_ANKI_CONNECT_CONFIG.proxy?.host;
|
||||||
|
const normalizedProxyUpstreamUrl =
|
||||||
|
typeof proxySource.upstreamUrl === 'string' && proxySource.upstreamUrl.trim().length > 0
|
||||||
|
? proxySource.upstreamUrl.trim()
|
||||||
|
: resolvedUrl;
|
||||||
|
|
||||||
|
return {
|
||||||
|
...DEFAULT_ANKI_CONNECT_CONFIG,
|
||||||
|
...config,
|
||||||
|
url: resolvedUrl,
|
||||||
|
fields: {
|
||||||
|
...DEFAULT_ANKI_CONNECT_CONFIG.fields,
|
||||||
|
...(config.fields ?? {}),
|
||||||
|
},
|
||||||
|
proxy: {
|
||||||
|
...DEFAULT_ANKI_CONNECT_CONFIG.proxy,
|
||||||
|
...(config.proxy ?? {}),
|
||||||
|
enabled: proxySource.enabled === true,
|
||||||
|
host: normalizedProxyHost,
|
||||||
|
port: normalizedProxyPort,
|
||||||
|
upstreamUrl: normalizedProxyUpstreamUrl,
|
||||||
|
},
|
||||||
|
ai: {
|
||||||
|
...DEFAULT_ANKI_CONNECT_CONFIG.ai,
|
||||||
|
...(config.openRouter ?? {}),
|
||||||
|
...(config.ai ?? {}),
|
||||||
|
},
|
||||||
|
media: {
|
||||||
|
...DEFAULT_ANKI_CONNECT_CONFIG.media,
|
||||||
|
...(config.media ?? {}),
|
||||||
|
},
|
||||||
|
behavior: {
|
||||||
|
...DEFAULT_ANKI_CONNECT_CONFIG.behavior,
|
||||||
|
...(config.behavior ?? {}),
|
||||||
|
},
|
||||||
|
metadata: {
|
||||||
|
...DEFAULT_ANKI_CONNECT_CONFIG.metadata,
|
||||||
|
...(config.metadata ?? {}),
|
||||||
|
},
|
||||||
|
isLapis: {
|
||||||
|
...DEFAULT_ANKI_CONNECT_CONFIG.isLapis,
|
||||||
|
...(config.isLapis ?? {}),
|
||||||
|
},
|
||||||
|
isKiku: {
|
||||||
|
...DEFAULT_ANKI_CONNECT_CONFIG.isKiku,
|
||||||
|
...(config.isKiku ?? {}),
|
||||||
|
},
|
||||||
|
} as AnkiConnectConfig;
|
||||||
|
}
|
||||||
|
|
||||||
private createKnownWordCache(knownWordCacheStatePath?: string): KnownWordCacheManager {
|
private createKnownWordCache(knownWordCacheStatePath?: string): KnownWordCacheManager {
|
||||||
return new KnownWordCacheManager({
|
return new KnownWordCacheManager({
|
||||||
client: {
|
client: {
|
||||||
@@ -234,20 +302,11 @@ export class AnkiIntegration {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
private createRuntime(initialConfig: AnkiConnectConfig): AnkiIntegrationRuntime {
|
private getOrCreateProxyServer(): AnkiConnectProxyServer {
|
||||||
return new AnkiIntegrationRuntime({
|
if (!this.proxyServer) {
|
||||||
initialConfig,
|
this.proxyServer = this.createProxyServer();
|
||||||
pollingRunner: this.pollingRunner,
|
}
|
||||||
knownWordCache: this.knownWordCache,
|
return this.proxyServer;
|
||||||
proxyServerFactory: () => this.createProxyServer(),
|
|
||||||
logInfo: (message, ...args) => log.info(message, ...args),
|
|
||||||
logWarn: (message, ...args) => log.warn(message, ...args),
|
|
||||||
logError: (message, ...args) => log.error(message, ...args),
|
|
||||||
onConfigChanged: (nextConfig) => {
|
|
||||||
this.config = nextConfig;
|
|
||||||
this.client = new AnkiConnectClient(nextConfig.url!);
|
|
||||||
},
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private createCardCreationService(): CardCreationService {
|
private createCardCreationService(): CardCreationService {
|
||||||
@@ -458,6 +517,14 @@ export class AnkiIntegration {
|
|||||||
return this.config.nPlusOne?.highlightEnabled === true;
|
return this.config.nPlusOne?.highlightEnabled === true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private startKnownWordCacheLifecycle(): void {
|
||||||
|
this.knownWordCache.startLifecycle();
|
||||||
|
}
|
||||||
|
|
||||||
|
private stopKnownWordCacheLifecycle(): void {
|
||||||
|
this.knownWordCache.stopLifecycle();
|
||||||
|
}
|
||||||
|
|
||||||
private getConfiguredAnkiTags(): string[] {
|
private getConfiguredAnkiTags(): string[] {
|
||||||
if (!Array.isArray(this.config.tags)) {
|
if (!Array.isArray(this.config.tags)) {
|
||||||
return [];
|
return [];
|
||||||
@@ -539,12 +606,64 @@ export class AnkiIntegration {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private isProxyTransportEnabled(config: AnkiConnectConfig = this.config): boolean {
|
||||||
|
return config.proxy?.enabled === true;
|
||||||
|
}
|
||||||
|
|
||||||
|
private getTransportConfigKey(config: AnkiConnectConfig = this.config): string {
|
||||||
|
if (this.isProxyTransportEnabled(config)) {
|
||||||
|
return [
|
||||||
|
'proxy',
|
||||||
|
config.proxy?.host ?? '',
|
||||||
|
String(config.proxy?.port ?? ''),
|
||||||
|
config.proxy?.upstreamUrl ?? '',
|
||||||
|
].join(':');
|
||||||
|
}
|
||||||
|
return ['polling', String(config.pollingRate ?? DEFAULT_ANKI_CONNECT_CONFIG.pollingRate)].join(
|
||||||
|
':',
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
private startTransport(): void {
|
||||||
|
if (this.isProxyTransportEnabled()) {
|
||||||
|
const proxyHost = this.config.proxy?.host ?? '127.0.0.1';
|
||||||
|
const proxyPort = this.config.proxy?.port ?? 8766;
|
||||||
|
const upstreamUrl = this.config.proxy?.upstreamUrl ?? this.config.url ?? '';
|
||||||
|
this.getOrCreateProxyServer().start({
|
||||||
|
host: proxyHost,
|
||||||
|
port: proxyPort,
|
||||||
|
upstreamUrl,
|
||||||
|
});
|
||||||
|
log.info(
|
||||||
|
`Starting AnkiConnect integration with local proxy: http://${proxyHost}:${proxyPort} -> ${upstreamUrl}`,
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
log.info('Starting AnkiConnect integration with polling rate:', this.config.pollingRate);
|
||||||
|
this.pollingRunner.start();
|
||||||
|
}
|
||||||
|
|
||||||
|
private stopTransport(): void {
|
||||||
|
this.pollingRunner.stop();
|
||||||
|
this.proxyServer?.stop();
|
||||||
|
}
|
||||||
|
|
||||||
start(): void {
|
start(): void {
|
||||||
this.runtime.start();
|
if (this.started) {
|
||||||
|
this.stop();
|
||||||
|
}
|
||||||
|
|
||||||
|
this.startKnownWordCacheLifecycle();
|
||||||
|
this.startTransport();
|
||||||
|
this.started = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
stop(): void {
|
stop(): void {
|
||||||
this.runtime.stop();
|
this.stopTransport();
|
||||||
|
this.stopKnownWordCacheLifecycle();
|
||||||
|
this.started = false;
|
||||||
|
log.info('Stopped AnkiConnect integration');
|
||||||
}
|
}
|
||||||
|
|
||||||
private async processNewCard(
|
private async processNewCard(
|
||||||
@@ -1097,7 +1216,58 @@ export class AnkiIntegration {
|
|||||||
}
|
}
|
||||||
|
|
||||||
applyRuntimeConfigPatch(patch: Partial<AnkiConnectConfig>): void {
|
applyRuntimeConfigPatch(patch: Partial<AnkiConnectConfig>): void {
|
||||||
this.runtime.applyRuntimeConfigPatch(patch);
|
const wasEnabled = this.config.nPlusOne?.highlightEnabled === true;
|
||||||
|
const previousTransportKey = this.getTransportConfigKey(this.config);
|
||||||
|
|
||||||
|
const mergedConfig: AnkiConnectConfig = {
|
||||||
|
...this.config,
|
||||||
|
...patch,
|
||||||
|
nPlusOne:
|
||||||
|
patch.nPlusOne !== undefined
|
||||||
|
? {
|
||||||
|
...(this.config.nPlusOne ?? DEFAULT_ANKI_CONNECT_CONFIG.nPlusOne),
|
||||||
|
...patch.nPlusOne,
|
||||||
|
}
|
||||||
|
: this.config.nPlusOne,
|
||||||
|
fields:
|
||||||
|
patch.fields !== undefined
|
||||||
|
? { ...this.config.fields, ...patch.fields }
|
||||||
|
: this.config.fields,
|
||||||
|
media:
|
||||||
|
patch.media !== undefined ? { ...this.config.media, ...patch.media } : this.config.media,
|
||||||
|
behavior:
|
||||||
|
patch.behavior !== undefined
|
||||||
|
? { ...this.config.behavior, ...patch.behavior }
|
||||||
|
: this.config.behavior,
|
||||||
|
proxy:
|
||||||
|
patch.proxy !== undefined ? { ...this.config.proxy, ...patch.proxy } : this.config.proxy,
|
||||||
|
metadata:
|
||||||
|
patch.metadata !== undefined
|
||||||
|
? { ...this.config.metadata, ...patch.metadata }
|
||||||
|
: this.config.metadata,
|
||||||
|
isLapis:
|
||||||
|
patch.isLapis !== undefined
|
||||||
|
? { ...this.config.isLapis, ...patch.isLapis }
|
||||||
|
: this.config.isLapis,
|
||||||
|
isKiku:
|
||||||
|
patch.isKiku !== undefined
|
||||||
|
? { ...this.config.isKiku, ...patch.isKiku }
|
||||||
|
: this.config.isKiku,
|
||||||
|
};
|
||||||
|
this.config = this.normalizeConfig(mergedConfig);
|
||||||
|
|
||||||
|
if (wasEnabled && this.config.nPlusOne?.highlightEnabled === false) {
|
||||||
|
this.stopKnownWordCacheLifecycle();
|
||||||
|
this.knownWordCache.clearKnownWordCacheState();
|
||||||
|
} else {
|
||||||
|
this.startKnownWordCacheLifecycle();
|
||||||
|
}
|
||||||
|
|
||||||
|
const nextTransportKey = this.getTransportConfigKey(this.config);
|
||||||
|
if (this.started && previousTransportKey !== nextTransportKey) {
|
||||||
|
this.stopTransport();
|
||||||
|
this.startTransport();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
destroy(): void {
|
destroy(): void {
|
||||||
|
|||||||
@@ -80,7 +80,7 @@ export class FieldGroupingWorkflow {
|
|||||||
|
|
||||||
async handleManual(
|
async handleManual(
|
||||||
originalNoteId: number,
|
originalNoteId: number,
|
||||||
_newNoteId: number,
|
newNoteId: number,
|
||||||
newNoteInfo: FieldGroupingWorkflowNoteInfo,
|
newNoteInfo: FieldGroupingWorkflowNoteInfo,
|
||||||
): Promise<boolean> {
|
): Promise<boolean> {
|
||||||
const callback = await this.resolveFieldGroupingCallback();
|
const callback = await this.resolveFieldGroupingCallback();
|
||||||
|
|||||||
@@ -1,108 +0,0 @@
|
|||||||
import test from 'node:test';
|
|
||||||
import assert from 'node:assert/strict';
|
|
||||||
|
|
||||||
import { DEFAULT_ANKI_CONNECT_CONFIG } from '../config';
|
|
||||||
import type { AnkiConnectConfig } from '../types';
|
|
||||||
import { AnkiIntegrationRuntime } from './runtime';
|
|
||||||
|
|
||||||
function createRuntime(
|
|
||||||
config: Partial<AnkiConnectConfig> = {},
|
|
||||||
overrides: Partial<ConstructorParameters<typeof AnkiIntegrationRuntime>[0]> = {},
|
|
||||||
) {
|
|
||||||
const calls: string[] = [];
|
|
||||||
|
|
||||||
const runtime = new AnkiIntegrationRuntime({
|
|
||||||
initialConfig: config as AnkiConnectConfig,
|
|
||||||
pollingRunner: {
|
|
||||||
start: () => calls.push('polling:start'),
|
|
||||||
stop: () => calls.push('polling:stop'),
|
|
||||||
},
|
|
||||||
knownWordCache: {
|
|
||||||
startLifecycle: () => calls.push('known:start'),
|
|
||||||
stopLifecycle: () => calls.push('known:stop'),
|
|
||||||
clearKnownWordCacheState: () => calls.push('known:clear'),
|
|
||||||
},
|
|
||||||
proxyServerFactory: () => ({
|
|
||||||
start: ({ host, port, upstreamUrl }) =>
|
|
||||||
calls.push(`proxy:start:${host}:${port}:${upstreamUrl}`),
|
|
||||||
stop: () => calls.push('proxy:stop'),
|
|
||||||
}),
|
|
||||||
logInfo: () => undefined,
|
|
||||||
logWarn: () => undefined,
|
|
||||||
logError: () => undefined,
|
|
||||||
onConfigChanged: () => undefined,
|
|
||||||
...overrides,
|
|
||||||
});
|
|
||||||
|
|
||||||
return { runtime, calls };
|
|
||||||
}
|
|
||||||
|
|
||||||
test('AnkiIntegrationRuntime normalizes url and proxy defaults', () => {
|
|
||||||
const { runtime } = createRuntime({
|
|
||||||
url: ' http://anki.local:8765 ',
|
|
||||||
proxy: {
|
|
||||||
enabled: true,
|
|
||||||
host: ' 0.0.0.0 ',
|
|
||||||
port: 7001,
|
|
||||||
upstreamUrl: ' ',
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
const normalized = runtime.getConfig();
|
|
||||||
|
|
||||||
assert.equal(normalized.url, 'http://anki.local:8765');
|
|
||||||
assert.equal(normalized.proxy?.enabled, true);
|
|
||||||
assert.equal(normalized.proxy?.host, '0.0.0.0');
|
|
||||||
assert.equal(normalized.proxy?.port, 7001);
|
|
||||||
assert.equal(normalized.proxy?.upstreamUrl, 'http://anki.local:8765');
|
|
||||||
assert.equal(
|
|
||||||
normalized.media?.fallbackDuration,
|
|
||||||
DEFAULT_ANKI_CONNECT_CONFIG.media.fallbackDuration,
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test('AnkiIntegrationRuntime starts proxy transport when proxy mode is enabled', () => {
|
|
||||||
const { runtime, calls } = createRuntime({
|
|
||||||
proxy: {
|
|
||||||
enabled: true,
|
|
||||||
host: '127.0.0.1',
|
|
||||||
port: 9999,
|
|
||||||
upstreamUrl: 'http://upstream:8765',
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
runtime.start();
|
|
||||||
|
|
||||||
assert.deepEqual(calls, ['known:start', 'proxy:start:127.0.0.1:9999:http://upstream:8765']);
|
|
||||||
});
|
|
||||||
|
|
||||||
test('AnkiIntegrationRuntime switches transports and clears known words when runtime patch disables highlighting', () => {
|
|
||||||
const { runtime, calls } = createRuntime({
|
|
||||||
nPlusOne: {
|
|
||||||
highlightEnabled: true,
|
|
||||||
},
|
|
||||||
pollingRate: 250,
|
|
||||||
});
|
|
||||||
|
|
||||||
runtime.start();
|
|
||||||
calls.length = 0;
|
|
||||||
|
|
||||||
runtime.applyRuntimeConfigPatch({
|
|
||||||
nPlusOne: {
|
|
||||||
highlightEnabled: false,
|
|
||||||
},
|
|
||||||
proxy: {
|
|
||||||
enabled: true,
|
|
||||||
host: '127.0.0.1',
|
|
||||||
port: 8766,
|
|
||||||
upstreamUrl: 'http://127.0.0.1:8765',
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
assert.deepEqual(calls, [
|
|
||||||
'known:stop',
|
|
||||||
'known:clear',
|
|
||||||
'polling:stop',
|
|
||||||
'proxy:start:127.0.0.1:8766:http://127.0.0.1:8765',
|
|
||||||
]);
|
|
||||||
});
|
|
||||||
@@ -1,232 +0,0 @@
|
|||||||
import { DEFAULT_ANKI_CONNECT_CONFIG } from '../config';
|
|
||||||
import type { AnkiConnectConfig } from '../types';
|
|
||||||
|
|
||||||
export interface AnkiIntegrationRuntimeProxyServer {
|
|
||||||
start(options: { host: string; port: number; upstreamUrl: string }): void;
|
|
||||||
stop(): void;
|
|
||||||
}
|
|
||||||
|
|
||||||
interface AnkiIntegrationRuntimeDeps {
|
|
||||||
initialConfig: AnkiConnectConfig;
|
|
||||||
pollingRunner: {
|
|
||||||
start(): void;
|
|
||||||
stop(): void;
|
|
||||||
};
|
|
||||||
knownWordCache: {
|
|
||||||
startLifecycle(): void;
|
|
||||||
stopLifecycle(): void;
|
|
||||||
clearKnownWordCacheState(): void;
|
|
||||||
};
|
|
||||||
proxyServerFactory: () => AnkiIntegrationRuntimeProxyServer;
|
|
||||||
logInfo: (message: string, ...args: unknown[]) => void;
|
|
||||||
logWarn: (message: string, ...args: unknown[]) => void;
|
|
||||||
logError: (message: string, ...args: unknown[]) => void;
|
|
||||||
onConfigChanged?: (config: AnkiConnectConfig) => void;
|
|
||||||
}
|
|
||||||
|
|
||||||
function trimToNonEmptyString(value: unknown): string | null {
|
|
||||||
if (typeof value !== 'string') return null;
|
|
||||||
const trimmed = value.trim();
|
|
||||||
return trimmed.length > 0 ? trimmed : null;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
 * Normalize a raw AnkiConnect config against DEFAULT_ANKI_CONNECT_CONFIG.
 *
 * Produces a fully-populated config where:
 * - `url` falls back to the default when missing/blank;
 * - `proxy.host`/`proxy.port`/`proxy.upstreamUrl` are validated and defaulted
 *   (port must be an integer in 1..65535; upstream falls back to `url`);
 * - `proxy.enabled` is coerced to a strict boolean (`true` only when exactly true);
 * - every nested section (fields, proxy, ai, media, behavior, metadata,
 *   isLapis, isKiku) is a shallow merge of defaults overlaid by user values.
 *
 * The spread order is load-bearing: defaults first, then user config, then
 * the explicitly normalized values win.
 *
 * @param config - Raw (possibly partial/invalid) config from disk or a patch.
 * @returns A normalized config safe for the runtime to consume.
 */
export function normalizeAnkiIntegrationConfig(config: AnkiConnectConfig): AnkiConnectConfig {
  // Blank/whitespace URLs are treated as absent.
  const resolvedUrl = trimToNonEmptyString(config.url) ?? DEFAULT_ANKI_CONNECT_CONFIG.url;
  // Guard against `proxy` being a non-object (e.g. hand-edited config files).
  const proxySource =
    config.proxy && typeof config.proxy === 'object'
      ? (config.proxy as NonNullable<AnkiConnectConfig['proxy']>)
      : {};
  // Only accept a real TCP port; anything else falls back to the default.
  const normalizedProxyPort =
    typeof proxySource.port === 'number' &&
    Number.isInteger(proxySource.port) &&
    proxySource.port >= 1 &&
    proxySource.port <= 65535
      ? proxySource.port
      : DEFAULT_ANKI_CONNECT_CONFIG.proxy?.port;
  const normalizedProxyHost =
    trimToNonEmptyString(proxySource.host) ?? DEFAULT_ANKI_CONNECT_CONFIG.proxy?.host;
  // The proxy forwards to the configured upstream, defaulting to the main URL.
  const normalizedProxyUpstreamUrl = trimToNonEmptyString(proxySource.upstreamUrl) ?? resolvedUrl;

  return {
    ...DEFAULT_ANKI_CONNECT_CONFIG,
    ...config,
    url: resolvedUrl,
    fields: {
      ...DEFAULT_ANKI_CONNECT_CONFIG.fields,
      ...(config.fields ?? {}),
    },
    proxy: {
      ...DEFAULT_ANKI_CONNECT_CONFIG.proxy,
      ...(config.proxy ?? {}),
      // Strict-boolean coercion: anything but literal `true` disables the proxy.
      enabled: proxySource.enabled === true,
      host: normalizedProxyHost,
      port: normalizedProxyPort,
      upstreamUrl: normalizedProxyUpstreamUrl,
    },
    ai: {
      ...DEFAULT_ANKI_CONNECT_CONFIG.ai,
      // Legacy `openRouter` settings seed `ai`, but current `ai` values win.
      ...(config.openRouter ?? {}),
      ...(config.ai ?? {}),
    },
    media: {
      ...DEFAULT_ANKI_CONNECT_CONFIG.media,
      ...(config.media ?? {}),
    },
    behavior: {
      ...DEFAULT_ANKI_CONNECT_CONFIG.behavior,
      ...(config.behavior ?? {}),
    },
    metadata: {
      ...DEFAULT_ANKI_CONNECT_CONFIG.metadata,
      ...(config.metadata ?? {}),
    },
    isLapis: {
      ...DEFAULT_ANKI_CONNECT_CONFIG.isLapis,
      ...(config.isLapis ?? {}),
    },
    isKiku: {
      ...DEFAULT_ANKI_CONNECT_CONFIG.isKiku,
      ...(config.isKiku ?? {}),
    },
  } as AnkiConnectConfig;
}
|
|
||||||
|
|
||||||
/**
 * Owns the lifecycle of the AnkiConnect integration: the known-word cache,
 * the active transport (local proxy server OR polling runner), and the
 * normalized runtime configuration.
 *
 * Transport selection: when `proxy.enabled === true` the local proxy is used;
 * otherwise the polling runner is started. Config patches restart the
 * transport only when the transport-relevant settings actually changed.
 */
export class AnkiIntegrationRuntime {
  // Always holds a *normalized* config (see normalizeAnkiIntegrationConfig).
  private config: AnkiConnectConfig;
  // Created lazily via getOrCreateProxyServer(); reused across restarts.
  private proxyServer: AnkiIntegrationRuntimeProxyServer | null = null;
  // True between start() and stop(); gates transport restarts on patches.
  private started = false;

  constructor(private readonly deps: AnkiIntegrationRuntimeDeps) {
    this.config = normalizeAnkiIntegrationConfig(deps.initialConfig);
  }

  /** Current normalized config (returned by reference, not copied). */
  getConfig(): AnkiConnectConfig {
    return this.config;
  }

  /**
   * Start the integration. Calling start() while already started performs a
   * full stop() first, so start() is effectively a restart.
   */
  start(): void {
    if (this.started) {
      this.stop();
    }

    this.deps.knownWordCache.startLifecycle();
    this.startTransport();
    this.started = true;
  }

  /**
   * Stop the transport and the known-word cache lifecycle.
   * Safe to call when not started (dep implementations permitting); always
   * logs the "Stopped" message.
   */
  stop(): void {
    this.stopTransport();
    this.deps.knownWordCache.stopLifecycle();
    this.started = false;
    this.deps.logInfo('Stopped AnkiConnect integration');
  }

  /**
   * Apply a partial config update at runtime.
   *
   * Nested sections present in `patch` are shallow-merged onto the current
   * section (so a patch cannot accidentally drop sibling keys); absent
   * sections are kept as-is. The merged result is re-normalized, the
   * onConfigChanged hook fires, the known-word cache is stopped+cleared when
   * n+1 highlighting flips from enabled to disabled (otherwise its lifecycle
   * is (re)started), and the transport is restarted only if its config key
   * changed while the runtime is started.
   */
  applyRuntimeConfigPatch(patch: Partial<AnkiConnectConfig>): void {
    // Capture pre-patch state so we can detect transitions after the merge.
    const wasKnownWordCacheEnabled = this.config.nPlusOne?.highlightEnabled === true;
    const previousTransportKey = this.getTransportConfigKey(this.config);

    const mergedConfig: AnkiConnectConfig = {
      ...this.config,
      ...patch,
      nPlusOne:
        patch.nPlusOne !== undefined
          ? {
              // Fall back to defaults when the current config has no nPlusOne.
              ...(this.config.nPlusOne ?? DEFAULT_ANKI_CONNECT_CONFIG.nPlusOne),
              ...patch.nPlusOne,
            }
          : this.config.nPlusOne,
      fields:
        patch.fields !== undefined
          ? { ...this.config.fields, ...patch.fields }
          : this.config.fields,
      media:
        patch.media !== undefined ? { ...this.config.media, ...patch.media } : this.config.media,
      behavior:
        patch.behavior !== undefined
          ? { ...this.config.behavior, ...patch.behavior }
          : this.config.behavior,
      proxy:
        patch.proxy !== undefined ? { ...this.config.proxy, ...patch.proxy } : this.config.proxy,
      metadata:
        patch.metadata !== undefined
          ? { ...this.config.metadata, ...patch.metadata }
          : this.config.metadata,
      isLapis:
        patch.isLapis !== undefined
          ? { ...this.config.isLapis, ...patch.isLapis }
          : this.config.isLapis,
      isKiku:
        patch.isKiku !== undefined
          ? { ...this.config.isKiku, ...patch.isKiku }
          : this.config.isKiku,
    };
    this.config = normalizeAnkiIntegrationConfig(mergedConfig);
    this.deps.onConfigChanged?.(this.config);

    if (wasKnownWordCacheEnabled && this.config.nPlusOne?.highlightEnabled === false) {
      // Highlighting was turned off: tear down the cache and drop its state.
      this.deps.knownWordCache.stopLifecycle();
      this.deps.knownWordCache.clearKnownWordCacheState();
    } else {
      // NOTE(review): startLifecycle() is invoked on every other patch, even
      // when highlighting is disabled — presumably idempotent; confirm with
      // the cache implementation.
      this.deps.knownWordCache.startLifecycle();
    }

    // Restart the transport only when its effective settings changed.
    const nextTransportKey = this.getTransportConfigKey(this.config);
    if (this.started && previousTransportKey !== nextTransportKey) {
      this.stopTransport();
      this.startTransport();
    }
  }

  /** Lazily create (once) and return the proxy server instance. */
  getOrCreateProxyServer(): AnkiIntegrationRuntimeProxyServer {
    if (!this.proxyServer) {
      this.proxyServer = this.deps.proxyServerFactory();
    }
    return this.proxyServer;
  }

  /** The proxy transport is active only when `proxy.enabled` is literally true. */
  private isProxyTransportEnabled(config: AnkiConnectConfig = this.config): boolean {
    return config.proxy?.enabled === true;
  }

  /**
   * Build a comparable string key of the transport-relevant settings.
   * Two configs with equal keys need no transport restart.
   */
  private getTransportConfigKey(config: AnkiConnectConfig = this.config): string {
    if (this.isProxyTransportEnabled(config)) {
      return [
        'proxy',
        config.proxy?.host ?? '',
        String(config.proxy?.port ?? ''),
        config.proxy?.upstreamUrl ?? '',
      ].join(':');
    }
    return ['polling', String(config.pollingRate ?? DEFAULT_ANKI_CONNECT_CONFIG.pollingRate)].join(
      ':',
    );
  }

  /** Start whichever transport the current config selects, and log it. */
  private startTransport(): void {
    if (this.isProxyTransportEnabled()) {
      // Hard-coded fallbacks; normalizeAnkiIntegrationConfig normally fills
      // these in before we get here.
      const proxyHost = this.config.proxy?.host ?? '127.0.0.1';
      const proxyPort = this.config.proxy?.port ?? 8766;
      const upstreamUrl = this.config.proxy?.upstreamUrl ?? this.config.url ?? '';
      this.getOrCreateProxyServer().start({
        host: proxyHost,
        port: proxyPort,
        upstreamUrl,
      });
      this.deps.logInfo(
        `Starting AnkiConnect integration with local proxy: http://${proxyHost}:${proxyPort} -> ${upstreamUrl}`,
      );
      return;
    }

    this.deps.logInfo(
      'Starting AnkiConnect integration with polling rate:',
      this.config.pollingRate,
    );
    this.deps.pollingRunner.start();
  }

  /** Stop both transports unconditionally (each stop is a no-op if inactive). */
  private stopTransport(): void {
    this.deps.pollingRunner.stop();
    this.proxyServer?.stop();
  }
}
|
|
||||||
@@ -169,9 +169,4 @@ test('hasExplicitCommand and shouldStartApp preserve command intent', () => {
|
|||||||
assert.equal(background.background, true);
|
assert.equal(background.background, true);
|
||||||
assert.equal(hasExplicitCommand(background), true);
|
assert.equal(hasExplicitCommand(background), true);
|
||||||
assert.equal(shouldStartApp(background), true);
|
assert.equal(shouldStartApp(background), true);
|
||||||
|
|
||||||
const setup = parseArgs(['--setup']);
|
|
||||||
assert.equal((setup as typeof setup & { setup?: boolean }).setup, true);
|
|
||||||
assert.equal(hasExplicitCommand(setup), true);
|
|
||||||
assert.equal(shouldStartApp(setup), true);
|
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -5,7 +5,6 @@ export interface CliArgs {
|
|||||||
toggle: boolean;
|
toggle: boolean;
|
||||||
toggleVisibleOverlay: boolean;
|
toggleVisibleOverlay: boolean;
|
||||||
settings: boolean;
|
settings: boolean;
|
||||||
setup: boolean;
|
|
||||||
show: boolean;
|
show: boolean;
|
||||||
hide: boolean;
|
hide: boolean;
|
||||||
showVisibleOverlay: boolean;
|
showVisibleOverlay: boolean;
|
||||||
@@ -72,7 +71,6 @@ export function parseArgs(argv: string[]): CliArgs {
|
|||||||
toggle: false,
|
toggle: false,
|
||||||
toggleVisibleOverlay: false,
|
toggleVisibleOverlay: false,
|
||||||
settings: false,
|
settings: false,
|
||||||
setup: false,
|
|
||||||
show: false,
|
show: false,
|
||||||
hide: false,
|
hide: false,
|
||||||
showVisibleOverlay: false,
|
showVisibleOverlay: false,
|
||||||
@@ -127,7 +125,6 @@ export function parseArgs(argv: string[]): CliArgs {
|
|||||||
else if (arg === '--toggle') args.toggle = true;
|
else if (arg === '--toggle') args.toggle = true;
|
||||||
else if (arg === '--toggle-visible-overlay') args.toggleVisibleOverlay = true;
|
else if (arg === '--toggle-visible-overlay') args.toggleVisibleOverlay = true;
|
||||||
else if (arg === '--settings' || arg === '--yomitan') args.settings = true;
|
else if (arg === '--settings' || arg === '--yomitan') args.settings = true;
|
||||||
else if (arg === '--setup') args.setup = true;
|
|
||||||
else if (arg === '--show') args.show = true;
|
else if (arg === '--show') args.show = true;
|
||||||
else if (arg === '--hide') args.hide = true;
|
else if (arg === '--hide') args.hide = true;
|
||||||
else if (arg === '--show-visible-overlay') args.showVisibleOverlay = true;
|
else if (arg === '--show-visible-overlay') args.showVisibleOverlay = true;
|
||||||
@@ -301,7 +298,6 @@ export function hasExplicitCommand(args: CliArgs): boolean {
|
|||||||
args.toggle ||
|
args.toggle ||
|
||||||
args.toggleVisibleOverlay ||
|
args.toggleVisibleOverlay ||
|
||||||
args.settings ||
|
args.settings ||
|
||||||
args.setup ||
|
|
||||||
args.show ||
|
args.show ||
|
||||||
args.hide ||
|
args.hide ||
|
||||||
args.showVisibleOverlay ||
|
args.showVisibleOverlay ||
|
||||||
@@ -345,7 +341,6 @@ export function shouldStartApp(args: CliArgs): boolean {
|
|||||||
args.toggle ||
|
args.toggle ||
|
||||||
args.toggleVisibleOverlay ||
|
args.toggleVisibleOverlay ||
|
||||||
args.settings ||
|
args.settings ||
|
||||||
args.setup ||
|
|
||||||
args.copySubtitle ||
|
args.copySubtitle ||
|
||||||
args.copySubtitleMultiple ||
|
args.copySubtitleMultiple ||
|
||||||
args.mineSentence ||
|
args.mineSentence ||
|
||||||
@@ -376,7 +371,6 @@ export function shouldRunSettingsOnlyStartup(args: CliArgs): boolean {
|
|||||||
!args.toggleVisibleOverlay &&
|
!args.toggleVisibleOverlay &&
|
||||||
!args.show &&
|
!args.show &&
|
||||||
!args.hide &&
|
!args.hide &&
|
||||||
!args.setup &&
|
|
||||||
!args.showVisibleOverlay &&
|
!args.showVisibleOverlay &&
|
||||||
!args.hideVisibleOverlay &&
|
!args.hideVisibleOverlay &&
|
||||||
!args.copySubtitle &&
|
!args.copySubtitle &&
|
||||||
|
|||||||
@@ -18,7 +18,6 @@ test('printHelp includes configured texthooker port', () => {
|
|||||||
assert.match(output, /--help\s+Show this help/);
|
assert.match(output, /--help\s+Show this help/);
|
||||||
assert.match(output, /default: 7777/);
|
assert.match(output, /default: 7777/);
|
||||||
assert.match(output, /--refresh-known-words/);
|
assert.match(output, /--refresh-known-words/);
|
||||||
assert.match(output, /--setup\s+Open first-run setup window/);
|
|
||||||
assert.match(output, /--anilist-status/);
|
assert.match(output, /--anilist-status/);
|
||||||
assert.match(output, /--anilist-retry-queue/);
|
assert.match(output, /--anilist-retry-queue/);
|
||||||
assert.match(output, /--dictionary/);
|
assert.match(output, /--dictionary/);
|
||||||
|
|||||||
@@ -20,7 +20,6 @@ ${B}Overlay${R}
|
|||||||
--show-visible-overlay Show subtitle overlay
|
--show-visible-overlay Show subtitle overlay
|
||||||
--hide-visible-overlay Hide subtitle overlay
|
--hide-visible-overlay Hide subtitle overlay
|
||||||
--settings Open Yomitan settings window
|
--settings Open Yomitan settings window
|
||||||
--setup Open first-run setup window
|
|
||||||
--auto-start-overlay Auto-hide mpv subs, show overlay on connect
|
--auto-start-overlay Auto-hide mpv subs, show overlay on connect
|
||||||
|
|
||||||
${B}Mining${R}
|
${B}Mining${R}
|
||||||
|
|||||||
@@ -16,9 +16,6 @@ test('loads defaults when config is missing', () => {
|
|||||||
const service = new ConfigService(dir);
|
const service = new ConfigService(dir);
|
||||||
const config = service.getConfig();
|
const config = service.getConfig();
|
||||||
assert.equal(config.websocket.port, DEFAULT_CONFIG.websocket.port);
|
assert.equal(config.websocket.port, DEFAULT_CONFIG.websocket.port);
|
||||||
assert.equal(config.annotationWebsocket.enabled, DEFAULT_CONFIG.annotationWebsocket.enabled);
|
|
||||||
assert.equal(config.annotationWebsocket.port, DEFAULT_CONFIG.annotationWebsocket.port);
|
|
||||||
assert.equal(config.texthooker.launchAtStartup, true);
|
|
||||||
assert.equal(config.ankiConnect.behavior.autoUpdateNewCards, true);
|
assert.equal(config.ankiConnect.behavior.autoUpdateNewCards, true);
|
||||||
assert.deepEqual(config.ankiConnect.tags, ['SubMiner']);
|
assert.deepEqual(config.ankiConnect.tags, ['SubMiner']);
|
||||||
assert.equal(config.anilist.enabled, false);
|
assert.equal(config.anilist.enabled, false);
|
||||||
@@ -27,9 +24,6 @@ test('loads defaults when config is missing', () => {
|
|||||||
assert.equal(config.anilist.characterDictionary.maxLoaded, 3);
|
assert.equal(config.anilist.characterDictionary.maxLoaded, 3);
|
||||||
assert.equal(config.anilist.characterDictionary.evictionPolicy, 'delete');
|
assert.equal(config.anilist.characterDictionary.evictionPolicy, 'delete');
|
||||||
assert.equal(config.anilist.characterDictionary.profileScope, 'all');
|
assert.equal(config.anilist.characterDictionary.profileScope, 'all');
|
||||||
assert.equal(config.anilist.characterDictionary.collapsibleSections.description, false);
|
|
||||||
assert.equal(config.anilist.characterDictionary.collapsibleSections.characterInformation, false);
|
|
||||||
assert.equal(config.anilist.characterDictionary.collapsibleSections.voicedBy, false);
|
|
||||||
assert.equal(config.jellyfin.remoteControlEnabled, true);
|
assert.equal(config.jellyfin.remoteControlEnabled, true);
|
||||||
assert.equal(config.jellyfin.remoteControlAutoConnect, true);
|
assert.equal(config.jellyfin.remoteControlAutoConnect, true);
|
||||||
assert.equal(config.jellyfin.autoAnnounce, false);
|
assert.equal(config.jellyfin.autoAnnounce, false);
|
||||||
@@ -134,88 +128,6 @@ test('parses subtitleStyle.preserveLineBreaks and warns on invalid values', () =
|
|||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
test('parses texthooker.launchAtStartup and warns on invalid values', () => {
|
|
||||||
const validDir = makeTempDir();
|
|
||||||
fs.writeFileSync(
|
|
||||||
path.join(validDir, 'config.jsonc'),
|
|
||||||
`{
|
|
||||||
"texthooker": {
|
|
||||||
"launchAtStartup": false
|
|
||||||
}
|
|
||||||
}`,
|
|
||||||
'utf-8',
|
|
||||||
);
|
|
||||||
|
|
||||||
const validService = new ConfigService(validDir);
|
|
||||||
assert.equal(validService.getConfig().texthooker.launchAtStartup, false);
|
|
||||||
|
|
||||||
const invalidDir = makeTempDir();
|
|
||||||
fs.writeFileSync(
|
|
||||||
path.join(invalidDir, 'config.jsonc'),
|
|
||||||
`{
|
|
||||||
"texthooker": {
|
|
||||||
"launchAtStartup": "yes"
|
|
||||||
}
|
|
||||||
}`,
|
|
||||||
'utf-8',
|
|
||||||
);
|
|
||||||
|
|
||||||
const invalidService = new ConfigService(invalidDir);
|
|
||||||
assert.equal(
|
|
||||||
invalidService.getConfig().texthooker.launchAtStartup,
|
|
||||||
DEFAULT_CONFIG.texthooker.launchAtStartup,
|
|
||||||
);
|
|
||||||
assert.ok(
|
|
||||||
invalidService.getWarnings().some((warning) => warning.path === 'texthooker.launchAtStartup'),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test('parses annotationWebsocket settings and warns on invalid values', () => {
|
|
||||||
const validDir = makeTempDir();
|
|
||||||
fs.writeFileSync(
|
|
||||||
path.join(validDir, 'config.jsonc'),
|
|
||||||
`{
|
|
||||||
"annotationWebsocket": {
|
|
||||||
"enabled": false,
|
|
||||||
"port": 7788
|
|
||||||
}
|
|
||||||
}`,
|
|
||||||
'utf-8',
|
|
||||||
);
|
|
||||||
|
|
||||||
const validService = new ConfigService(validDir);
|
|
||||||
assert.equal(validService.getConfig().annotationWebsocket.enabled, false);
|
|
||||||
assert.equal(validService.getConfig().annotationWebsocket.port, 7788);
|
|
||||||
|
|
||||||
const invalidDir = makeTempDir();
|
|
||||||
fs.writeFileSync(
|
|
||||||
path.join(invalidDir, 'config.jsonc'),
|
|
||||||
`{
|
|
||||||
"annotationWebsocket": {
|
|
||||||
"enabled": "yes",
|
|
||||||
"port": "bad"
|
|
||||||
}
|
|
||||||
}`,
|
|
||||||
'utf-8',
|
|
||||||
);
|
|
||||||
|
|
||||||
const invalidService = new ConfigService(invalidDir);
|
|
||||||
assert.equal(
|
|
||||||
invalidService.getConfig().annotationWebsocket.enabled,
|
|
||||||
DEFAULT_CONFIG.annotationWebsocket.enabled,
|
|
||||||
);
|
|
||||||
assert.equal(
|
|
||||||
invalidService.getConfig().annotationWebsocket.port,
|
|
||||||
DEFAULT_CONFIG.annotationWebsocket.port,
|
|
||||||
);
|
|
||||||
assert.ok(
|
|
||||||
invalidService.getWarnings().some((warning) => warning.path === 'annotationWebsocket.enabled'),
|
|
||||||
);
|
|
||||||
assert.ok(
|
|
||||||
invalidService.getWarnings().some((warning) => warning.path === 'annotationWebsocket.port'),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test('parses subtitleStyle.autoPauseVideoOnHover and warns on invalid values', () => {
|
test('parses subtitleStyle.autoPauseVideoOnHover and warns on invalid values', () => {
|
||||||
const validDir = makeTempDir();
|
const validDir = makeTempDir();
|
||||||
fs.writeFileSync(
|
fs.writeFileSync(
|
||||||
@@ -330,47 +242,6 @@ test('parses subtitleStyle.hoverTokenColor and warns on invalid values', () => {
|
|||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
test('parses subtitleStyle.nameMatchColor and warns on invalid values', () => {
|
|
||||||
const validDir = makeTempDir();
|
|
||||||
fs.writeFileSync(
|
|
||||||
path.join(validDir, 'config.jsonc'),
|
|
||||||
`{
|
|
||||||
"subtitleStyle": {
|
|
||||||
"nameMatchColor": "#eed49f"
|
|
||||||
}
|
|
||||||
}`,
|
|
||||||
'utf-8',
|
|
||||||
);
|
|
||||||
|
|
||||||
const validService = new ConfigService(validDir);
|
|
||||||
assert.equal(
|
|
||||||
((validService.getConfig().subtitleStyle as unknown as Record<string, unknown>)
|
|
||||||
.nameMatchColor ?? null) as string | null,
|
|
||||||
'#eed49f',
|
|
||||||
);
|
|
||||||
|
|
||||||
const invalidDir = makeTempDir();
|
|
||||||
fs.writeFileSync(
|
|
||||||
path.join(invalidDir, 'config.jsonc'),
|
|
||||||
`{
|
|
||||||
"subtitleStyle": {
|
|
||||||
"nameMatchColor": "pink"
|
|
||||||
}
|
|
||||||
}`,
|
|
||||||
'utf-8',
|
|
||||||
);
|
|
||||||
|
|
||||||
const invalidService = new ConfigService(invalidDir);
|
|
||||||
assert.equal(
|
|
||||||
((invalidService.getConfig().subtitleStyle as unknown as Record<string, unknown>)
|
|
||||||
.nameMatchColor ?? null) as string | null,
|
|
||||||
'#f5bde6',
|
|
||||||
);
|
|
||||||
assert.ok(
|
|
||||||
invalidService.getWarnings().some((warning) => warning.path === 'subtitleStyle.nameMatchColor'),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test('parses subtitleStyle.hoverTokenBackgroundColor and warns on invalid values', () => {
|
test('parses subtitleStyle.hoverTokenBackgroundColor and warns on invalid values', () => {
|
||||||
const validDir = makeTempDir();
|
const validDir = makeTempDir();
|
||||||
fs.writeFileSync(
|
fs.writeFileSync(
|
||||||
@@ -409,44 +280,6 @@ test('parses subtitleStyle.hoverTokenBackgroundColor and warns on invalid values
|
|||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
test('parses subtitleStyle.nameMatchEnabled and warns on invalid values', () => {
|
|
||||||
const validDir = makeTempDir();
|
|
||||||
fs.writeFileSync(
|
|
||||||
path.join(validDir, 'config.jsonc'),
|
|
||||||
`{
|
|
||||||
"subtitleStyle": {
|
|
||||||
"nameMatchEnabled": false
|
|
||||||
}
|
|
||||||
}`,
|
|
||||||
'utf-8',
|
|
||||||
);
|
|
||||||
|
|
||||||
const validService = new ConfigService(validDir);
|
|
||||||
assert.equal(validService.getConfig().subtitleStyle.nameMatchEnabled, false);
|
|
||||||
|
|
||||||
const invalidDir = makeTempDir();
|
|
||||||
fs.writeFileSync(
|
|
||||||
path.join(invalidDir, 'config.jsonc'),
|
|
||||||
`{
|
|
||||||
"subtitleStyle": {
|
|
||||||
"nameMatchEnabled": "no"
|
|
||||||
}
|
|
||||||
}`,
|
|
||||||
'utf-8',
|
|
||||||
);
|
|
||||||
|
|
||||||
const invalidService = new ConfigService(invalidDir);
|
|
||||||
assert.equal(
|
|
||||||
invalidService.getConfig().subtitleStyle.nameMatchEnabled,
|
|
||||||
DEFAULT_CONFIG.subtitleStyle.nameMatchEnabled,
|
|
||||||
);
|
|
||||||
assert.ok(
|
|
||||||
invalidService
|
|
||||||
.getWarnings()
|
|
||||||
.some((warning) => warning.path === 'subtitleStyle.nameMatchEnabled'),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test('parses anilist.enabled and warns for invalid value', () => {
|
test('parses anilist.enabled and warns for invalid value', () => {
|
||||||
const dir = makeTempDir();
|
const dir = makeTempDir();
|
||||||
fs.writeFileSync(
|
fs.writeFileSync(
|
||||||
@@ -497,49 +330,10 @@ test('parses anilist.characterDictionary config with clamping and enum validatio
|
|||||||
assert.equal(config.anilist.characterDictionary.maxLoaded, 20);
|
assert.equal(config.anilist.characterDictionary.maxLoaded, 20);
|
||||||
assert.equal(config.anilist.characterDictionary.evictionPolicy, 'delete');
|
assert.equal(config.anilist.characterDictionary.evictionPolicy, 'delete');
|
||||||
assert.equal(config.anilist.characterDictionary.profileScope, 'all');
|
assert.equal(config.anilist.characterDictionary.profileScope, 'all');
|
||||||
assert.ok(
|
assert.ok(warnings.some((warning) => warning.path === 'anilist.characterDictionary.refreshTtlHours'));
|
||||||
warnings.some((warning) => warning.path === 'anilist.characterDictionary.refreshTtlHours'),
|
|
||||||
);
|
|
||||||
assert.ok(warnings.some((warning) => warning.path === 'anilist.characterDictionary.maxLoaded'));
|
assert.ok(warnings.some((warning) => warning.path === 'anilist.characterDictionary.maxLoaded'));
|
||||||
assert.ok(
|
assert.ok(warnings.some((warning) => warning.path === 'anilist.characterDictionary.evictionPolicy'));
|
||||||
warnings.some((warning) => warning.path === 'anilist.characterDictionary.evictionPolicy'),
|
assert.ok(warnings.some((warning) => warning.path === 'anilist.characterDictionary.profileScope'));
|
||||||
);
|
|
||||||
assert.ok(
|
|
||||||
warnings.some((warning) => warning.path === 'anilist.characterDictionary.profileScope'),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test('parses anilist.characterDictionary.collapsibleSections booleans and warns on invalid values', () => {
|
|
||||||
const dir = makeTempDir();
|
|
||||||
fs.writeFileSync(
|
|
||||||
path.join(dir, 'config.jsonc'),
|
|
||||||
`{
|
|
||||||
"anilist": {
|
|
||||||
"characterDictionary": {
|
|
||||||
"collapsibleSections": {
|
|
||||||
"description": true,
|
|
||||||
"characterInformation": "yes",
|
|
||||||
"voicedBy": true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}`,
|
|
||||||
'utf-8',
|
|
||||||
);
|
|
||||||
|
|
||||||
const service = new ConfigService(dir);
|
|
||||||
const config = service.getConfig();
|
|
||||||
const warnings = service.getWarnings();
|
|
||||||
|
|
||||||
assert.equal(config.anilist.characterDictionary.collapsibleSections.description, true);
|
|
||||||
assert.equal(config.anilist.characterDictionary.collapsibleSections.characterInformation, false);
|
|
||||||
assert.equal(config.anilist.characterDictionary.collapsibleSections.voicedBy, true);
|
|
||||||
assert.ok(
|
|
||||||
warnings.some(
|
|
||||||
(warning) =>
|
|
||||||
warning.path === 'anilist.characterDictionary.collapsibleSections.characterInformation',
|
|
||||||
),
|
|
||||||
);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
test('parses jellyfin remote control fields', () => {
|
test('parses jellyfin remote control fields', () => {
|
||||||
@@ -965,10 +759,6 @@ test('warning emission order is deterministic across reloads', () => {
|
|||||||
"enabled": "sometimes",
|
"enabled": "sometimes",
|
||||||
"port": -1
|
"port": -1
|
||||||
},
|
},
|
||||||
"annotationWebsocket": {
|
|
||||||
"enabled": "sometimes",
|
|
||||||
"port": -1
|
|
||||||
},
|
|
||||||
"logging": {
|
"logging": {
|
||||||
"level": "trace"
|
"level": "trace"
|
||||||
}
|
}
|
||||||
@@ -985,14 +775,7 @@ test('warning emission order is deterministic across reloads', () => {
|
|||||||
assert.deepEqual(secondWarnings, firstWarnings);
|
assert.deepEqual(secondWarnings, firstWarnings);
|
||||||
assert.deepEqual(
|
assert.deepEqual(
|
||||||
firstWarnings.map((warning) => warning.path),
|
firstWarnings.map((warning) => warning.path),
|
||||||
[
|
['unknownFeature', 'websocket.enabled', 'websocket.port', 'logging.level'],
|
||||||
'unknownFeature',
|
|
||||||
'websocket.enabled',
|
|
||||||
'websocket.port',
|
|
||||||
'annotationWebsocket.enabled',
|
|
||||||
'annotationWebsocket.port',
|
|
||||||
'logging.level',
|
|
||||||
],
|
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -1562,17 +1345,8 @@ test('template generator includes known keys', () => {
|
|||||||
output,
|
output,
|
||||||
/"enabled": "auto",? \/\/ Built-in subtitle websocket server mode\. Values: auto \| true \| false/,
|
/"enabled": "auto",? \/\/ Built-in subtitle websocket server mode\. Values: auto \| true \| false/,
|
||||||
);
|
);
|
||||||
assert.match(
|
|
||||||
output,
|
|
||||||
/"enabled": true,? \/\/ Annotated subtitle websocket server enabled state\. Values: true \| false/,
|
|
||||||
);
|
|
||||||
assert.match(output, /"port": 6678,? \/\/ Annotated subtitle websocket server port\./);
|
|
||||||
assert.match(
|
assert.match(
|
||||||
output,
|
output,
|
||||||
/"enabled": false,? \/\/ Enable AnkiConnect integration\. Values: true \| false/,
|
/"enabled": false,? \/\/ Enable AnkiConnect integration\. Values: true \| false/,
|
||||||
);
|
);
|
||||||
assert.match(
|
|
||||||
output,
|
|
||||||
/"launchAtStartup": true,? \/\/ Launch texthooker server automatically when SubMiner starts\. Values: true \| false/,
|
|
||||||
);
|
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -22,7 +22,6 @@ const {
|
|||||||
subtitlePosition,
|
subtitlePosition,
|
||||||
keybindings,
|
keybindings,
|
||||||
websocket,
|
websocket,
|
||||||
annotationWebsocket,
|
|
||||||
logging,
|
logging,
|
||||||
texthooker,
|
texthooker,
|
||||||
shortcuts,
|
shortcuts,
|
||||||
@@ -40,7 +39,6 @@ export const DEFAULT_CONFIG: ResolvedConfig = {
|
|||||||
subtitlePosition,
|
subtitlePosition,
|
||||||
keybindings,
|
keybindings,
|
||||||
websocket,
|
websocket,
|
||||||
annotationWebsocket,
|
|
||||||
logging,
|
logging,
|
||||||
texthooker,
|
texthooker,
|
||||||
ankiConnect,
|
ankiConnect,
|
||||||
|
|||||||
@@ -5,7 +5,6 @@ export const CORE_DEFAULT_CONFIG: Pick<
|
|||||||
| 'subtitlePosition'
|
| 'subtitlePosition'
|
||||||
| 'keybindings'
|
| 'keybindings'
|
||||||
| 'websocket'
|
| 'websocket'
|
||||||
| 'annotationWebsocket'
|
|
||||||
| 'logging'
|
| 'logging'
|
||||||
| 'texthooker'
|
| 'texthooker'
|
||||||
| 'shortcuts'
|
| 'shortcuts'
|
||||||
@@ -20,15 +19,10 @@ export const CORE_DEFAULT_CONFIG: Pick<
|
|||||||
enabled: 'auto',
|
enabled: 'auto',
|
||||||
port: 6677,
|
port: 6677,
|
||||||
},
|
},
|
||||||
annotationWebsocket: {
|
|
||||||
enabled: true,
|
|
||||||
port: 6678,
|
|
||||||
},
|
|
||||||
logging: {
|
logging: {
|
||||||
level: 'info',
|
level: 'info',
|
||||||
},
|
},
|
||||||
texthooker: {
|
texthooker: {
|
||||||
launchAtStartup: true,
|
|
||||||
openBrowser: true,
|
openBrowser: true,
|
||||||
},
|
},
|
||||||
shortcuts: {
|
shortcuts: {
|
||||||
|
|||||||
@@ -92,11 +92,6 @@ export const INTEGRATIONS_DEFAULT_CONFIG: Pick<
|
|||||||
maxLoaded: 3,
|
maxLoaded: 3,
|
||||||
evictionPolicy: 'delete',
|
evictionPolicy: 'delete',
|
||||||
profileScope: 'all',
|
profileScope: 'all',
|
||||||
collapsibleSections: {
|
|
||||||
description: false,
|
|
||||||
characterInformation: false,
|
|
||||||
voicedBy: false,
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
jellyfin: {
|
jellyfin: {
|
||||||
|
|||||||
@@ -8,8 +8,6 @@ export const SUBTITLE_DEFAULT_CONFIG: Pick<ResolvedConfig, 'subtitleStyle'> = {
|
|||||||
autoPauseVideoOnYomitanPopup: false,
|
autoPauseVideoOnYomitanPopup: false,
|
||||||
hoverTokenColor: '#f4dbd6',
|
hoverTokenColor: '#f4dbd6',
|
||||||
hoverTokenBackgroundColor: 'rgba(54, 58, 79, 0.84)',
|
hoverTokenBackgroundColor: 'rgba(54, 58, 79, 0.84)',
|
||||||
nameMatchEnabled: true,
|
|
||||||
nameMatchColor: '#f5bde6',
|
|
||||||
fontFamily: 'M PLUS 1 Medium, Source Han Sans JP, Noto Sans CJK JP',
|
fontFamily: 'M PLUS 1 Medium, Source Han Sans JP, Noto Sans CJK JP',
|
||||||
fontSize: 35,
|
fontSize: 35,
|
||||||
fontColor: '#cad3f5',
|
fontColor: '#cad3f5',
|
||||||
@@ -39,7 +37,7 @@ export const SUBTITLE_DEFAULT_CONFIG: Pick<ResolvedConfig, 'subtitleStyle'> = {
|
|||||||
mode: 'single',
|
mode: 'single',
|
||||||
matchMode: 'headword',
|
matchMode: 'headword',
|
||||||
singleColor: '#f5a97f',
|
singleColor: '#f5a97f',
|
||||||
bandedColors: ['#ed8796', '#f5a97f', '#f9e2af', '#8bd5ca', '#8aadf4'],
|
bandedColors: ['#ed8796', '#f5a97f', '#f9e2af', '#a6e3a1', '#8aadf4'],
|
||||||
},
|
},
|
||||||
secondary: {
|
secondary: {
|
||||||
fontFamily: 'Inter, Noto Sans, Helvetica Neue, sans-serif',
|
fontFamily: 'Inter, Noto Sans, Helvetica Neue, sans-serif',
|
||||||
|
|||||||
@@ -18,13 +18,11 @@ test('config option registry includes critical paths and has unique entries', ()
|
|||||||
|
|
||||||
for (const requiredPath of [
|
for (const requiredPath of [
|
||||||
'logging.level',
|
'logging.level',
|
||||||
'annotationWebsocket.enabled',
|
|
||||||
'startupWarmups.lowPowerMode',
|
'startupWarmups.lowPowerMode',
|
||||||
'subtitleStyle.enableJlpt',
|
'subtitleStyle.enableJlpt',
|
||||||
'subtitleStyle.autoPauseVideoOnYomitanPopup',
|
'subtitleStyle.autoPauseVideoOnYomitanPopup',
|
||||||
'ankiConnect.enabled',
|
'ankiConnect.enabled',
|
||||||
'anilist.characterDictionary.enabled',
|
'anilist.characterDictionary.enabled',
|
||||||
'anilist.characterDictionary.collapsibleSections.description',
|
|
||||||
'immersionTracking.enabled',
|
'immersionTracking.enabled',
|
||||||
]) {
|
]) {
|
||||||
assert.ok(paths.includes(requiredPath), `missing config path: ${requiredPath}`);
|
assert.ok(paths.includes(requiredPath), `missing config path: ${requiredPath}`);
|
||||||
@@ -37,7 +35,6 @@ test('config template sections include expected domains and unique keys', () =>
|
|||||||
const keys = CONFIG_TEMPLATE_SECTIONS.map((section) => section.key);
|
const keys = CONFIG_TEMPLATE_SECTIONS.map((section) => section.key);
|
||||||
const requiredKeys: (typeof keys)[number][] = [
|
const requiredKeys: (typeof keys)[number][] = [
|
||||||
'websocket',
|
'websocket',
|
||||||
'annotationWebsocket',
|
|
||||||
'startupWarmups',
|
'startupWarmups',
|
||||||
'subtitleStyle',
|
'subtitleStyle',
|
||||||
'ankiConnect',
|
'ankiConnect',
|
||||||
|
|||||||
@@ -12,12 +12,6 @@ export function buildCoreConfigOptionRegistry(
|
|||||||
defaultValue: defaultConfig.logging.level,
|
defaultValue: defaultConfig.logging.level,
|
||||||
description: 'Minimum log level for runtime logging.',
|
description: 'Minimum log level for runtime logging.',
|
||||||
},
|
},
|
||||||
{
|
|
||||||
path: 'texthooker.launchAtStartup',
|
|
||||||
kind: 'boolean',
|
|
||||||
defaultValue: defaultConfig.texthooker.launchAtStartup,
|
|
||||||
description: 'Launch texthooker server automatically when SubMiner starts.',
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
path: 'websocket.enabled',
|
path: 'websocket.enabled',
|
||||||
kind: 'enum',
|
kind: 'enum',
|
||||||
@@ -31,18 +25,6 @@ export function buildCoreConfigOptionRegistry(
|
|||||||
defaultValue: defaultConfig.websocket.port,
|
defaultValue: defaultConfig.websocket.port,
|
||||||
description: 'Built-in subtitle websocket server port.',
|
description: 'Built-in subtitle websocket server port.',
|
||||||
},
|
},
|
||||||
{
|
|
||||||
path: 'annotationWebsocket.enabled',
|
|
||||||
kind: 'boolean',
|
|
||||||
defaultValue: defaultConfig.annotationWebsocket.enabled,
|
|
||||||
description: 'Annotated subtitle websocket server enabled state.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
path: 'annotationWebsocket.port',
|
|
||||||
kind: 'number',
|
|
||||||
defaultValue: defaultConfig.annotationWebsocket.port,
|
|
||||||
description: 'Annotated subtitle websocket server port.',
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
path: 'subsync.defaultMode',
|
path: 'subsync.defaultMode',
|
||||||
kind: 'enum',
|
kind: 'enum',
|
||||||
|
|||||||
@@ -171,28 +171,6 @@ export function buildIntegrationConfigOptionRegistry(
|
|||||||
defaultValue: defaultConfig.anilist.characterDictionary.profileScope,
|
defaultValue: defaultConfig.anilist.characterDictionary.profileScope,
|
||||||
description: 'Yomitan profile scope for dictionary enable/disable updates.',
|
description: 'Yomitan profile scope for dictionary enable/disable updates.',
|
||||||
},
|
},
|
||||||
{
|
|
||||||
path: 'anilist.characterDictionary.collapsibleSections.description',
|
|
||||||
kind: 'boolean',
|
|
||||||
defaultValue: defaultConfig.anilist.characterDictionary.collapsibleSections.description,
|
|
||||||
description:
|
|
||||||
'Open the Description section by default in character dictionary glossary entries.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
path: 'anilist.characterDictionary.collapsibleSections.characterInformation',
|
|
||||||
kind: 'boolean',
|
|
||||||
defaultValue:
|
|
||||||
defaultConfig.anilist.characterDictionary.collapsibleSections.characterInformation,
|
|
||||||
description:
|
|
||||||
'Open the Character Information section by default in character dictionary glossary entries.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
path: 'anilist.characterDictionary.collapsibleSections.voicedBy',
|
|
||||||
kind: 'boolean',
|
|
||||||
defaultValue: defaultConfig.anilist.characterDictionary.collapsibleSections.voicedBy,
|
|
||||||
description:
|
|
||||||
'Open the Voiced by section by default in character dictionary glossary entries.',
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
path: 'jellyfin.enabled',
|
path: 'jellyfin.enabled',
|
||||||
kind: 'boolean',
|
kind: 'boolean',
|
||||||
|
|||||||
@@ -47,20 +47,6 @@ export function buildSubtitleConfigOptionRegistry(
|
|||||||
defaultValue: defaultConfig.subtitleStyle.hoverTokenBackgroundColor,
|
defaultValue: defaultConfig.subtitleStyle.hoverTokenBackgroundColor,
|
||||||
description: 'CSS color used for hovered subtitle token background highlight in mpv.',
|
description: 'CSS color used for hovered subtitle token background highlight in mpv.',
|
||||||
},
|
},
|
||||||
{
|
|
||||||
path: 'subtitleStyle.nameMatchEnabled',
|
|
||||||
kind: 'boolean',
|
|
||||||
defaultValue: defaultConfig.subtitleStyle.nameMatchEnabled,
|
|
||||||
description:
|
|
||||||
'Enable subtitle token coloring for matches from the SubMiner character dictionary.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
path: 'subtitleStyle.nameMatchColor',
|
|
||||||
kind: 'string',
|
|
||||||
defaultValue: defaultConfig.subtitleStyle.nameMatchColor,
|
|
||||||
description:
|
|
||||||
'Hex color used when a subtitle token matches an entry from the SubMiner character dictionary.',
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
path: 'subtitleStyle.frequencyDictionary.enabled',
|
path: 'subtitleStyle.frequencyDictionary.enabled',
|
||||||
kind: 'boolean',
|
kind: 'boolean',
|
||||||
|
|||||||
@@ -10,7 +10,7 @@ const CORE_TEMPLATE_SECTIONS: ConfigTemplateSection[] = [
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
title: 'Texthooker Server',
|
title: 'Texthooker Server',
|
||||||
description: ['Configure texthooker startup launch and browser opening behavior.'],
|
description: ['Control whether browser opens automatically for texthooker.'],
|
||||||
key: 'texthooker',
|
key: 'texthooker',
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -21,14 +21,6 @@ const CORE_TEMPLATE_SECTIONS: ConfigTemplateSection[] = [
|
|||||||
],
|
],
|
||||||
key: 'websocket',
|
key: 'websocket',
|
||||||
},
|
},
|
||||||
{
|
|
||||||
title: 'Annotation WebSocket',
|
|
||||||
description: [
|
|
||||||
'Dedicated annotated subtitle websocket for bundled texthooker and token-aware clients.',
|
|
||||||
'Independent from websocket.auto and defaults to port 6678.',
|
|
||||||
],
|
|
||||||
key: 'annotationWebsocket',
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
title: 'Logging',
|
title: 'Logging',
|
||||||
description: ['Controls logging verbosity.', 'Set to debug for full runtime diagnostics.'],
|
description: ['Controls logging verbosity.', 'Set to debug for full runtime diagnostics.'],
|
||||||
|
|||||||
@@ -5,18 +5,6 @@ export function applyCoreDomainConfig(context: ResolveContext): void {
|
|||||||
const { src, resolved, warn } = context;
|
const { src, resolved, warn } = context;
|
||||||
|
|
||||||
if (isObject(src.texthooker)) {
|
if (isObject(src.texthooker)) {
|
||||||
const launchAtStartup = asBoolean(src.texthooker.launchAtStartup);
|
|
||||||
if (launchAtStartup !== undefined) {
|
|
||||||
resolved.texthooker.launchAtStartup = launchAtStartup;
|
|
||||||
} else if (src.texthooker.launchAtStartup !== undefined) {
|
|
||||||
warn(
|
|
||||||
'texthooker.launchAtStartup',
|
|
||||||
src.texthooker.launchAtStartup,
|
|
||||||
resolved.texthooker.launchAtStartup,
|
|
||||||
'Expected boolean.',
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
const openBrowser = asBoolean(src.texthooker.openBrowser);
|
const openBrowser = asBoolean(src.texthooker.openBrowser);
|
||||||
if (openBrowser !== undefined) {
|
if (openBrowser !== undefined) {
|
||||||
resolved.texthooker.openBrowser = openBrowser;
|
resolved.texthooker.openBrowser = openBrowser;
|
||||||
@@ -56,32 +44,6 @@ export function applyCoreDomainConfig(context: ResolveContext): void {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (isObject(src.annotationWebsocket)) {
|
|
||||||
const enabled = asBoolean(src.annotationWebsocket.enabled);
|
|
||||||
if (enabled !== undefined) {
|
|
||||||
resolved.annotationWebsocket.enabled = enabled;
|
|
||||||
} else if (src.annotationWebsocket.enabled !== undefined) {
|
|
||||||
warn(
|
|
||||||
'annotationWebsocket.enabled',
|
|
||||||
src.annotationWebsocket.enabled,
|
|
||||||
resolved.annotationWebsocket.enabled,
|
|
||||||
'Expected boolean.',
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
const port = asNumber(src.annotationWebsocket.port);
|
|
||||||
if (port !== undefined && port > 0 && port <= 65535) {
|
|
||||||
resolved.annotationWebsocket.port = Math.floor(port);
|
|
||||||
} else if (src.annotationWebsocket.port !== undefined) {
|
|
||||||
warn(
|
|
||||||
'annotationWebsocket.port',
|
|
||||||
src.annotationWebsocket.port,
|
|
||||||
resolved.annotationWebsocket.port,
|
|
||||||
'Expected integer between 1 and 65535.',
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (isObject(src.logging)) {
|
if (isObject(src.logging)) {
|
||||||
const logLevel = asString(src.logging.level);
|
const logLevel = asString(src.logging.level);
|
||||||
if (
|
if (
|
||||||
|
|||||||
@@ -124,31 +124,6 @@ export function applyIntegrationConfig(context: ResolveContext): void {
|
|||||||
'Expected string.',
|
'Expected string.',
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (isObject(characterDictionary.collapsibleSections)) {
|
|
||||||
const collapsibleSections = characterDictionary.collapsibleSections;
|
|
||||||
const keys = ['description', 'characterInformation', 'voicedBy'] as const;
|
|
||||||
for (const key of keys) {
|
|
||||||
const value = asBoolean(collapsibleSections[key]);
|
|
||||||
if (value !== undefined) {
|
|
||||||
resolved.anilist.characterDictionary.collapsibleSections[key] = value;
|
|
||||||
} else if (collapsibleSections[key] !== undefined) {
|
|
||||||
warn(
|
|
||||||
`anilist.characterDictionary.collapsibleSections.${key}`,
|
|
||||||
collapsibleSections[key],
|
|
||||||
resolved.anilist.characterDictionary.collapsibleSections[key],
|
|
||||||
'Expected boolean.',
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else if (characterDictionary.collapsibleSections !== undefined) {
|
|
||||||
warn(
|
|
||||||
'anilist.characterDictionary.collapsibleSections',
|
|
||||||
characterDictionary.collapsibleSections,
|
|
||||||
resolved.anilist.characterDictionary.collapsibleSections,
|
|
||||||
'Expected object.',
|
|
||||||
);
|
|
||||||
}
|
|
||||||
} else if (src.anilist.characterDictionary !== undefined) {
|
} else if (src.anilist.characterDictionary !== undefined) {
|
||||||
warn(
|
warn(
|
||||||
'anilist.characterDictionary',
|
'anilist.characterDictionary',
|
||||||
|
|||||||
@@ -72,11 +72,6 @@ test('anilist character dictionary fields are parsed, clamped, and enum-validate
|
|||||||
maxLoaded: 99,
|
maxLoaded: 99,
|
||||||
evictionPolicy: 'purge' as never,
|
evictionPolicy: 'purge' as never,
|
||||||
profileScope: 'global' as never,
|
profileScope: 'global' as never,
|
||||||
collapsibleSections: {
|
|
||||||
description: true,
|
|
||||||
characterInformation: 'invalid' as never,
|
|
||||||
voicedBy: true,
|
|
||||||
} as never,
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
@@ -88,19 +83,10 @@ test('anilist character dictionary fields are parsed, clamped, and enum-validate
|
|||||||
assert.equal(context.resolved.anilist.characterDictionary.maxLoaded, 20);
|
assert.equal(context.resolved.anilist.characterDictionary.maxLoaded, 20);
|
||||||
assert.equal(context.resolved.anilist.characterDictionary.evictionPolicy, 'delete');
|
assert.equal(context.resolved.anilist.characterDictionary.evictionPolicy, 'delete');
|
||||||
assert.equal(context.resolved.anilist.characterDictionary.profileScope, 'all');
|
assert.equal(context.resolved.anilist.characterDictionary.profileScope, 'all');
|
||||||
assert.equal(context.resolved.anilist.characterDictionary.collapsibleSections.description, true);
|
|
||||||
assert.equal(
|
|
||||||
context.resolved.anilist.characterDictionary.collapsibleSections.characterInformation,
|
|
||||||
false,
|
|
||||||
);
|
|
||||||
assert.equal(context.resolved.anilist.characterDictionary.collapsibleSections.voicedBy, true);
|
|
||||||
|
|
||||||
const warnedPaths = warnings.map((warning) => warning.path);
|
const warnedPaths = warnings.map((warning) => warning.path);
|
||||||
assert.ok(warnedPaths.includes('anilist.characterDictionary.refreshTtlHours'));
|
assert.ok(warnedPaths.includes('anilist.characterDictionary.refreshTtlHours'));
|
||||||
assert.ok(warnedPaths.includes('anilist.characterDictionary.maxLoaded'));
|
assert.ok(warnedPaths.includes('anilist.characterDictionary.maxLoaded'));
|
||||||
assert.ok(warnedPaths.includes('anilist.characterDictionary.evictionPolicy'));
|
assert.ok(warnedPaths.includes('anilist.characterDictionary.evictionPolicy'));
|
||||||
assert.ok(warnedPaths.includes('anilist.characterDictionary.profileScope'));
|
assert.ok(warnedPaths.includes('anilist.characterDictionary.profileScope'));
|
||||||
assert.ok(
|
|
||||||
warnedPaths.includes('anilist.characterDictionary.collapsibleSections.characterInformation'),
|
|
||||||
);
|
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -105,8 +105,6 @@ export function applySubtitleDomainConfig(context: ResolveContext): void {
|
|||||||
const fallbackSubtitleStyleHoverTokenColor = resolved.subtitleStyle.hoverTokenColor;
|
const fallbackSubtitleStyleHoverTokenColor = resolved.subtitleStyle.hoverTokenColor;
|
||||||
const fallbackSubtitleStyleHoverTokenBackgroundColor =
|
const fallbackSubtitleStyleHoverTokenBackgroundColor =
|
||||||
resolved.subtitleStyle.hoverTokenBackgroundColor;
|
resolved.subtitleStyle.hoverTokenBackgroundColor;
|
||||||
const fallbackSubtitleStyleNameMatchEnabled = resolved.subtitleStyle.nameMatchEnabled;
|
|
||||||
const fallbackSubtitleStyleNameMatchColor = resolved.subtitleStyle.nameMatchColor;
|
|
||||||
const fallbackFrequencyDictionary = {
|
const fallbackFrequencyDictionary = {
|
||||||
...resolved.subtitleStyle.frequencyDictionary,
|
...resolved.subtitleStyle.frequencyDictionary,
|
||||||
};
|
};
|
||||||
@@ -230,38 +228,6 @@ export function applySubtitleDomainConfig(context: ResolveContext): void {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
const nameMatchColor = asColor(
|
|
||||||
(src.subtitleStyle as { nameMatchColor?: unknown }).nameMatchColor,
|
|
||||||
);
|
|
||||||
const nameMatchEnabled = asBoolean(
|
|
||||||
(src.subtitleStyle as { nameMatchEnabled?: unknown }).nameMatchEnabled,
|
|
||||||
);
|
|
||||||
if (nameMatchEnabled !== undefined) {
|
|
||||||
resolved.subtitleStyle.nameMatchEnabled = nameMatchEnabled;
|
|
||||||
} else if (
|
|
||||||
(src.subtitleStyle as { nameMatchEnabled?: unknown }).nameMatchEnabled !== undefined
|
|
||||||
) {
|
|
||||||
resolved.subtitleStyle.nameMatchEnabled = fallbackSubtitleStyleNameMatchEnabled;
|
|
||||||
warn(
|
|
||||||
'subtitleStyle.nameMatchEnabled',
|
|
||||||
(src.subtitleStyle as { nameMatchEnabled?: unknown }).nameMatchEnabled,
|
|
||||||
resolved.subtitleStyle.nameMatchEnabled,
|
|
||||||
'Expected boolean.',
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nameMatchColor !== undefined) {
|
|
||||||
resolved.subtitleStyle.nameMatchColor = nameMatchColor;
|
|
||||||
} else if ((src.subtitleStyle as { nameMatchColor?: unknown }).nameMatchColor !== undefined) {
|
|
||||||
resolved.subtitleStyle.nameMatchColor = fallbackSubtitleStyleNameMatchColor;
|
|
||||||
warn(
|
|
||||||
'subtitleStyle.nameMatchColor',
|
|
||||||
(src.subtitleStyle as { nameMatchColor?: unknown }).nameMatchColor,
|
|
||||||
resolved.subtitleStyle.nameMatchColor,
|
|
||||||
'Expected hex color.',
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
const frequencyDictionary = isObject(
|
const frequencyDictionary = isObject(
|
||||||
(src.subtitleStyle as { frequencyDictionary?: unknown }).frequencyDictionary,
|
(src.subtitleStyle as { frequencyDictionary?: unknown }).frequencyDictionary,
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -66,70 +66,6 @@ test('subtitleStyle autoPauseVideoOnYomitanPopup falls back on invalid value', (
|
|||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
test('subtitleStyle nameMatchEnabled falls back on invalid value', () => {
|
|
||||||
const { context, warnings } = createResolveContext({
|
|
||||||
subtitleStyle: {
|
|
||||||
nameMatchEnabled: 'invalid' as unknown as boolean,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
applySubtitleDomainConfig(context);
|
|
||||||
|
|
||||||
assert.equal(context.resolved.subtitleStyle.nameMatchEnabled, true);
|
|
||||||
assert.ok(
|
|
||||||
warnings.some(
|
|
||||||
(warning) =>
|
|
||||||
warning.path === 'subtitleStyle.nameMatchEnabled' &&
|
|
||||||
warning.message === 'Expected boolean.',
|
|
||||||
),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test('subtitleStyle frequencyDictionary defaults to the teal fourth band color', () => {
|
|
||||||
const { context } = createResolveContext({});
|
|
||||||
|
|
||||||
applySubtitleDomainConfig(context);
|
|
||||||
|
|
||||||
assert.deepEqual(context.resolved.subtitleStyle.frequencyDictionary.bandedColors, [
|
|
||||||
'#ed8796',
|
|
||||||
'#f5a97f',
|
|
||||||
'#f9e2af',
|
|
||||||
'#8bd5ca',
|
|
||||||
'#8aadf4',
|
|
||||||
]);
|
|
||||||
});
|
|
||||||
|
|
||||||
test('subtitleStyle nameMatchColor accepts valid values and warns on invalid', () => {
|
|
||||||
const valid = createResolveContext({
|
|
||||||
subtitleStyle: {
|
|
||||||
nameMatchColor: '#f5bde6',
|
|
||||||
},
|
|
||||||
});
|
|
||||||
applySubtitleDomainConfig(valid.context);
|
|
||||||
assert.equal(
|
|
||||||
(valid.context.resolved.subtitleStyle as { nameMatchColor?: string }).nameMatchColor,
|
|
||||||
'#f5bde6',
|
|
||||||
);
|
|
||||||
|
|
||||||
const invalid = createResolveContext({
|
|
||||||
subtitleStyle: {
|
|
||||||
nameMatchColor: 'pink',
|
|
||||||
},
|
|
||||||
});
|
|
||||||
applySubtitleDomainConfig(invalid.context);
|
|
||||||
assert.equal(
|
|
||||||
(invalid.context.resolved.subtitleStyle as { nameMatchColor?: string }).nameMatchColor,
|
|
||||||
'#f5bde6',
|
|
||||||
);
|
|
||||||
assert.ok(
|
|
||||||
invalid.warnings.some(
|
|
||||||
(warning) =>
|
|
||||||
warning.path === 'subtitleStyle.nameMatchColor' &&
|
|
||||||
warning.message === 'Expected hex color.',
|
|
||||||
),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test('subtitleStyle frequencyDictionary.matchMode accepts valid values and warns on invalid', () => {
|
test('subtitleStyle frequencyDictionary.matchMode accepts valid values and warns on invalid', () => {
|
||||||
const valid = createResolveContext({
|
const valid = createResolveContext({
|
||||||
subtitleStyle: {
|
subtitleStyle: {
|
||||||
|
|||||||
@@ -11,7 +11,6 @@ function makeArgs(overrides: Partial<CliArgs> = {}): CliArgs {
|
|||||||
toggle: false,
|
toggle: false,
|
||||||
toggleVisibleOverlay: false,
|
toggleVisibleOverlay: false,
|
||||||
settings: false,
|
settings: false,
|
||||||
setup: false,
|
|
||||||
show: false,
|
show: false,
|
||||||
hide: false,
|
hide: false,
|
||||||
showVisibleOverlay: false,
|
showVisibleOverlay: false,
|
||||||
|
|||||||
@@ -4,8 +4,7 @@ import { AppReadyRuntimeDeps, runAppReadyRuntime } from './startup';
|
|||||||
|
|
||||||
function makeDeps(overrides: Partial<AppReadyRuntimeDeps> = {}) {
|
function makeDeps(overrides: Partial<AppReadyRuntimeDeps> = {}) {
|
||||||
const calls: string[] = [];
|
const calls: string[] = [];
|
||||||
const deps = {
|
const deps: AppReadyRuntimeDeps = {
|
||||||
ensureDefaultConfigBootstrap: () => calls.push('ensureDefaultConfigBootstrap'),
|
|
||||||
loadSubtitlePosition: () => calls.push('loadSubtitlePosition'),
|
loadSubtitlePosition: () => calls.push('loadSubtitlePosition'),
|
||||||
resolveKeybindings: () => calls.push('resolveKeybindings'),
|
resolveKeybindings: () => calls.push('resolveKeybindings'),
|
||||||
createMpvClient: () => calls.push('createMpvClient'),
|
createMpvClient: () => calls.push('createMpvClient'),
|
||||||
@@ -21,13 +20,8 @@ function makeDeps(overrides: Partial<AppReadyRuntimeDeps> = {}) {
|
|||||||
setSecondarySubMode: (mode) => calls.push(`setSecondarySubMode:${mode}`),
|
setSecondarySubMode: (mode) => calls.push(`setSecondarySubMode:${mode}`),
|
||||||
defaultSecondarySubMode: 'hover',
|
defaultSecondarySubMode: 'hover',
|
||||||
defaultWebsocketPort: 9001,
|
defaultWebsocketPort: 9001,
|
||||||
defaultAnnotationWebsocketPort: 6678,
|
|
||||||
defaultTexthookerPort: 5174,
|
|
||||||
hasMpvWebsocketPlugin: () => true,
|
hasMpvWebsocketPlugin: () => true,
|
||||||
startSubtitleWebsocket: (port) => calls.push(`startSubtitleWebsocket:${port}`),
|
startSubtitleWebsocket: (port) => calls.push(`startSubtitleWebsocket:${port}`),
|
||||||
startAnnotationWebsocket: (port) => calls.push(`startAnnotationWebsocket:${port}`),
|
|
||||||
startTexthooker: (port, websocketUrl) =>
|
|
||||||
calls.push(`startTexthooker:${port}:${websocketUrl ?? ''}`),
|
|
||||||
log: (message) => calls.push(`log:${message}`),
|
log: (message) => calls.push(`log:${message}`),
|
||||||
createMecabTokenizerAndCheck: async () => {
|
createMecabTokenizerAndCheck: async () => {
|
||||||
calls.push('createMecabTokenizerAndCheck');
|
calls.push('createMecabTokenizerAndCheck');
|
||||||
@@ -40,9 +34,6 @@ function makeDeps(overrides: Partial<AppReadyRuntimeDeps> = {}) {
|
|||||||
loadYomitanExtension: async () => {
|
loadYomitanExtension: async () => {
|
||||||
calls.push('loadYomitanExtension');
|
calls.push('loadYomitanExtension');
|
||||||
},
|
},
|
||||||
handleFirstRunSetup: async () => {
|
|
||||||
calls.push('handleFirstRunSetup');
|
|
||||||
},
|
|
||||||
prewarmSubtitleDictionaries: async () => {
|
prewarmSubtitleDictionaries: async () => {
|
||||||
calls.push('prewarmSubtitleDictionaries');
|
calls.push('prewarmSubtitleDictionaries');
|
||||||
},
|
},
|
||||||
@@ -51,13 +42,12 @@ function makeDeps(overrides: Partial<AppReadyRuntimeDeps> = {}) {
|
|||||||
},
|
},
|
||||||
texthookerOnlyMode: false,
|
texthookerOnlyMode: false,
|
||||||
shouldAutoInitializeOverlayRuntimeFromConfig: () => true,
|
shouldAutoInitializeOverlayRuntimeFromConfig: () => true,
|
||||||
setVisibleOverlayVisible: (visible) => calls.push(`setVisibleOverlayVisible:${visible}`),
|
|
||||||
initializeOverlayRuntime: () => calls.push('initializeOverlayRuntime'),
|
initializeOverlayRuntime: () => calls.push('initializeOverlayRuntime'),
|
||||||
handleInitialArgs: () => calls.push('handleInitialArgs'),
|
handleInitialArgs: () => calls.push('handleInitialArgs'),
|
||||||
logDebug: (message) => calls.push(`debug:${message}`),
|
logDebug: (message) => calls.push(`debug:${message}`),
|
||||||
now: () => 1000,
|
now: () => 1000,
|
||||||
...overrides,
|
...overrides,
|
||||||
} as AppReadyRuntimeDeps;
|
};
|
||||||
return { deps, calls };
|
return { deps, calls };
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -66,14 +56,8 @@ test('runAppReadyRuntime starts websocket in auto mode when plugin missing', asy
|
|||||||
hasMpvWebsocketPlugin: () => false,
|
hasMpvWebsocketPlugin: () => false,
|
||||||
});
|
});
|
||||||
await runAppReadyRuntime(deps);
|
await runAppReadyRuntime(deps);
|
||||||
assert.ok(calls.includes('ensureDefaultConfigBootstrap'));
|
|
||||||
assert.ok(calls.includes('startSubtitleWebsocket:9001'));
|
assert.ok(calls.includes('startSubtitleWebsocket:9001'));
|
||||||
assert.ok(calls.includes('startAnnotationWebsocket:6678'));
|
|
||||||
assert.ok(calls.includes('setVisibleOverlayVisible:true'));
|
|
||||||
assert.ok(calls.includes('initializeOverlayRuntime'));
|
assert.ok(calls.includes('initializeOverlayRuntime'));
|
||||||
assert.ok(
|
|
||||||
calls.indexOf('setVisibleOverlayVisible:true') < calls.indexOf('initializeOverlayRuntime'),
|
|
||||||
);
|
|
||||||
assert.ok(calls.includes('startBackgroundWarmups'));
|
assert.ok(calls.includes('startBackgroundWarmups'));
|
||||||
assert.ok(
|
assert.ok(
|
||||||
calls.includes(
|
calls.includes(
|
||||||
@@ -82,46 +66,6 @@ test('runAppReadyRuntime starts websocket in auto mode when plugin missing', asy
|
|||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
test('runAppReadyRuntime starts texthooker on startup when enabled in config', async () => {
|
|
||||||
const { deps, calls } = makeDeps({
|
|
||||||
getResolvedConfig: () => ({
|
|
||||||
websocket: { enabled: 'auto' },
|
|
||||||
secondarySub: {},
|
|
||||||
texthooker: { launchAtStartup: true },
|
|
||||||
}),
|
|
||||||
});
|
|
||||||
|
|
||||||
await runAppReadyRuntime(deps);
|
|
||||||
|
|
||||||
assert.ok(calls.includes('startTexthooker:5174:ws://127.0.0.1:6678'));
|
|
||||||
assert.ok(calls.indexOf('handleFirstRunSetup') < calls.indexOf('handleInitialArgs'));
|
|
||||||
assert.ok(
|
|
||||||
calls.indexOf('createMpvClient') < calls.indexOf('startTexthooker:5174:ws://127.0.0.1:6678'),
|
|
||||||
);
|
|
||||||
assert.ok(
|
|
||||||
calls.indexOf('startTexthooker:5174:ws://127.0.0.1:6678') < calls.indexOf('handleInitialArgs'),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test('runAppReadyRuntime keeps annotation websocket enabled when regular websocket auto-skips', async () => {
|
|
||||||
const { deps, calls } = makeDeps({
|
|
||||||
getResolvedConfig: () => ({
|
|
||||||
websocket: { enabled: 'auto' },
|
|
||||||
annotationWebsocket: { enabled: true, port: 6678 },
|
|
||||||
secondarySub: {},
|
|
||||||
texthooker: { launchAtStartup: true },
|
|
||||||
}),
|
|
||||||
hasMpvWebsocketPlugin: () => true,
|
|
||||||
});
|
|
||||||
|
|
||||||
await runAppReadyRuntime(deps);
|
|
||||||
|
|
||||||
assert.equal(calls.includes('startSubtitleWebsocket:9001'), false);
|
|
||||||
assert.ok(calls.includes('startAnnotationWebsocket:6678'));
|
|
||||||
assert.ok(calls.includes('startTexthooker:5174:ws://127.0.0.1:6678'));
|
|
||||||
assert.ok(calls.includes('log:mpv_websocket detected, skipping built-in WebSocket server'));
|
|
||||||
});
|
|
||||||
|
|
||||||
test('runAppReadyRuntime skips heavy startup when shouldSkipHeavyStartup returns true', async () => {
|
test('runAppReadyRuntime skips heavy startup when shouldSkipHeavyStartup returns true', async () => {
|
||||||
const { deps, calls } = makeDeps({
|
const { deps, calls } = makeDeps({
|
||||||
shouldSkipHeavyStartup: () => true,
|
shouldSkipHeavyStartup: () => true,
|
||||||
@@ -153,7 +97,6 @@ test('runAppReadyRuntime skips heavy startup when shouldSkipHeavyStartup returns
|
|||||||
|
|
||||||
await runAppReadyRuntime(deps);
|
await runAppReadyRuntime(deps);
|
||||||
|
|
||||||
assert.equal(calls.includes('ensureDefaultConfigBootstrap'), true);
|
|
||||||
assert.equal(calls.includes('reloadConfig'), false);
|
assert.equal(calls.includes('reloadConfig'), false);
|
||||||
assert.equal(calls.includes('getResolvedConfig'), false);
|
assert.equal(calls.includes('getResolvedConfig'), false);
|
||||||
assert.equal(calls.includes('getConfigWarnings'), false);
|
assert.equal(calls.includes('getConfigWarnings'), false);
|
||||||
@@ -168,10 +111,7 @@ test('runAppReadyRuntime skips heavy startup when shouldSkipHeavyStartup returns
|
|||||||
assert.equal(calls.includes('logConfigWarning'), false);
|
assert.equal(calls.includes('logConfigWarning'), false);
|
||||||
assert.equal(calls.includes('handleInitialArgs'), true);
|
assert.equal(calls.includes('handleInitialArgs'), true);
|
||||||
assert.equal(calls.includes('loadYomitanExtension'), true);
|
assert.equal(calls.includes('loadYomitanExtension'), true);
|
||||||
assert.equal(calls.includes('handleFirstRunSetup'), true);
|
|
||||||
assert.ok(calls.indexOf('loadYomitanExtension') < calls.indexOf('handleInitialArgs'));
|
assert.ok(calls.indexOf('loadYomitanExtension') < calls.indexOf('handleInitialArgs'));
|
||||||
assert.ok(calls.indexOf('loadYomitanExtension') < calls.indexOf('handleFirstRunSetup'));
|
|
||||||
assert.ok(calls.indexOf('handleFirstRunSetup') < calls.indexOf('handleInitialArgs'));
|
|
||||||
});
|
});
|
||||||
|
|
||||||
test('runAppReadyRuntime skips Jellyfin remote startup when dependency is not wired', async () => {
|
test('runAppReadyRuntime skips Jellyfin remote startup when dependency is not wired', async () => {
|
||||||
|
|||||||
@@ -11,7 +11,6 @@ function makeArgs(overrides: Partial<CliArgs> = {}): CliArgs {
|
|||||||
toggle: false,
|
toggle: false,
|
||||||
toggleVisibleOverlay: false,
|
toggleVisibleOverlay: false,
|
||||||
settings: false,
|
settings: false,
|
||||||
setup: false,
|
|
||||||
show: false,
|
show: false,
|
||||||
hide: false,
|
hide: false,
|
||||||
showVisibleOverlay: false,
|
showVisibleOverlay: false,
|
||||||
@@ -97,9 +96,6 @@ function createDeps(overrides: Partial<CliCommandServiceDeps> = {}) {
|
|||||||
openYomitanSettingsDelayed: (delayMs) => {
|
openYomitanSettingsDelayed: (delayMs) => {
|
||||||
calls.push(`openYomitanSettingsDelayed:${delayMs}`);
|
calls.push(`openYomitanSettingsDelayed:${delayMs}`);
|
||||||
},
|
},
|
||||||
openFirstRunSetup: () => {
|
|
||||||
calls.push('openFirstRunSetup');
|
|
||||||
},
|
|
||||||
setVisibleOverlayVisible: (visible) => {
|
setVisibleOverlayVisible: (visible) => {
|
||||||
calls.push(`setVisibleOverlayVisible:${visible}`);
|
calls.push(`setVisibleOverlayVisible:${visible}`);
|
||||||
},
|
},
|
||||||
@@ -233,16 +229,6 @@ test('handleCliCommand processes --start for second-instance when overlay runtim
|
|||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
test('handleCliCommand opens first-run setup window for --setup', () => {
|
|
||||||
const { deps, calls } = createDeps();
|
|
||||||
|
|
||||||
handleCliCommand(makeArgs({ setup: true }), 'initial', deps);
|
|
||||||
|
|
||||||
assert.ok(calls.includes('openFirstRunSetup'));
|
|
||||||
assert.ok(calls.includes('log:Opened first-run setup flow.'));
|
|
||||||
assert.equal(calls.includes('openYomitanSettingsDelayed:1000'), false);
|
|
||||||
});
|
|
||||||
|
|
||||||
test('handleCliCommand applies cli log level for second-instance commands', () => {
|
test('handleCliCommand applies cli log level for second-instance commands', () => {
|
||||||
const { deps, calls } = createDeps({
|
const { deps, calls } = createDeps({
|
||||||
setLogLevel: (level) => {
|
setLogLevel: (level) => {
|
||||||
|
|||||||
@@ -17,7 +17,6 @@ export interface CliCommandServiceDeps {
|
|||||||
isOverlayRuntimeInitialized: () => boolean;
|
isOverlayRuntimeInitialized: () => boolean;
|
||||||
initializeOverlayRuntime: () => void;
|
initializeOverlayRuntime: () => void;
|
||||||
toggleVisibleOverlay: () => void;
|
toggleVisibleOverlay: () => void;
|
||||||
openFirstRunSetup: () => void;
|
|
||||||
openYomitanSettingsDelayed: (delayMs: number) => void;
|
openYomitanSettingsDelayed: (delayMs: number) => void;
|
||||||
setVisibleOverlayVisible: (visible: boolean) => void;
|
setVisibleOverlayVisible: (visible: boolean) => void;
|
||||||
copyCurrentSubtitle: () => void;
|
copyCurrentSubtitle: () => void;
|
||||||
@@ -116,7 +115,6 @@ interface MiningCliRuntime {
|
|||||||
}
|
}
|
||||||
|
|
||||||
interface UiCliRuntime {
|
interface UiCliRuntime {
|
||||||
openFirstRunSetup: () => void;
|
|
||||||
openYomitanSettings: () => void;
|
openYomitanSettings: () => void;
|
||||||
cycleSecondarySubMode: () => void;
|
cycleSecondarySubMode: () => void;
|
||||||
openRuntimeOptionsPalette: () => void;
|
openRuntimeOptionsPalette: () => void;
|
||||||
@@ -197,7 +195,6 @@ export function createCliCommandDepsRuntime(
|
|||||||
isOverlayRuntimeInitialized: options.overlay.isInitialized,
|
isOverlayRuntimeInitialized: options.overlay.isInitialized,
|
||||||
initializeOverlayRuntime: options.overlay.initialize,
|
initializeOverlayRuntime: options.overlay.initialize,
|
||||||
toggleVisibleOverlay: options.overlay.toggleVisible,
|
toggleVisibleOverlay: options.overlay.toggleVisible,
|
||||||
openFirstRunSetup: options.ui.openFirstRunSetup,
|
|
||||||
openYomitanSettingsDelayed: (delayMs) => {
|
openYomitanSettingsDelayed: (delayMs) => {
|
||||||
options.schedule(() => {
|
options.schedule(() => {
|
||||||
options.ui.openYomitanSettings();
|
options.ui.openYomitanSettings();
|
||||||
@@ -261,8 +258,7 @@ export function handleCliCommand(
|
|||||||
|
|
||||||
const ignoreSecondInstanceStart =
|
const ignoreSecondInstanceStart =
|
||||||
source === 'second-instance' && args.start && deps.isOverlayRuntimeInitialized();
|
source === 'second-instance' && args.start && deps.isOverlayRuntimeInitialized();
|
||||||
const shouldStart =
|
const shouldStart = (!ignoreSecondInstanceStart && args.start) || args.toggle || args.toggleVisibleOverlay;
|
||||||
(!ignoreSecondInstanceStart && args.start) || args.toggle || args.toggleVisibleOverlay;
|
|
||||||
const needsOverlayRuntime = commandNeedsOverlayRuntime(args);
|
const needsOverlayRuntime = commandNeedsOverlayRuntime(args);
|
||||||
const shouldInitializeOverlayRuntime = needsOverlayRuntime || args.start;
|
const shouldInitializeOverlayRuntime = needsOverlayRuntime || args.start;
|
||||||
|
|
||||||
@@ -302,9 +298,6 @@ export function handleCliCommand(
|
|||||||
|
|
||||||
if (args.toggle || args.toggleVisibleOverlay) {
|
if (args.toggle || args.toggleVisibleOverlay) {
|
||||||
deps.toggleVisibleOverlay();
|
deps.toggleVisibleOverlay();
|
||||||
} else if (args.setup) {
|
|
||||||
deps.openFirstRunSetup();
|
|
||||||
deps.log('Opened first-run setup flow.');
|
|
||||||
} else if (args.settings) {
|
} else if (args.settings) {
|
||||||
deps.openYomitanSettingsDelayed(1000);
|
deps.openYomitanSettingsDelayed(1000);
|
||||||
} else if (args.show || args.showVisibleOverlay) {
|
} else if (args.show || args.showVisibleOverlay) {
|
||||||
|
|||||||
@@ -38,7 +38,6 @@ function createOptions(overrides: Partial<Parameters<typeof handleMpvCommandFrom
|
|||||||
mpvSendCommand: (command) => {
|
mpvSendCommand: (command) => {
|
||||||
sentCommands.push(command);
|
sentCommands.push(command);
|
||||||
},
|
},
|
||||||
resolveProxyCommandOsd: async () => null,
|
|
||||||
isMpvConnected: () => true,
|
isMpvConnected: () => true,
|
||||||
hasRuntimeOptionsManager: () => true,
|
hasRuntimeOptionsManager: () => true,
|
||||||
...overrides,
|
...overrides,
|
||||||
@@ -53,39 +52,30 @@ test('handleMpvCommandFromIpc forwards regular mpv commands', () => {
|
|||||||
assert.deepEqual(osd, []);
|
assert.deepEqual(osd, []);
|
||||||
});
|
});
|
||||||
|
|
||||||
test('handleMpvCommandFromIpc emits osd for subtitle position keybinding proxies', async () => {
|
test('handleMpvCommandFromIpc emits osd for subtitle position keybinding proxies', () => {
|
||||||
const { options, sentCommands, osd } = createOptions();
|
const { options, sentCommands, osd } = createOptions();
|
||||||
handleMpvCommandFromIpc(['add', 'sub-pos', 1], options);
|
handleMpvCommandFromIpc(['add', 'sub-pos', 1], options);
|
||||||
await new Promise((resolve) => setImmediate(resolve));
|
|
||||||
assert.deepEqual(sentCommands, [['add', 'sub-pos', 1]]);
|
assert.deepEqual(sentCommands, [['add', 'sub-pos', 1]]);
|
||||||
assert.deepEqual(osd, ['Subtitle position: ${sub-pos}']);
|
assert.deepEqual(osd, ['Subtitle position: ${sub-pos}']);
|
||||||
});
|
});
|
||||||
|
|
||||||
test('handleMpvCommandFromIpc emits resolved osd for primary subtitle track keybinding proxies', async () => {
|
test('handleMpvCommandFromIpc emits osd for primary subtitle track keybinding proxies', () => {
|
||||||
const { options, sentCommands, osd } = createOptions({
|
const { options, sentCommands, osd } = createOptions();
|
||||||
resolveProxyCommandOsd: async () => 'Subtitle track: Internal #3 - Japanese (active)',
|
|
||||||
});
|
|
||||||
handleMpvCommandFromIpc(['cycle', 'sid'], options);
|
handleMpvCommandFromIpc(['cycle', 'sid'], options);
|
||||||
await new Promise((resolve) => setImmediate(resolve));
|
|
||||||
assert.deepEqual(sentCommands, [['cycle', 'sid']]);
|
assert.deepEqual(sentCommands, [['cycle', 'sid']]);
|
||||||
assert.deepEqual(osd, ['Subtitle track: Internal #3 - Japanese (active)']);
|
assert.deepEqual(osd, ['Subtitle track: ${sid}']);
|
||||||
});
|
});
|
||||||
|
|
||||||
test('handleMpvCommandFromIpc emits resolved osd for secondary subtitle track keybinding proxies', async () => {
|
test('handleMpvCommandFromIpc emits osd for secondary subtitle track keybinding proxies', () => {
|
||||||
const { options, sentCommands, osd } = createOptions({
|
const { options, sentCommands, osd } = createOptions();
|
||||||
resolveProxyCommandOsd: async () =>
|
|
||||||
'Secondary subtitle track: External #8 - English Commentary',
|
|
||||||
});
|
|
||||||
handleMpvCommandFromIpc(['set_property', 'secondary-sid', 'auto'], options);
|
handleMpvCommandFromIpc(['set_property', 'secondary-sid', 'auto'], options);
|
||||||
await new Promise((resolve) => setImmediate(resolve));
|
|
||||||
assert.deepEqual(sentCommands, [['set_property', 'secondary-sid', 'auto']]);
|
assert.deepEqual(sentCommands, [['set_property', 'secondary-sid', 'auto']]);
|
||||||
assert.deepEqual(osd, ['Secondary subtitle track: External #8 - English Commentary']);
|
assert.deepEqual(osd, ['Secondary subtitle track: ${secondary-sid}']);
|
||||||
});
|
});
|
||||||
|
|
||||||
test('handleMpvCommandFromIpc emits osd for subtitle delay keybinding proxies', async () => {
|
test('handleMpvCommandFromIpc emits osd for subtitle delay keybinding proxies', () => {
|
||||||
const { options, sentCommands, osd } = createOptions();
|
const { options, sentCommands, osd } = createOptions();
|
||||||
handleMpvCommandFromIpc(['add', 'sub-delay', 0.1], options);
|
handleMpvCommandFromIpc(['add', 'sub-delay', 0.1], options);
|
||||||
await new Promise((resolve) => setImmediate(resolve));
|
|
||||||
assert.deepEqual(sentCommands, [['add', 'sub-delay', 0.1]]);
|
assert.deepEqual(sentCommands, [['add', 'sub-delay', 0.1]]);
|
||||||
assert.deepEqual(osd, ['Subtitle delay: ${sub-delay}']);
|
assert.deepEqual(osd, ['Subtitle delay: ${sub-delay}']);
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -23,7 +23,6 @@ export interface HandleMpvCommandFromIpcOptions {
|
|||||||
mpvPlayNextSubtitle: () => void;
|
mpvPlayNextSubtitle: () => void;
|
||||||
shiftSubDelayToAdjacentSubtitle: (direction: 'next' | 'previous') => Promise<void>;
|
shiftSubDelayToAdjacentSubtitle: (direction: 'next' | 'previous') => Promise<void>;
|
||||||
mpvSendCommand: (command: (string | number)[]) => void;
|
mpvSendCommand: (command: (string | number)[]) => void;
|
||||||
resolveProxyCommandOsd?: (command: (string | number)[]) => Promise<string | null>;
|
|
||||||
isMpvConnected: () => boolean;
|
isMpvConnected: () => boolean;
|
||||||
hasRuntimeOptionsManager: () => boolean;
|
hasRuntimeOptionsManager: () => boolean;
|
||||||
}
|
}
|
||||||
@@ -37,7 +36,7 @@ const MPV_PROPERTY_COMMANDS = new Set([
|
|||||||
'multiply',
|
'multiply',
|
||||||
]);
|
]);
|
||||||
|
|
||||||
function resolveProxyCommandOsdTemplate(command: (string | number)[]): string | null {
|
function resolveProxyCommandOsd(command: (string | number)[]): string | null {
|
||||||
const operation = typeof command[0] === 'string' ? command[0] : '';
|
const operation = typeof command[0] === 'string' ? command[0] : '';
|
||||||
const property = typeof command[1] === 'string' ? command[1] : '';
|
const property = typeof command[1] === 'string' ? command[1] : '';
|
||||||
if (!MPV_PROPERTY_COMMANDS.has(operation)) return null;
|
if (!MPV_PROPERTY_COMMANDS.has(operation)) return null;
|
||||||
@@ -56,25 +55,6 @@ function resolveProxyCommandOsdTemplate(command: (string | number)[]): string |
|
|||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
function showResolvedProxyCommandOsd(
|
|
||||||
command: (string | number)[],
|
|
||||||
options: HandleMpvCommandFromIpcOptions,
|
|
||||||
): void {
|
|
||||||
const template = resolveProxyCommandOsdTemplate(command);
|
|
||||||
if (!template) return;
|
|
||||||
|
|
||||||
const emit = async () => {
|
|
||||||
try {
|
|
||||||
const resolved = await options.resolveProxyCommandOsd?.(command);
|
|
||||||
options.showMpvOsd(resolved || template);
|
|
||||||
} catch {
|
|
||||||
options.showMpvOsd(template);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
void emit();
|
|
||||||
}
|
|
||||||
|
|
||||||
export function handleMpvCommandFromIpc(
|
export function handleMpvCommandFromIpc(
|
||||||
command: (string | number)[],
|
command: (string | number)[],
|
||||||
options: HandleMpvCommandFromIpcOptions,
|
options: HandleMpvCommandFromIpcOptions,
|
||||||
@@ -123,7 +103,10 @@ export function handleMpvCommandFromIpc(
|
|||||||
options.mpvPlayNextSubtitle();
|
options.mpvPlayNextSubtitle();
|
||||||
} else {
|
} else {
|
||||||
options.mpvSendCommand(command);
|
options.mpvSendCommand(command);
|
||||||
showResolvedProxyCommandOsd(command, options);
|
const osd = resolveProxyCommandOsd(command);
|
||||||
|
if (osd) {
|
||||||
|
options.showMpvOsd(osd);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -22,22 +22,6 @@ test('showMpvOsdRuntime sends show-text when connected', () => {
|
|||||||
assert.deepEqual(commands, [['show-text', 'hello', '3000']]);
|
assert.deepEqual(commands, [['show-text', 'hello', '3000']]);
|
||||||
});
|
});
|
||||||
|
|
||||||
test('showMpvOsdRuntime enables property expansion for placeholder-based messages', () => {
|
|
||||||
const commands: (string | number)[][] = [];
|
|
||||||
showMpvOsdRuntime(
|
|
||||||
{
|
|
||||||
connected: true,
|
|
||||||
send: ({ command }) => {
|
|
||||||
commands.push(command);
|
|
||||||
},
|
|
||||||
},
|
|
||||||
'Subtitle delay: ${sub-delay}',
|
|
||||||
);
|
|
||||||
assert.deepEqual(commands, [
|
|
||||||
['expand-properties', 'show-text', 'Subtitle delay: ${sub-delay}', '3000'],
|
|
||||||
]);
|
|
||||||
});
|
|
||||||
|
|
||||||
test('showMpvOsdRuntime logs fallback when disconnected', () => {
|
test('showMpvOsdRuntime logs fallback when disconnected', () => {
|
||||||
const logs: string[] = [];
|
const logs: string[] = [];
|
||||||
showMpvOsdRuntime(
|
showMpvOsdRuntime(
|
||||||
|
|||||||
@@ -53,10 +53,7 @@ export function showMpvOsdRuntime(
|
|||||||
fallbackLog: (text: string) => void = (line) => logger.info(line),
|
fallbackLog: (text: string) => void = (line) => logger.info(line),
|
||||||
): void {
|
): void {
|
||||||
if (mpvClient && mpvClient.connected) {
|
if (mpvClient && mpvClient.connected) {
|
||||||
const command = text.includes('${')
|
mpvClient.send({ command: ['show-text', text, '3000'] });
|
||||||
? ['expand-properties', 'show-text', text, '3000']
|
|
||||||
: ['show-text', text, '3000'];
|
|
||||||
mpvClient.send({ command });
|
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
fallbackLog(`OSD (MPV not connected): ${text}`);
|
fallbackLog(`OSD (MPV not connected): ${text}`);
|
||||||
|
|||||||
@@ -11,7 +11,6 @@ function makeArgs(overrides: Partial<CliArgs> = {}): CliArgs {
|
|||||||
toggle: false,
|
toggle: false,
|
||||||
toggleVisibleOverlay: false,
|
toggleVisibleOverlay: false,
|
||||||
settings: false,
|
settings: false,
|
||||||
setup: false,
|
|
||||||
show: false,
|
show: false,
|
||||||
hide: false,
|
hide: false,
|
||||||
showVisibleOverlay: false,
|
showVisibleOverlay: false,
|
||||||
|
|||||||
@@ -69,13 +69,6 @@ export function runStartupBootstrapRuntime(
|
|||||||
}
|
}
|
||||||
|
|
||||||
interface AppReadyConfigLike {
|
interface AppReadyConfigLike {
|
||||||
annotationWebsocket?: {
|
|
||||||
enabled?: boolean;
|
|
||||||
port?: number;
|
|
||||||
};
|
|
||||||
texthooker?: {
|
|
||||||
launchAtStartup?: boolean;
|
|
||||||
};
|
|
||||||
secondarySub?: {
|
secondarySub?: {
|
||||||
defaultMode?: SecondarySubMode;
|
defaultMode?: SecondarySubMode;
|
||||||
};
|
};
|
||||||
@@ -99,7 +92,6 @@ interface AppReadyConfigLike {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export interface AppReadyRuntimeDeps {
|
export interface AppReadyRuntimeDeps {
|
||||||
ensureDefaultConfigBootstrap: () => void;
|
|
||||||
loadSubtitlePosition: () => void;
|
loadSubtitlePosition: () => void;
|
||||||
resolveKeybindings: () => void;
|
resolveKeybindings: () => void;
|
||||||
createMpvClient: () => void;
|
createMpvClient: () => void;
|
||||||
@@ -112,24 +104,18 @@ export interface AppReadyRuntimeDeps {
|
|||||||
setSecondarySubMode: (mode: SecondarySubMode) => void;
|
setSecondarySubMode: (mode: SecondarySubMode) => void;
|
||||||
defaultSecondarySubMode: SecondarySubMode;
|
defaultSecondarySubMode: SecondarySubMode;
|
||||||
defaultWebsocketPort: number;
|
defaultWebsocketPort: number;
|
||||||
defaultAnnotationWebsocketPort: number;
|
|
||||||
defaultTexthookerPort: number;
|
|
||||||
hasMpvWebsocketPlugin: () => boolean;
|
hasMpvWebsocketPlugin: () => boolean;
|
||||||
startSubtitleWebsocket: (port: number) => void;
|
startSubtitleWebsocket: (port: number) => void;
|
||||||
startAnnotationWebsocket: (port: number) => void;
|
|
||||||
startTexthooker: (port: number, websocketUrl?: string) => void;
|
|
||||||
log: (message: string) => void;
|
log: (message: string) => void;
|
||||||
createMecabTokenizerAndCheck: () => Promise<void>;
|
createMecabTokenizerAndCheck: () => Promise<void>;
|
||||||
createSubtitleTimingTracker: () => void;
|
createSubtitleTimingTracker: () => void;
|
||||||
createImmersionTracker?: () => void;
|
createImmersionTracker?: () => void;
|
||||||
startJellyfinRemoteSession?: () => Promise<void>;
|
startJellyfinRemoteSession?: () => Promise<void>;
|
||||||
loadYomitanExtension: () => Promise<void>;
|
loadYomitanExtension: () => Promise<void>;
|
||||||
handleFirstRunSetup: () => Promise<void>;
|
|
||||||
prewarmSubtitleDictionaries?: () => Promise<void>;
|
prewarmSubtitleDictionaries?: () => Promise<void>;
|
||||||
startBackgroundWarmups: () => void;
|
startBackgroundWarmups: () => void;
|
||||||
texthookerOnlyMode: boolean;
|
texthookerOnlyMode: boolean;
|
||||||
shouldAutoInitializeOverlayRuntimeFromConfig: () => boolean;
|
shouldAutoInitializeOverlayRuntimeFromConfig: () => boolean;
|
||||||
setVisibleOverlayVisible: (visible: boolean) => void;
|
|
||||||
initializeOverlayRuntime: () => void;
|
initializeOverlayRuntime: () => void;
|
||||||
handleInitialArgs: () => void;
|
handleInitialArgs: () => void;
|
||||||
logDebug?: (message: string) => void;
|
logDebug?: (message: string) => void;
|
||||||
@@ -182,10 +168,8 @@ export function isAutoUpdateEnabledRuntime(
|
|||||||
export async function runAppReadyRuntime(deps: AppReadyRuntimeDeps): Promise<void> {
|
export async function runAppReadyRuntime(deps: AppReadyRuntimeDeps): Promise<void> {
|
||||||
const now = deps.now ?? (() => Date.now());
|
const now = deps.now ?? (() => Date.now());
|
||||||
const startupStartedAtMs = now();
|
const startupStartedAtMs = now();
|
||||||
deps.ensureDefaultConfigBootstrap();
|
|
||||||
if (deps.shouldSkipHeavyStartup?.()) {
|
if (deps.shouldSkipHeavyStartup?.()) {
|
||||||
await deps.loadYomitanExtension();
|
await deps.loadYomitanExtension();
|
||||||
await deps.handleFirstRunSetup();
|
|
||||||
deps.handleInitialArgs();
|
deps.handleInitialArgs();
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@@ -194,7 +178,6 @@ export async function runAppReadyRuntime(deps: AppReadyRuntimeDeps): Promise<voi
|
|||||||
|
|
||||||
if (deps.shouldSkipHeavyStartup?.()) {
|
if (deps.shouldSkipHeavyStartup?.()) {
|
||||||
await deps.loadYomitanExtension();
|
await deps.loadYomitanExtension();
|
||||||
await deps.handleFirstRunSetup();
|
|
||||||
deps.handleInitialArgs();
|
deps.handleInitialArgs();
|
||||||
deps.logDebug?.(`App-ready critical path finished in ${now() - startupStartedAtMs}ms.`);
|
deps.logDebug?.(`App-ready critical path finished in ${now() - startupStartedAtMs}ms.`);
|
||||||
return;
|
return;
|
||||||
@@ -226,11 +209,6 @@ export async function runAppReadyRuntime(deps: AppReadyRuntimeDeps): Promise<voi
|
|||||||
const wsConfig = config.websocket || {};
|
const wsConfig = config.websocket || {};
|
||||||
const wsEnabled = wsConfig.enabled ?? 'auto';
|
const wsEnabled = wsConfig.enabled ?? 'auto';
|
||||||
const wsPort = wsConfig.port || deps.defaultWebsocketPort;
|
const wsPort = wsConfig.port || deps.defaultWebsocketPort;
|
||||||
const annotationWsConfig = config.annotationWebsocket || {};
|
|
||||||
const annotationWsEnabled = annotationWsConfig.enabled !== false;
|
|
||||||
const annotationWsPort = annotationWsConfig.port || deps.defaultAnnotationWebsocketPort;
|
|
||||||
const texthookerPort = deps.defaultTexthookerPort;
|
|
||||||
let texthookerWebsocketUrl: string | undefined;
|
|
||||||
|
|
||||||
if (wsEnabled === true || (wsEnabled === 'auto' && !deps.hasMpvWebsocketPlugin())) {
|
if (wsEnabled === true || (wsEnabled === 'auto' && !deps.hasMpvWebsocketPlugin())) {
|
||||||
deps.startSubtitleWebsocket(wsPort);
|
deps.startSubtitleWebsocket(wsPort);
|
||||||
@@ -238,17 +216,6 @@ export async function runAppReadyRuntime(deps: AppReadyRuntimeDeps): Promise<voi
|
|||||||
deps.log('mpv_websocket detected, skipping built-in WebSocket server');
|
deps.log('mpv_websocket detected, skipping built-in WebSocket server');
|
||||||
}
|
}
|
||||||
|
|
||||||
if (annotationWsEnabled) {
|
|
||||||
deps.startAnnotationWebsocket(annotationWsPort);
|
|
||||||
texthookerWebsocketUrl = `ws://127.0.0.1:${annotationWsPort}`;
|
|
||||||
} else if (wsEnabled === true || (wsEnabled === 'auto' && !deps.hasMpvWebsocketPlugin())) {
|
|
||||||
texthookerWebsocketUrl = `ws://127.0.0.1:${wsPort}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (config.texthooker?.launchAtStartup !== false) {
|
|
||||||
deps.startTexthooker(texthookerPort, texthookerWebsocketUrl);
|
|
||||||
}
|
|
||||||
|
|
||||||
deps.createSubtitleTimingTracker();
|
deps.createSubtitleTimingTracker();
|
||||||
if (deps.createImmersionTracker) {
|
if (deps.createImmersionTracker) {
|
||||||
deps.log('Runtime ready: immersion tracker startup deferred until first media activity.');
|
deps.log('Runtime ready: immersion tracker startup deferred until first media activity.');
|
||||||
@@ -259,14 +226,11 @@ export async function runAppReadyRuntime(deps: AppReadyRuntimeDeps): Promise<voi
|
|||||||
if (deps.texthookerOnlyMode) {
|
if (deps.texthookerOnlyMode) {
|
||||||
deps.log('Texthooker-only mode enabled; skipping overlay window.');
|
deps.log('Texthooker-only mode enabled; skipping overlay window.');
|
||||||
} else if (deps.shouldAutoInitializeOverlayRuntimeFromConfig()) {
|
} else if (deps.shouldAutoInitializeOverlayRuntimeFromConfig()) {
|
||||||
deps.setVisibleOverlayVisible(true);
|
|
||||||
deps.initializeOverlayRuntime();
|
deps.initializeOverlayRuntime();
|
||||||
} else {
|
} else {
|
||||||
deps.log('Overlay runtime deferred: waiting for explicit overlay command.');
|
deps.log('Overlay runtime deferred: waiting for explicit overlay command.');
|
||||||
}
|
}
|
||||||
|
|
||||||
await deps.loadYomitanExtension();
|
|
||||||
await deps.handleFirstRunSetup();
|
|
||||||
deps.handleInitialArgs();
|
deps.handleInitialArgs();
|
||||||
deps.logDebug?.(`App-ready critical path finished in ${now() - startupStartedAtMs}ms.`);
|
deps.logDebug?.(`App-ready critical path finished in ${now() - startupStartedAtMs}ms.`);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,10 +1,6 @@
|
|||||||
import test from 'node:test';
|
import test from 'node:test';
|
||||||
import assert from 'node:assert/strict';
|
import assert from 'node:assert/strict';
|
||||||
import {
|
import { serializeSubtitleMarkup, serializeSubtitleWebsocketMessage } from './subtitle-ws';
|
||||||
serializeInitialSubtitleWebsocketMessage,
|
|
||||||
serializeSubtitleMarkup,
|
|
||||||
serializeSubtitleWebsocketMessage,
|
|
||||||
} from './subtitle-ws';
|
|
||||||
import { PartOfSpeech, type SubtitleData } from '../../types';
|
import { PartOfSpeech, type SubtitleData } from '../../types';
|
||||||
|
|
||||||
const frequencyOptions = {
|
const frequencyOptions = {
|
||||||
@@ -82,51 +78,6 @@ test('serializeSubtitleMarkup includes known, n+1, jlpt, and frequency classes',
|
|||||||
assert.match(markup, /word word-frequency-band-1/);
|
assert.match(markup, /word word-frequency-band-1/);
|
||||||
});
|
});
|
||||||
|
|
||||||
test('serializeSubtitleMarkup preserves tooltip attrs and name-match precedence', () => {
|
|
||||||
const payload: SubtitleData = {
|
|
||||||
text: 'ignored',
|
|
||||||
tokens: [
|
|
||||||
{
|
|
||||||
surface: '無事',
|
|
||||||
reading: 'ぶじ',
|
|
||||||
headword: '無事',
|
|
||||||
startPos: 0,
|
|
||||||
endPos: 2,
|
|
||||||
partOfSpeech: PartOfSpeech.other,
|
|
||||||
isMerged: false,
|
|
||||||
isKnown: true,
|
|
||||||
isNPlusOneTarget: false,
|
|
||||||
jlptLevel: 'N2',
|
|
||||||
frequencyRank: 745,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
surface: 'アレクシア',
|
|
||||||
reading: 'あれくしあ',
|
|
||||||
headword: 'アレクシア',
|
|
||||||
startPos: 2,
|
|
||||||
endPos: 7,
|
|
||||||
partOfSpeech: PartOfSpeech.other,
|
|
||||||
isMerged: false,
|
|
||||||
isKnown: false,
|
|
||||||
isNPlusOneTarget: false,
|
|
||||||
isNameMatch: true,
|
|
||||||
frequencyRank: 12,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
};
|
|
||||||
|
|
||||||
const markup = serializeSubtitleMarkup(payload, frequencyOptions);
|
|
||||||
assert.match(
|
|
||||||
markup,
|
|
||||||
/<span class="word word-known word-jlpt-n2" data-reading="ぶじ" data-headword="無事" data-frequency-rank="745" data-jlpt-level="N2">無事<\/span>/,
|
|
||||||
);
|
|
||||||
assert.match(
|
|
||||||
markup,
|
|
||||||
/<span class="word word-name-match" data-reading="あれくしあ" data-headword="アレクシア" data-frequency-rank="12">アレクシア<\/span>/,
|
|
||||||
);
|
|
||||||
assert.doesNotMatch(markup, /word-name-match word-known|word-known word-name-match/);
|
|
||||||
});
|
|
||||||
|
|
||||||
test('serializeSubtitleWebsocketMessage emits sentence payload', () => {
|
test('serializeSubtitleWebsocketMessage emits sentence payload', () => {
|
||||||
const payload: SubtitleData = {
|
const payload: SubtitleData = {
|
||||||
text: '字幕',
|
text: '字幕',
|
||||||
@@ -134,101 +85,5 @@ test('serializeSubtitleWebsocketMessage emits sentence payload', () => {
|
|||||||
};
|
};
|
||||||
|
|
||||||
const raw = serializeSubtitleWebsocketMessage(payload, frequencyOptions);
|
const raw = serializeSubtitleWebsocketMessage(payload, frequencyOptions);
|
||||||
assert.deepEqual(JSON.parse(raw), {
|
assert.deepEqual(JSON.parse(raw), { sentence: '字幕' });
|
||||||
version: 1,
|
|
||||||
text: '字幕',
|
|
||||||
sentence: '字幕',
|
|
||||||
tokens: [],
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
test('serializeSubtitleWebsocketMessage emits structured token api payload', () => {
|
|
||||||
const payload: SubtitleData = {
|
|
||||||
text: '無事',
|
|
||||||
tokens: [
|
|
||||||
{
|
|
||||||
surface: '無事',
|
|
||||||
reading: 'ぶじ',
|
|
||||||
headword: '無事',
|
|
||||||
startPos: 0,
|
|
||||||
endPos: 2,
|
|
||||||
partOfSpeech: PartOfSpeech.other,
|
|
||||||
isMerged: false,
|
|
||||||
isKnown: true,
|
|
||||||
isNPlusOneTarget: false,
|
|
||||||
jlptLevel: 'N2',
|
|
||||||
frequencyRank: 745,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
};
|
|
||||||
|
|
||||||
const raw = serializeSubtitleWebsocketMessage(payload, frequencyOptions);
|
|
||||||
assert.deepEqual(JSON.parse(raw), {
|
|
||||||
version: 1,
|
|
||||||
text: '無事',
|
|
||||||
sentence:
|
|
||||||
'<span class="word word-known word-jlpt-n2" data-reading="ぶじ" data-headword="無事" data-frequency-rank="745" data-jlpt-level="N2">無事</span>',
|
|
||||||
tokens: [
|
|
||||||
{
|
|
||||||
surface: '無事',
|
|
||||||
reading: 'ぶじ',
|
|
||||||
headword: '無事',
|
|
||||||
startPos: 0,
|
|
||||||
endPos: 2,
|
|
||||||
partOfSpeech: PartOfSpeech.other,
|
|
||||||
isMerged: false,
|
|
||||||
isKnown: true,
|
|
||||||
isNPlusOneTarget: false,
|
|
||||||
isNameMatch: false,
|
|
||||||
jlptLevel: 'N2',
|
|
||||||
frequencyRank: 745,
|
|
||||||
className: 'word word-known word-jlpt-n2',
|
|
||||||
frequencyRankLabel: '745',
|
|
||||||
jlptLevelLabel: 'N2',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
test('serializeInitialSubtitleWebsocketMessage keeps annotated current subtitle content', () => {
|
|
||||||
const payload: SubtitleData = {
|
|
||||||
text: 'ignored fallback',
|
|
||||||
tokens: [
|
|
||||||
{
|
|
||||||
surface: '既知',
|
|
||||||
reading: '',
|
|
||||||
headword: '',
|
|
||||||
startPos: 0,
|
|
||||||
endPos: 2,
|
|
||||||
partOfSpeech: PartOfSpeech.other,
|
|
||||||
isMerged: false,
|
|
||||||
isKnown: true,
|
|
||||||
isNPlusOneTarget: false,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
};
|
|
||||||
|
|
||||||
const raw = serializeInitialSubtitleWebsocketMessage(payload, frequencyOptions);
|
|
||||||
assert.deepEqual(JSON.parse(raw ?? ''), {
|
|
||||||
version: 1,
|
|
||||||
text: 'ignored fallback',
|
|
||||||
sentence: '<span class="word word-known">既知</span>',
|
|
||||||
tokens: [
|
|
||||||
{
|
|
||||||
surface: '既知',
|
|
||||||
reading: '',
|
|
||||||
headword: '',
|
|
||||||
startPos: 0,
|
|
||||||
endPos: 2,
|
|
||||||
partOfSpeech: PartOfSpeech.other,
|
|
||||||
isMerged: false,
|
|
||||||
isKnown: true,
|
|
||||||
isNPlusOneTarget: false,
|
|
||||||
isNameMatch: false,
|
|
||||||
className: 'word word-known',
|
|
||||||
frequencyRankLabel: null,
|
|
||||||
jlptLevelLabel: null,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -18,26 +18,6 @@ export type SubtitleWebsocketFrequencyOptions = {
|
|||||||
mode: 'single' | 'banded';
|
mode: 'single' | 'banded';
|
||||||
};
|
};
|
||||||
|
|
||||||
type SerializedSubtitleToken = Pick<
|
|
||||||
MergedToken,
|
|
||||||
| 'surface'
|
|
||||||
| 'reading'
|
|
||||||
| 'headword'
|
|
||||||
| 'startPos'
|
|
||||||
| 'endPos'
|
|
||||||
| 'partOfSpeech'
|
|
||||||
| 'isMerged'
|
|
||||||
| 'isKnown'
|
|
||||||
| 'isNPlusOneTarget'
|
|
||||||
| 'frequencyRank'
|
|
||||||
| 'jlptLevel'
|
|
||||||
> & {
|
|
||||||
isNameMatch: boolean;
|
|
||||||
className: string;
|
|
||||||
frequencyRankLabel: string | null;
|
|
||||||
jlptLevelLabel: string | null;
|
|
||||||
};
|
|
||||||
|
|
||||||
function escapeHtml(text: string): string {
|
function escapeHtml(text: string): string {
|
||||||
return text
|
return text
|
||||||
.replaceAll('&', '&')
|
.replaceAll('&', '&')
|
||||||
@@ -66,29 +46,11 @@ function computeFrequencyClass(
|
|||||||
return 'word-frequency-single';
|
return 'word-frequency-single';
|
||||||
}
|
}
|
||||||
|
|
||||||
function getFrequencyRankLabel(
|
|
||||||
token: MergedToken,
|
|
||||||
options: SubtitleWebsocketFrequencyOptions,
|
|
||||||
): string | null {
|
|
||||||
if (!options.enabled) return null;
|
|
||||||
if (typeof token.frequencyRank !== 'number' || !Number.isFinite(token.frequencyRank)) return null;
|
|
||||||
|
|
||||||
const rank = Math.max(1, Math.floor(token.frequencyRank));
|
|
||||||
const topX = Math.max(1, Math.floor(options.topX));
|
|
||||||
return rank <= topX ? String(rank) : null;
|
|
||||||
}
|
|
||||||
|
|
||||||
function getJlptLevelLabel(token: MergedToken): string | null {
|
|
||||||
return token.jlptLevel ?? null;
|
|
||||||
}
|
|
||||||
|
|
||||||
function computeWordClass(token: MergedToken, options: SubtitleWebsocketFrequencyOptions): string {
|
function computeWordClass(token: MergedToken, options: SubtitleWebsocketFrequencyOptions): string {
|
||||||
const classes = ['word'];
|
const classes = ['word'];
|
||||||
|
|
||||||
if (token.isNPlusOneTarget) {
|
if (token.isNPlusOneTarget) {
|
||||||
classes.push('word-n-plus-one');
|
classes.push('word-n-plus-one');
|
||||||
} else if (token.isNameMatch) {
|
|
||||||
classes.push('word-name-match');
|
|
||||||
} else if (token.isKnown) {
|
} else if (token.isKnown) {
|
||||||
classes.push('word-known');
|
classes.push('word-known');
|
||||||
}
|
}
|
||||||
@@ -97,7 +59,7 @@ function computeWordClass(token: MergedToken, options: SubtitleWebsocketFrequenc
|
|||||||
classes.push(`word-jlpt-${token.jlptLevel.toLowerCase()}`);
|
classes.push(`word-jlpt-${token.jlptLevel.toLowerCase()}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!token.isKnown && !token.isNPlusOneTarget && !token.isNameMatch) {
|
if (!token.isKnown && !token.isNPlusOneTarget) {
|
||||||
const frequencyClass = computeFrequencyClass(token, options);
|
const frequencyClass = computeFrequencyClass(token, options);
|
||||||
if (frequencyClass) {
|
if (frequencyClass) {
|
||||||
classes.push(frequencyClass);
|
classes.push(frequencyClass);
|
||||||
@@ -107,55 +69,6 @@ function computeWordClass(token: MergedToken, options: SubtitleWebsocketFrequenc
|
|||||||
return classes.join(' ');
|
return classes.join(' ');
|
||||||
}
|
}
|
||||||
|
|
||||||
function serializeWordDataAttributes(
|
|
||||||
token: MergedToken,
|
|
||||||
options: SubtitleWebsocketFrequencyOptions,
|
|
||||||
): string {
|
|
||||||
const attributes: string[] = [];
|
|
||||||
|
|
||||||
if (token.reading) {
|
|
||||||
attributes.push(`data-reading="${escapeHtml(token.reading)}"`);
|
|
||||||
}
|
|
||||||
if (token.headword) {
|
|
||||||
attributes.push(`data-headword="${escapeHtml(token.headword)}"`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const frequencyRankLabel = getFrequencyRankLabel(token, options);
|
|
||||||
if (frequencyRankLabel) {
|
|
||||||
attributes.push(`data-frequency-rank="${escapeHtml(frequencyRankLabel)}"`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const jlptLevelLabel = getJlptLevelLabel(token);
|
|
||||||
if (jlptLevelLabel) {
|
|
||||||
attributes.push(`data-jlpt-level="${escapeHtml(jlptLevelLabel)}"`);
|
|
||||||
}
|
|
||||||
|
|
||||||
return attributes.length > 0 ? ` ${attributes.join(' ')}` : '';
|
|
||||||
}
|
|
||||||
|
|
||||||
function serializeSubtitleToken(
|
|
||||||
token: MergedToken,
|
|
||||||
options: SubtitleWebsocketFrequencyOptions,
|
|
||||||
): SerializedSubtitleToken {
|
|
||||||
return {
|
|
||||||
surface: token.surface,
|
|
||||||
reading: token.reading,
|
|
||||||
headword: token.headword,
|
|
||||||
startPos: token.startPos,
|
|
||||||
endPos: token.endPos,
|
|
||||||
partOfSpeech: token.partOfSpeech,
|
|
||||||
isMerged: token.isMerged,
|
|
||||||
isKnown: token.isKnown,
|
|
||||||
isNPlusOneTarget: token.isNPlusOneTarget,
|
|
||||||
isNameMatch: token.isNameMatch ?? false,
|
|
||||||
jlptLevel: token.jlptLevel,
|
|
||||||
frequencyRank: token.frequencyRank,
|
|
||||||
className: computeWordClass(token, options),
|
|
||||||
frequencyRankLabel: getFrequencyRankLabel(token, options),
|
|
||||||
jlptLevelLabel: getJlptLevelLabel(token),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
export function serializeSubtitleMarkup(
|
export function serializeSubtitleMarkup(
|
||||||
payload: SubtitleData,
|
payload: SubtitleData,
|
||||||
options: SubtitleWebsocketFrequencyOptions,
|
options: SubtitleWebsocketFrequencyOptions,
|
||||||
@@ -167,12 +80,11 @@ export function serializeSubtitleMarkup(
|
|||||||
const chunks: string[] = [];
|
const chunks: string[] = [];
|
||||||
for (const token of payload.tokens) {
|
for (const token of payload.tokens) {
|
||||||
const klass = computeWordClass(token, options);
|
const klass = computeWordClass(token, options);
|
||||||
const attrs = serializeWordDataAttributes(token, options);
|
|
||||||
const parts = token.surface.split('\n');
|
const parts = token.surface.split('\n');
|
||||||
for (let index = 0; index < parts.length; index += 1) {
|
for (let index = 0; index < parts.length; index += 1) {
|
||||||
const part = parts[index];
|
const part = parts[index];
|
||||||
if (part) {
|
if (part) {
|
||||||
chunks.push(`<span class="${klass}"${attrs}>${escapeHtml(part)}</span>`);
|
chunks.push(`<span class="${klass}">${escapeHtml(part)}</span>`);
|
||||||
}
|
}
|
||||||
if (index < parts.length - 1) {
|
if (index < parts.length - 1) {
|
||||||
chunks.push('<br>');
|
chunks.push('<br>');
|
||||||
@@ -187,23 +99,7 @@ export function serializeSubtitleWebsocketMessage(
|
|||||||
payload: SubtitleData,
|
payload: SubtitleData,
|
||||||
options: SubtitleWebsocketFrequencyOptions,
|
options: SubtitleWebsocketFrequencyOptions,
|
||||||
): string {
|
): string {
|
||||||
return JSON.stringify({
|
return JSON.stringify({ sentence: serializeSubtitleMarkup(payload, options) });
|
||||||
version: 1,
|
|
||||||
text: payload.text,
|
|
||||||
sentence: serializeSubtitleMarkup(payload, options),
|
|
||||||
tokens: payload.tokens?.map((token) => serializeSubtitleToken(token, options)) ?? [],
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
export function serializeInitialSubtitleWebsocketMessage(
|
|
||||||
payload: SubtitleData | null,
|
|
||||||
options: SubtitleWebsocketFrequencyOptions,
|
|
||||||
): string | null {
|
|
||||||
if (!payload || !payload.text.trim()) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
return serializeSubtitleWebsocketMessage(payload, options);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export class SubtitleWebSocket {
|
export class SubtitleWebSocket {
|
||||||
@@ -218,11 +114,7 @@ export class SubtitleWebSocket {
|
|||||||
return (this.server?.clients.size ?? 0) > 0;
|
return (this.server?.clients.size ?? 0) > 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
public start(
|
public start(port: number, getCurrentSubtitleText: () => string): void {
|
||||||
port: number,
|
|
||||||
getCurrentSubtitleData: () => SubtitleData | null,
|
|
||||||
getFrequencyOptions: () => SubtitleWebsocketFrequencyOptions,
|
|
||||||
): void {
|
|
||||||
this.server = new WebSocket.Server({ port, host: '127.0.0.1' });
|
this.server = new WebSocket.Server({ port, host: '127.0.0.1' });
|
||||||
|
|
||||||
this.server.on('connection', (ws: WebSocket) => {
|
this.server.on('connection', (ws: WebSocket) => {
|
||||||
@@ -232,12 +124,9 @@ export class SubtitleWebSocket {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const currentMessage = serializeInitialSubtitleWebsocketMessage(
|
const currentText = getCurrentSubtitleText();
|
||||||
getCurrentSubtitleData(),
|
if (currentText) {
|
||||||
getFrequencyOptions(),
|
ws.send(JSON.stringify({ sentence: currentText }));
|
||||||
);
|
|
||||||
if (currentMessage) {
|
|
||||||
ws.send(currentMessage);
|
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -1,27 +0,0 @@
|
|||||||
import assert from 'node:assert/strict';
|
|
||||||
import test from 'node:test';
|
|
||||||
import { injectTexthookerBootstrapHtml } from './texthooker';
|
|
||||||
|
|
||||||
test('injectTexthookerBootstrapHtml injects websocket bootstrap before head close', () => {
|
|
||||||
const html = '<html><head><title>Texthooker</title></head><body></body></html>';
|
|
||||||
|
|
||||||
const actual = injectTexthookerBootstrapHtml(html, 'ws://127.0.0.1:6678');
|
|
||||||
|
|
||||||
assert.match(
|
|
||||||
actual,
|
|
||||||
/window\.localStorage\.setItem\('bannou-texthooker-websocketUrl', "ws:\/\/127\.0\.0\.1:6678"\)/,
|
|
||||||
);
|
|
||||||
assert.ok(actual.indexOf('</script></head>') !== -1);
|
|
||||||
assert.ok(actual.includes('bannou-texthooker-websocketUrl'));
|
|
||||||
assert.ok(!actual.includes('bannou-texthooker-enableKnownWordColoring'));
|
|
||||||
assert.ok(!actual.includes('bannou-texthooker-enableNPlusOneColoring'));
|
|
||||||
assert.ok(!actual.includes('bannou-texthooker-enableNameMatchColoring'));
|
|
||||||
assert.ok(!actual.includes('bannou-texthooker-enableFrequencyColoring'));
|
|
||||||
assert.ok(!actual.includes('bannou-texthooker-enableJlptColoring'));
|
|
||||||
});
|
|
||||||
|
|
||||||
test('injectTexthookerBootstrapHtml leaves html unchanged without websocketUrl', () => {
|
|
||||||
const html = '<html><head></head><body></body></html>';
|
|
||||||
|
|
||||||
assert.equal(injectTexthookerBootstrapHtml(html), html);
|
|
||||||
});
|
|
||||||
@@ -5,22 +5,6 @@ import { createLogger } from '../../logger';
|
|||||||
|
|
||||||
const logger = createLogger('main:texthooker');
|
const logger = createLogger('main:texthooker');
|
||||||
|
|
||||||
export function injectTexthookerBootstrapHtml(html: string, websocketUrl?: string): string {
|
|
||||||
if (!websocketUrl) {
|
|
||||||
return html;
|
|
||||||
}
|
|
||||||
|
|
||||||
const bootstrapScript = `<script>window.localStorage.setItem('bannou-texthooker-websocketUrl', ${JSON.stringify(
|
|
||||||
websocketUrl,
|
|
||||||
)});</script>`;
|
|
||||||
|
|
||||||
if (html.includes('</head>')) {
|
|
||||||
return html.replace('</head>', `${bootstrapScript}</head>`);
|
|
||||||
}
|
|
||||||
|
|
||||||
return `${bootstrapScript}${html}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
export class Texthooker {
|
export class Texthooker {
|
||||||
private server: http.Server | null = null;
|
private server: http.Server | null = null;
|
||||||
|
|
||||||
@@ -28,11 +12,7 @@ export class Texthooker {
|
|||||||
return this.server !== null;
|
return this.server !== null;
|
||||||
}
|
}
|
||||||
|
|
||||||
public start(port: number, websocketUrl?: string): http.Server | null {
|
public start(port: number): http.Server | null {
|
||||||
if (this.server) {
|
|
||||||
return this.server;
|
|
||||||
}
|
|
||||||
|
|
||||||
const texthookerPath = this.getTexthookerPath();
|
const texthookerPath = this.getTexthookerPath();
|
||||||
if (!texthookerPath) {
|
if (!texthookerPath) {
|
||||||
logger.error('texthooker-ui not found');
|
logger.error('texthooker-ui not found');
|
||||||
@@ -62,12 +42,8 @@ export class Texthooker {
|
|||||||
res.end('Not found');
|
res.end('Not found');
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
const responseData =
|
|
||||||
urlPath === '/' || urlPath === '/index.html'
|
|
||||||
? Buffer.from(injectTexthookerBootstrapHtml(data.toString('utf-8'), websocketUrl))
|
|
||||||
: data;
|
|
||||||
res.writeHead(200, { 'Content-Type': mimeTypes[ext] || 'text/plain' });
|
res.writeHead(200, { 'Content-Type': mimeTypes[ext] || 'text/plain' });
|
||||||
res.end(responseData);
|
res.end(data);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -24,7 +24,6 @@ interface YomitanTokenInput {
|
|||||||
surface: string;
|
surface: string;
|
||||||
reading?: string;
|
reading?: string;
|
||||||
headword?: string;
|
headword?: string;
|
||||||
isNameMatch?: boolean;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
function makeDepsFromYomitanTokens(
|
function makeDepsFromYomitanTokens(
|
||||||
@@ -54,7 +53,6 @@ function makeDepsFromYomitanTokens(
|
|||||||
headword: token.headword ?? token.surface,
|
headword: token.headword ?? token.surface,
|
||||||
startPos,
|
startPos,
|
||||||
endPos,
|
endPos,
|
||||||
isNameMatch: token.isNameMatch ?? false,
|
|
||||||
};
|
};
|
||||||
});
|
});
|
||||||
},
|
},
|
||||||
@@ -117,20 +115,6 @@ test('tokenizeSubtitle assigns JLPT level to parsed Yomitan tokens', async () =>
|
|||||||
assert.equal(result.tokens?.[0]?.jlptLevel, 'N5');
|
assert.equal(result.tokens?.[0]?.jlptLevel, 'N5');
|
||||||
});
|
});
|
||||||
|
|
||||||
test('tokenizeSubtitle preserves Yomitan name-match metadata on tokens', async () => {
|
|
||||||
const result = await tokenizeSubtitle(
|
|
||||||
'アクアです',
|
|
||||||
makeDepsFromYomitanTokens([
|
|
||||||
{ surface: 'アクア', reading: 'あくあ', headword: 'アクア', isNameMatch: true },
|
|
||||||
{ surface: 'です', reading: 'です', headword: 'です' },
|
|
||||||
]),
|
|
||||||
);
|
|
||||||
|
|
||||||
assert.equal(result.tokens?.length, 2);
|
|
||||||
assert.equal((result.tokens?.[0] as { isNameMatch?: boolean } | undefined)?.isNameMatch, true);
|
|
||||||
assert.equal((result.tokens?.[1] as { isNameMatch?: boolean } | undefined)?.isNameMatch, false);
|
|
||||||
});
|
|
||||||
|
|
||||||
test('tokenizeSubtitle caches JLPT lookups across repeated tokens', async () => {
|
test('tokenizeSubtitle caches JLPT lookups across repeated tokens', async () => {
|
||||||
let lookupCalls = 0;
|
let lookupCalls = 0;
|
||||||
const result = await tokenizeSubtitle(
|
const result = await tokenizeSubtitle(
|
||||||
@@ -1251,30 +1235,6 @@ test('tokenizeSubtitle normalizes newlines before Yomitan parse request', async
|
|||||||
assert.equal(result.tokens, null);
|
assert.equal(result.tokens, null);
|
||||||
});
|
});
|
||||||
|
|
||||||
test('tokenizeSubtitle collapses zero-width separators before Yomitan parse request', async () => {
|
|
||||||
let parseInput = '';
|
|
||||||
const result = await tokenizeSubtitle(
|
|
||||||
'キリキリと\u200bかかってこい\nこのヘナチョコ冒険者どもめが!',
|
|
||||||
makeDeps({
|
|
||||||
getYomitanExt: () => ({ id: 'dummy-ext' }) as any,
|
|
||||||
getYomitanParserWindow: () =>
|
|
||||||
({
|
|
||||||
isDestroyed: () => false,
|
|
||||||
webContents: {
|
|
||||||
executeJavaScript: async (script: string) => {
|
|
||||||
parseInput = script;
|
|
||||||
return null;
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}) as unknown as Electron.BrowserWindow,
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
|
|
||||||
assert.match(parseInput, /キリキリと かかってこい このヘナチョコ冒険者どもめが!/);
|
|
||||||
assert.equal(result.text, 'キリキリと\u200bかかってこい\nこのヘナチョコ冒険者どもめが!');
|
|
||||||
assert.equal(result.tokens, null);
|
|
||||||
});
|
|
||||||
|
|
||||||
test('tokenizeSubtitle returns null tokens when Yomitan parsing is unavailable', async () => {
|
test('tokenizeSubtitle returns null tokens when Yomitan parsing is unavailable', async () => {
|
||||||
const result = await tokenizeSubtitle('猫です', makeDeps());
|
const result = await tokenizeSubtitle('猫です', makeDeps());
|
||||||
|
|
||||||
@@ -1861,9 +1821,9 @@ test('tokenizeSubtitle keeps parsing explicit by scanning-parser source only', a
|
|||||||
assert.equal(result.tokens?.[4]?.frequencyRank, 1500);
|
assert.equal(result.tokens?.[4]?.frequencyRank, 1500);
|
||||||
});
|
});
|
||||||
|
|
||||||
test('tokenizeSubtitle still assigns frequency to non-known multi-character Yomitan tokens', async () => {
|
test('tokenizeSubtitle still assigns frequency to non-known Yomitan tokens', async () => {
|
||||||
const result = await tokenizeSubtitle(
|
const result = await tokenizeSubtitle(
|
||||||
'小園友達',
|
'小園に',
|
||||||
makeDeps({
|
makeDeps({
|
||||||
getYomitanExt: () => ({ id: 'dummy-ext' }) as any,
|
getYomitanExt: () => ({ id: 'dummy-ext' }) as any,
|
||||||
getYomitanParserWindow: () =>
|
getYomitanParserWindow: () =>
|
||||||
@@ -1884,9 +1844,9 @@ test('tokenizeSubtitle still assigns frequency to non-known multi-character Yomi
|
|||||||
],
|
],
|
||||||
[
|
[
|
||||||
{
|
{
|
||||||
text: '友達',
|
text: 'に',
|
||||||
reading: 'ともだち',
|
reading: 'に',
|
||||||
headwords: [[{ term: '友達' }]],
|
headwords: [[{ term: 'に' }]],
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
],
|
],
|
||||||
@@ -1895,7 +1855,7 @@ test('tokenizeSubtitle still assigns frequency to non-known multi-character Yomi
|
|||||||
},
|
},
|
||||||
}) as unknown as Electron.BrowserWindow,
|
}) as unknown as Electron.BrowserWindow,
|
||||||
getFrequencyDictionaryEnabled: () => true,
|
getFrequencyDictionaryEnabled: () => true,
|
||||||
getFrequencyRank: (text) => (text === '小園' ? 75 : text === '友達' ? 3000 : null),
|
getFrequencyRank: (text) => (text === '小園' ? 75 : text === 'に' ? 3000 : null),
|
||||||
isKnownWord: (text) => text === '小園',
|
isKnownWord: (text) => text === '小園',
|
||||||
}),
|
}),
|
||||||
);
|
);
|
||||||
@@ -2635,21 +2595,6 @@ test('tokenizeSubtitle excludes default non-independent pos2 from N+1 and freque
|
|||||||
assert.equal(result.tokens?.[0]?.isNPlusOneTarget, false);
|
assert.equal(result.tokens?.[0]?.isNPlusOneTarget, false);
|
||||||
});
|
});
|
||||||
|
|
||||||
test('tokenizeSubtitle excludes single-kana merged tokens from frequency highlighting', async () => {
|
|
||||||
const result = await tokenizeSubtitle(
|
|
||||||
'た',
|
|
||||||
makeDepsFromYomitanTokens([{ surface: 'た', reading: 'た', headword: 'た' }], {
|
|
||||||
getFrequencyDictionaryEnabled: () => true,
|
|
||||||
getFrequencyRank: (text) => (text === 'た' ? 17 : null),
|
|
||||||
getMinSentenceWordsForNPlusOne: () => 1,
|
|
||||||
tokenizeWithMecab: async () => null,
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
|
|
||||||
assert.equal(result.tokens?.length, 1);
|
|
||||||
assert.equal(result.tokens?.[0]?.frequencyRank, undefined);
|
|
||||||
});
|
|
||||||
|
|
||||||
test('tokenizeSubtitle excludes merged function/content token from frequency highlighting but keeps N+1', async () => {
|
test('tokenizeSubtitle excludes merged function/content token from frequency highlighting but keeps N+1', async () => {
|
||||||
const result = await tokenizeSubtitle(
|
const result = await tokenizeSubtitle(
|
||||||
'になれば',
|
'になれば',
|
||||||
|
|||||||
@@ -44,7 +44,6 @@ export interface TokenizerServiceDeps {
|
|||||||
getJlptLevel: (text: string) => JlptLevel | null;
|
getJlptLevel: (text: string) => JlptLevel | null;
|
||||||
getNPlusOneEnabled?: () => boolean;
|
getNPlusOneEnabled?: () => boolean;
|
||||||
getJlptEnabled?: () => boolean;
|
getJlptEnabled?: () => boolean;
|
||||||
getNameMatchEnabled?: () => boolean;
|
|
||||||
getFrequencyDictionaryEnabled?: () => boolean;
|
getFrequencyDictionaryEnabled?: () => boolean;
|
||||||
getFrequencyDictionaryMatchMode?: () => FrequencyDictionaryMatchMode;
|
getFrequencyDictionaryMatchMode?: () => FrequencyDictionaryMatchMode;
|
||||||
getFrequencyRank?: FrequencyDictionaryLookup;
|
getFrequencyRank?: FrequencyDictionaryLookup;
|
||||||
@@ -74,7 +73,6 @@ export interface TokenizerDepsRuntimeOptions {
|
|||||||
getJlptLevel: (text: string) => JlptLevel | null;
|
getJlptLevel: (text: string) => JlptLevel | null;
|
||||||
getNPlusOneEnabled?: () => boolean;
|
getNPlusOneEnabled?: () => boolean;
|
||||||
getJlptEnabled?: () => boolean;
|
getJlptEnabled?: () => boolean;
|
||||||
getNameMatchEnabled?: () => boolean;
|
|
||||||
getFrequencyDictionaryEnabled?: () => boolean;
|
getFrequencyDictionaryEnabled?: () => boolean;
|
||||||
getFrequencyDictionaryMatchMode?: () => FrequencyDictionaryMatchMode;
|
getFrequencyDictionaryMatchMode?: () => FrequencyDictionaryMatchMode;
|
||||||
getFrequencyRank?: FrequencyDictionaryLookup;
|
getFrequencyRank?: FrequencyDictionaryLookup;
|
||||||
@@ -87,7 +85,6 @@ export interface TokenizerDepsRuntimeOptions {
|
|||||||
interface TokenizerAnnotationOptions {
|
interface TokenizerAnnotationOptions {
|
||||||
nPlusOneEnabled: boolean;
|
nPlusOneEnabled: boolean;
|
||||||
jlptEnabled: boolean;
|
jlptEnabled: boolean;
|
||||||
nameMatchEnabled: boolean;
|
|
||||||
frequencyEnabled: boolean;
|
frequencyEnabled: boolean;
|
||||||
frequencyMatchMode: FrequencyDictionaryMatchMode;
|
frequencyMatchMode: FrequencyDictionaryMatchMode;
|
||||||
minSentenceWordsForNPlusOne: number | undefined;
|
minSentenceWordsForNPlusOne: number | undefined;
|
||||||
@@ -109,7 +106,6 @@ const DEFAULT_ANNOTATION_POS1_EXCLUSIONS = resolveAnnotationPos1ExclusionSet(
|
|||||||
const DEFAULT_ANNOTATION_POS2_EXCLUSIONS = resolveAnnotationPos2ExclusionSet(
|
const DEFAULT_ANNOTATION_POS2_EXCLUSIONS = resolveAnnotationPos2ExclusionSet(
|
||||||
DEFAULT_ANNOTATION_POS2_EXCLUSION_CONFIG,
|
DEFAULT_ANNOTATION_POS2_EXCLUSION_CONFIG,
|
||||||
);
|
);
|
||||||
const INVISIBLE_SEPARATOR_PATTERN = /[\u200b\u2060\ufeff]/g;
|
|
||||||
|
|
||||||
function getKnownWordLookup(
|
function getKnownWordLookup(
|
||||||
deps: TokenizerServiceDeps,
|
deps: TokenizerServiceDeps,
|
||||||
@@ -193,7 +189,6 @@ export function createTokenizerDepsRuntime(
|
|||||||
getJlptLevel: options.getJlptLevel,
|
getJlptLevel: options.getJlptLevel,
|
||||||
getNPlusOneEnabled: options.getNPlusOneEnabled,
|
getNPlusOneEnabled: options.getNPlusOneEnabled,
|
||||||
getJlptEnabled: options.getJlptEnabled,
|
getJlptEnabled: options.getJlptEnabled,
|
||||||
getNameMatchEnabled: options.getNameMatchEnabled,
|
|
||||||
getFrequencyDictionaryEnabled: options.getFrequencyDictionaryEnabled,
|
getFrequencyDictionaryEnabled: options.getFrequencyDictionaryEnabled,
|
||||||
getFrequencyDictionaryMatchMode: options.getFrequencyDictionaryMatchMode ?? (() => 'headword'),
|
getFrequencyDictionaryMatchMode: options.getFrequencyDictionaryMatchMode ?? (() => 'headword'),
|
||||||
getFrequencyRank: options.getFrequencyRank,
|
getFrequencyRank: options.getFrequencyRank,
|
||||||
@@ -305,7 +300,6 @@ function normalizeSelectedYomitanTokens(tokens: MergedToken[]): MergedToken[] {
|
|||||||
isMerged: token.isMerged ?? true,
|
isMerged: token.isMerged ?? true,
|
||||||
isKnown: token.isKnown ?? false,
|
isKnown: token.isKnown ?? false,
|
||||||
isNPlusOneTarget: token.isNPlusOneTarget ?? false,
|
isNPlusOneTarget: token.isNPlusOneTarget ?? false,
|
||||||
isNameMatch: token.isNameMatch ?? false,
|
|
||||||
reading: normalizeYomitanMergedReading(token),
|
reading: normalizeYomitanMergedReading(token),
|
||||||
}));
|
}));
|
||||||
}
|
}
|
||||||
@@ -465,7 +459,6 @@ function getAnnotationOptions(deps: TokenizerServiceDeps): TokenizerAnnotationOp
|
|||||||
return {
|
return {
|
||||||
nPlusOneEnabled: deps.getNPlusOneEnabled?.() !== false,
|
nPlusOneEnabled: deps.getNPlusOneEnabled?.() !== false,
|
||||||
jlptEnabled: deps.getJlptEnabled?.() !== false,
|
jlptEnabled: deps.getJlptEnabled?.() !== false,
|
||||||
nameMatchEnabled: deps.getNameMatchEnabled?.() !== false,
|
|
||||||
frequencyEnabled: deps.getFrequencyDictionaryEnabled?.() !== false,
|
frequencyEnabled: deps.getFrequencyDictionaryEnabled?.() !== false,
|
||||||
frequencyMatchMode: deps.getFrequencyDictionaryMatchMode?.() ?? 'headword',
|
frequencyMatchMode: deps.getFrequencyDictionaryMatchMode?.() ?? 'headword',
|
||||||
minSentenceWordsForNPlusOne: deps.getMinSentenceWordsForNPlusOne?.(),
|
minSentenceWordsForNPlusOne: deps.getMinSentenceWordsForNPlusOne?.(),
|
||||||
@@ -479,9 +472,7 @@ async function parseWithYomitanInternalParser(
|
|||||||
deps: TokenizerServiceDeps,
|
deps: TokenizerServiceDeps,
|
||||||
options: TokenizerAnnotationOptions,
|
options: TokenizerAnnotationOptions,
|
||||||
): Promise<MergedToken[] | null> {
|
): Promise<MergedToken[] | null> {
|
||||||
const selectedTokens = await requestYomitanScanTokens(text, deps, logger, {
|
const selectedTokens = await requestYomitanScanTokens(text, deps, logger);
|
||||||
includeNameMatchMetadata: options.nameMatchEnabled,
|
|
||||||
});
|
|
||||||
if (!selectedTokens || selectedTokens.length === 0) {
|
if (!selectedTokens || selectedTokens.length === 0) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
@@ -497,7 +488,6 @@ async function parseWithYomitanInternalParser(
|
|||||||
isMerged: true,
|
isMerged: true,
|
||||||
isKnown: false,
|
isKnown: false,
|
||||||
isNPlusOneTarget: false,
|
isNPlusOneTarget: false,
|
||||||
isNameMatch: token.isNameMatch ?? false,
|
|
||||||
}),
|
}),
|
||||||
),
|
),
|
||||||
);
|
);
|
||||||
@@ -573,11 +563,7 @@ export async function tokenizeSubtitle(
|
|||||||
return { text, tokens: null };
|
return { text, tokens: null };
|
||||||
}
|
}
|
||||||
|
|
||||||
const tokenizeText = displayText
|
const tokenizeText = displayText.replace(/\n/g, ' ').replace(/\s+/g, ' ').trim();
|
||||||
.replace(INVISIBLE_SEPARATOR_PATTERN, ' ')
|
|
||||||
.replace(/\n/g, ' ')
|
|
||||||
.replace(/\s+/g, ' ')
|
|
||||||
.trim();
|
|
||||||
const annotationOptions = getAnnotationOptions(deps);
|
const annotationOptions = getAnnotationOptions(deps);
|
||||||
|
|
||||||
const yomitanTokens = await parseWithYomitanInternalParser(tokenizeText, deps, annotationOptions);
|
const yomitanTokens = await parseWithYomitanInternalParser(tokenizeText, deps, annotationOptions);
|
||||||
|
|||||||
@@ -252,12 +252,12 @@ test('annotateTokens applies configured pos1 exclusions to both frequency and N+
|
|||||||
test('annotateTokens allows previously default-excluded pos1 when removed from effective set', () => {
|
test('annotateTokens allows previously default-excluded pos1 when removed from effective set', () => {
|
||||||
const tokens = [
|
const tokens = [
|
||||||
makeToken({
|
makeToken({
|
||||||
surface: 'まで',
|
surface: 'は',
|
||||||
headword: 'まで',
|
headword: 'は',
|
||||||
partOfSpeech: PartOfSpeech.other,
|
partOfSpeech: PartOfSpeech.other,
|
||||||
pos1: '助詞',
|
pos1: '助詞',
|
||||||
startPos: 0,
|
startPos: 0,
|
||||||
endPos: 2,
|
endPos: 1,
|
||||||
frequencyRank: 8,
|
frequencyRank: 8,
|
||||||
}),
|
}),
|
||||||
];
|
];
|
||||||
@@ -314,52 +314,6 @@ test('annotateTokens excludes likely kana SFX tokens from frequency when POS tag
|
|||||||
assert.equal(result[0]?.frequencyRank, undefined);
|
assert.equal(result[0]?.frequencyRank, undefined);
|
||||||
});
|
});
|
||||||
|
|
||||||
test('annotateTokens excludes single hiragana and katakana tokens from frequency when POS tags are missing', () => {
|
|
||||||
const tokens = [
|
|
||||||
makeToken({
|
|
||||||
surface: 'た',
|
|
||||||
reading: 'た',
|
|
||||||
headword: 'た',
|
|
||||||
pos1: '',
|
|
||||||
pos2: '',
|
|
||||||
partOfSpeech: PartOfSpeech.other,
|
|
||||||
frequencyRank: 21,
|
|
||||||
startPos: 0,
|
|
||||||
endPos: 1,
|
|
||||||
}),
|
|
||||||
makeToken({
|
|
||||||
surface: 'ア',
|
|
||||||
reading: 'ア',
|
|
||||||
headword: 'ア',
|
|
||||||
pos1: '',
|
|
||||||
pos2: '',
|
|
||||||
partOfSpeech: PartOfSpeech.other,
|
|
||||||
frequencyRank: 22,
|
|
||||||
startPos: 1,
|
|
||||||
endPos: 2,
|
|
||||||
}),
|
|
||||||
makeToken({
|
|
||||||
surface: '山',
|
|
||||||
reading: 'やま',
|
|
||||||
headword: '山',
|
|
||||||
pos1: '',
|
|
||||||
pos2: '',
|
|
||||||
partOfSpeech: PartOfSpeech.other,
|
|
||||||
frequencyRank: 23,
|
|
||||||
startPos: 2,
|
|
||||||
endPos: 3,
|
|
||||||
}),
|
|
||||||
];
|
|
||||||
|
|
||||||
const result = annotateTokens(tokens, makeDeps(), {
|
|
||||||
minSentenceWordsForNPlusOne: 1,
|
|
||||||
});
|
|
||||||
|
|
||||||
assert.equal(result[0]?.frequencyRank, undefined);
|
|
||||||
assert.equal(result[1]?.frequencyRank, undefined);
|
|
||||||
assert.equal(result[2]?.frequencyRank, 23);
|
|
||||||
});
|
|
||||||
|
|
||||||
test('annotateTokens keeps frequency when mecab tags classify token as content-bearing', () => {
|
test('annotateTokens keeps frequency when mecab tags classify token as content-bearing', () => {
|
||||||
const tokens = [
|
const tokens = [
|
||||||
makeToken({
|
makeToken({
|
||||||
|
|||||||
@@ -103,10 +103,6 @@ function isFrequencyExcludedByPos(
|
|||||||
pos1Exclusions: ReadonlySet<string>,
|
pos1Exclusions: ReadonlySet<string>,
|
||||||
pos2Exclusions: ReadonlySet<string>,
|
pos2Exclusions: ReadonlySet<string>,
|
||||||
): boolean {
|
): boolean {
|
||||||
if (isSingleKanaFrequencyNoiseToken(token.surface)) {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
const normalizedPos1 = normalizePos1Tag(token.pos1);
|
const normalizedPos1 = normalizePos1Tag(token.pos1);
|
||||||
const hasPos1 = normalizedPos1.length > 0;
|
const hasPos1 = normalizedPos1.length > 0;
|
||||||
if (isExcludedByTagSet(normalizedPos1, pos1Exclusions)) {
|
if (isExcludedByTagSet(normalizedPos1, pos1Exclusions)) {
|
||||||
@@ -367,20 +363,6 @@ function isLikelyFrequencyNoiseToken(token: MergedToken): boolean {
|
|||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
function isSingleKanaFrequencyNoiseToken(text: string | undefined): boolean {
|
|
||||||
if (typeof text !== 'string') {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
const normalized = text.trim();
|
|
||||||
if (!normalized) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
const chars = [...normalized];
|
|
||||||
return chars.length === 1 && isKanaChar(chars[0]!);
|
|
||||||
}
|
|
||||||
|
|
||||||
function isJlptEligibleToken(token: MergedToken): boolean {
|
function isJlptEligibleToken(token: MergedToken): boolean {
|
||||||
if (token.pos1 && shouldIgnoreJlptForMecabPos1(token.pos1)) {
|
if (token.pos1 && shouldIgnoreJlptForMecabPos1(token.pos1)) {
|
||||||
return false;
|
return false;
|
||||||
|
|||||||
@@ -3,12 +3,12 @@ import * as fs from 'fs';
|
|||||||
import * as os from 'os';
|
import * as os from 'os';
|
||||||
import * as path from 'path';
|
import * as path from 'path';
|
||||||
import test from 'node:test';
|
import test from 'node:test';
|
||||||
import * as vm from 'node:vm';
|
|
||||||
import {
|
import {
|
||||||
getYomitanDictionaryInfo,
|
getYomitanDictionaryInfo,
|
||||||
importYomitanDictionaryFromZip,
|
importYomitanDictionaryFromZip,
|
||||||
deleteYomitanDictionaryByTitle,
|
deleteYomitanDictionaryByTitle,
|
||||||
removeYomitanDictionarySettings,
|
removeYomitanDictionarySettings,
|
||||||
|
requestYomitanParseResults,
|
||||||
requestYomitanScanTokens,
|
requestYomitanScanTokens,
|
||||||
requestYomitanTermFrequencies,
|
requestYomitanTermFrequencies,
|
||||||
syncYomitanDefaultAnkiServer,
|
syncYomitanDefaultAnkiServer,
|
||||||
@@ -40,40 +40,6 @@ function createDeps(
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
async function runInjectedYomitanScript(
|
|
||||||
script: string,
|
|
||||||
handler: (action: string, params: unknown) => unknown,
|
|
||||||
): Promise<unknown> {
|
|
||||||
return await vm.runInNewContext(script, {
|
|
||||||
chrome: {
|
|
||||||
runtime: {
|
|
||||||
lastError: null,
|
|
||||||
sendMessage: (
|
|
||||||
payload: { action?: string; params?: unknown },
|
|
||||||
callback: (response: { result?: unknown; error?: { message?: string } }) => void,
|
|
||||||
) => {
|
|
||||||
try {
|
|
||||||
callback({ result: handler(payload.action ?? '', payload.params) });
|
|
||||||
} catch (error) {
|
|
||||||
callback({ error: { message: (error as Error).message } });
|
|
||||||
}
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Array,
|
|
||||||
Error,
|
|
||||||
JSON,
|
|
||||||
Map,
|
|
||||||
Math,
|
|
||||||
Number,
|
|
||||||
Object,
|
|
||||||
Promise,
|
|
||||||
RegExp,
|
|
||||||
Set,
|
|
||||||
String,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
test('syncYomitanDefaultAnkiServer updates default profile server when script reports update', async () => {
|
test('syncYomitanDefaultAnkiServer updates default profile server when script reports update', async () => {
|
||||||
let scriptValue = '';
|
let scriptValue = '';
|
||||||
const deps = createDeps(async (script) => {
|
const deps = createDeps(async (script) => {
|
||||||
@@ -485,331 +451,6 @@ test('requestYomitanScanTokens uses left-to-right termsFind scanning instead of
|
|||||||
assert.match(scannerScript ?? '', /deinflect:\s*true/);
|
assert.match(scannerScript ?? '', /deinflect:\s*true/);
|
||||||
});
|
});
|
||||||
|
|
||||||
test('requestYomitanScanTokens marks tokens backed by SubMiner character dictionary entries', async () => {
|
|
||||||
const deps = createDeps(async (script) => {
|
|
||||||
if (script.includes('optionsGetFull')) {
|
|
||||||
return {
|
|
||||||
profileCurrent: 0,
|
|
||||||
profiles: [
|
|
||||||
{
|
|
||||||
options: {
|
|
||||||
scanning: { length: 40 },
|
|
||||||
},
|
|
||||||
},
|
|
||||||
],
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
return [
|
|
||||||
{
|
|
||||||
surface: 'アクア',
|
|
||||||
reading: 'あくあ',
|
|
||||||
headword: 'アクア',
|
|
||||||
startPos: 0,
|
|
||||||
endPos: 3,
|
|
||||||
isNameMatch: true,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
surface: 'です',
|
|
||||||
reading: 'です',
|
|
||||||
headword: 'です',
|
|
||||||
startPos: 3,
|
|
||||||
endPos: 5,
|
|
||||||
isNameMatch: false,
|
|
||||||
},
|
|
||||||
];
|
|
||||||
});
|
|
||||||
|
|
||||||
const result = await requestYomitanScanTokens('アクアです', deps, {
|
|
||||||
error: () => undefined,
|
|
||||||
});
|
|
||||||
|
|
||||||
assert.equal(result?.length, 2);
|
|
||||||
assert.equal((result?.[0] as { isNameMatch?: boolean } | undefined)?.isNameMatch, true);
|
|
||||||
assert.equal((result?.[1] as { isNameMatch?: boolean } | undefined)?.isNameMatch, false);
|
|
||||||
});
|
|
||||||
|
|
||||||
test('requestYomitanScanTokens skips name-match work when disabled', async () => {
|
|
||||||
let scannerScript = '';
|
|
||||||
const deps = createDeps(async (script) => {
|
|
||||||
if (script.includes('termsFind')) {
|
|
||||||
scannerScript = script;
|
|
||||||
}
|
|
||||||
if (script.includes('optionsGetFull')) {
|
|
||||||
return {
|
|
||||||
profileCurrent: 0,
|
|
||||||
profiles: [
|
|
||||||
{
|
|
||||||
options: {
|
|
||||||
scanning: { length: 40 },
|
|
||||||
},
|
|
||||||
},
|
|
||||||
],
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
return [
|
|
||||||
{
|
|
||||||
surface: 'アクア',
|
|
||||||
reading: 'あくあ',
|
|
||||||
headword: 'アクア',
|
|
||||||
startPos: 0,
|
|
||||||
endPos: 3,
|
|
||||||
},
|
|
||||||
];
|
|
||||||
});
|
|
||||||
|
|
||||||
const result = await requestYomitanScanTokens(
|
|
||||||
'アクア',
|
|
||||||
deps,
|
|
||||||
{ error: () => undefined },
|
|
||||||
{ includeNameMatchMetadata: false },
|
|
||||||
);
|
|
||||||
|
|
||||||
assert.equal(result?.length, 1);
|
|
||||||
assert.equal((result?.[0] as { isNameMatch?: boolean } | undefined)?.isNameMatch, undefined);
|
|
||||||
assert.match(scannerScript, /const includeNameMatchMetadata = false;/);
|
|
||||||
});
|
|
||||||
|
|
||||||
test('requestYomitanScanTokens marks grouped entries when SubMiner dictionary alias only exists on definitions', async () => {
|
|
||||||
let scannerScript = '';
|
|
||||||
const deps = createDeps(async (script) => {
|
|
||||||
if (script.includes('termsFind')) {
|
|
||||||
scannerScript = script;
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
if (script.includes('optionsGetFull')) {
|
|
||||||
return {
|
|
||||||
profileCurrent: 0,
|
|
||||||
profiles: [
|
|
||||||
{
|
|
||||||
options: {
|
|
||||||
scanning: { length: 40 },
|
|
||||||
},
|
|
||||||
},
|
|
||||||
],
|
|
||||||
};
|
|
||||||
}
|
|
||||||
return null;
|
|
||||||
});
|
|
||||||
|
|
||||||
await requestYomitanScanTokens(
|
|
||||||
'カズマ',
|
|
||||||
deps,
|
|
||||||
{ error: () => undefined },
|
|
||||||
{ includeNameMatchMetadata: true },
|
|
||||||
);
|
|
||||||
|
|
||||||
assert.match(scannerScript, /getPreferredHeadword/);
|
|
||||||
|
|
||||||
const result = await runInjectedYomitanScript(scannerScript, (action, params) => {
|
|
||||||
if (action === 'termsFind') {
|
|
||||||
const text = (params as { text?: string } | undefined)?.text;
|
|
||||||
if (text === 'カズマ') {
|
|
||||||
return {
|
|
||||||
originalTextLength: 3,
|
|
||||||
dictionaryEntries: [
|
|
||||||
{
|
|
||||||
dictionaryAlias: '',
|
|
||||||
headwords: [
|
|
||||||
{
|
|
||||||
term: 'カズマ',
|
|
||||||
reading: 'かずま',
|
|
||||||
sources: [{ originalText: 'カズマ', isPrimary: true, matchType: 'exact' }],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
definitions: [
|
|
||||||
{ dictionary: 'JMdict', dictionaryAlias: 'JMdict' },
|
|
||||||
{
|
|
||||||
dictionary: 'SubMiner Character Dictionary (AniList 130298)',
|
|
||||||
dictionaryAlias: 'SubMiner Character Dictionary (AniList 130298)',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
};
|
|
||||||
}
|
|
||||||
return { originalTextLength: 0, dictionaryEntries: [] };
|
|
||||||
}
|
|
||||||
throw new Error(`unexpected action: ${action}`);
|
|
||||||
});
|
|
||||||
|
|
||||||
assert.equal(Array.isArray(result), true);
|
|
||||||
assert.equal((result as { length?: number } | null)?.length, 1);
|
|
||||||
assert.equal((result as Array<{ surface?: string }>)[0]?.surface, 'カズマ');
|
|
||||||
assert.equal((result as Array<{ headword?: string }>)[0]?.headword, 'カズマ');
|
|
||||||
assert.equal((result as Array<{ startPos?: number }>)[0]?.startPos, 0);
|
|
||||||
assert.equal((result as Array<{ endPos?: number }>)[0]?.endPos, 3);
|
|
||||||
assert.equal((result as Array<{ isNameMatch?: boolean }>)[0]?.isNameMatch, true);
|
|
||||||
});
|
|
||||||
|
|
||||||
test('requestYomitanScanTokens skips fallback fragments without exact primary source matches', async () => {
|
|
||||||
const deps = createDeps(async (script) => {
|
|
||||||
if (script.includes('optionsGetFull')) {
|
|
||||||
return {
|
|
||||||
profileCurrent: 0,
|
|
||||||
profiles: [
|
|
||||||
{
|
|
||||||
options: {
|
|
||||||
scanning: { length: 40 },
|
|
||||||
},
|
|
||||||
},
|
|
||||||
],
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
return await runInjectedYomitanScript(script, (action, params) => {
|
|
||||||
if (action !== 'termsFind') {
|
|
||||||
throw new Error(`unexpected action: ${action}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const text = (params as { text?: string } | undefined)?.text ?? '';
|
|
||||||
if (text.startsWith('だが ')) {
|
|
||||||
return {
|
|
||||||
originalTextLength: 2,
|
|
||||||
dictionaryEntries: [
|
|
||||||
{
|
|
||||||
headwords: [
|
|
||||||
{
|
|
||||||
term: 'だが',
|
|
||||||
reading: 'だが',
|
|
||||||
sources: [{ originalText: 'だが', isPrimary: true, matchType: 'exact' }],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
};
|
|
||||||
}
|
|
||||||
if (text.startsWith('それでも')) {
|
|
||||||
return {
|
|
||||||
originalTextLength: 4,
|
|
||||||
dictionaryEntries: [
|
|
||||||
{
|
|
||||||
headwords: [
|
|
||||||
{
|
|
||||||
term: 'それでも',
|
|
||||||
reading: 'それでも',
|
|
||||||
sources: [{ originalText: 'それでも', isPrimary: true, matchType: 'exact' }],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
};
|
|
||||||
}
|
|
||||||
if (text.startsWith('届かぬ')) {
|
|
||||||
return {
|
|
||||||
originalTextLength: 3,
|
|
||||||
dictionaryEntries: [
|
|
||||||
{
|
|
||||||
headwords: [
|
|
||||||
{
|
|
||||||
term: '届く',
|
|
||||||
reading: 'とどく',
|
|
||||||
sources: [{ originalText: '届かぬ', isPrimary: true, matchType: 'exact' }],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
};
|
|
||||||
}
|
|
||||||
if (text.startsWith('高み')) {
|
|
||||||
return {
|
|
||||||
originalTextLength: 2,
|
|
||||||
dictionaryEntries: [
|
|
||||||
{
|
|
||||||
headwords: [
|
|
||||||
{
|
|
||||||
term: '高み',
|
|
||||||
reading: 'たかみ',
|
|
||||||
sources: [{ originalText: '高み', isPrimary: true, matchType: 'exact' }],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
};
|
|
||||||
}
|
|
||||||
if (text.startsWith('があった')) {
|
|
||||||
return {
|
|
||||||
originalTextLength: 2,
|
|
||||||
dictionaryEntries: [
|
|
||||||
{
|
|
||||||
headwords: [
|
|
||||||
{
|
|
||||||
term: 'があ',
|
|
||||||
reading: '',
|
|
||||||
sources: [{ originalText: 'が', isPrimary: true, matchType: 'exact' }],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
};
|
|
||||||
}
|
|
||||||
if (text.startsWith('あった')) {
|
|
||||||
return {
|
|
||||||
originalTextLength: 3,
|
|
||||||
dictionaryEntries: [
|
|
||||||
{
|
|
||||||
headwords: [
|
|
||||||
{
|
|
||||||
term: 'ある',
|
|
||||||
reading: 'ある',
|
|
||||||
sources: [{ originalText: 'あった', isPrimary: true, matchType: 'exact' }],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
};
|
|
||||||
}
|
|
||||||
return { originalTextLength: 0, dictionaryEntries: [] };
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
const result = await requestYomitanScanTokens('だが それでも届かぬ高みがあった', deps, {
|
|
||||||
error: () => undefined,
|
|
||||||
});
|
|
||||||
|
|
||||||
assert.deepEqual(
|
|
||||||
result?.map((token) => ({
|
|
||||||
surface: token.surface,
|
|
||||||
headword: token.headword,
|
|
||||||
startPos: token.startPos,
|
|
||||||
endPos: token.endPos,
|
|
||||||
})),
|
|
||||||
[
|
|
||||||
{
|
|
||||||
surface: 'だが',
|
|
||||||
headword: 'だが',
|
|
||||||
startPos: 0,
|
|
||||||
endPos: 2,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
surface: 'それでも',
|
|
||||||
headword: 'それでも',
|
|
||||||
startPos: 3,
|
|
||||||
endPos: 7,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
surface: '届かぬ',
|
|
||||||
headword: '届く',
|
|
||||||
startPos: 7,
|
|
||||||
endPos: 10,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
surface: '高み',
|
|
||||||
headword: '高み',
|
|
||||||
startPos: 10,
|
|
||||||
endPos: 12,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
surface: 'あった',
|
|
||||||
headword: 'ある',
|
|
||||||
startPos: 13,
|
|
||||||
endPos: 16,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test('getYomitanDictionaryInfo requests dictionary info via backend action', async () => {
|
test('getYomitanDictionaryInfo requests dictionary info via backend action', async () => {
|
||||||
let scriptValue = '';
|
let scriptValue = '';
|
||||||
const deps = createDeps(async (script) => {
|
const deps = createDeps(async (script) => {
|
||||||
@@ -823,7 +464,7 @@ test('getYomitanDictionaryInfo requests dictionary info via backend action', asy
|
|||||||
assert.match(scriptValue, /getDictionaryInfo/);
|
assert.match(scriptValue, /getDictionaryInfo/);
|
||||||
});
|
});
|
||||||
|
|
||||||
test('dictionary settings helpers upsert and remove dictionary entries without reordering', async () => {
|
test('dictionary settings helpers upsert and remove dictionary entries', async () => {
|
||||||
const scripts: string[] = [];
|
const scripts: string[] = [];
|
||||||
const optionsFull = {
|
const optionsFull = {
|
||||||
profileCurrent: 0,
|
profileCurrent: 0,
|
||||||
@@ -873,8 +514,7 @@ test('dictionary settings helpers upsert and remove dictionary entries without r
|
|||||||
|
|
||||||
const upsertScript = scripts.find(
|
const upsertScript = scripts.find(
|
||||||
(script) =>
|
(script) =>
|
||||||
script.includes('setAllSettings') &&
|
script.includes('setAllSettings') && script.includes('"SubMiner Character Dictionary (AniList 1)"'),
|
||||||
script.includes('"SubMiner Character Dictionary (AniList 1)"'),
|
|
||||||
);
|
);
|
||||||
assert.ok(upsertScript);
|
assert.ok(upsertScript);
|
||||||
const jitendexOffset = upsertScript?.indexOf('"Jitendex"') ?? -1;
|
const jitendexOffset = upsertScript?.indexOf('"Jitendex"') ?? -1;
|
||||||
@@ -914,18 +554,9 @@ test('importYomitanDictionaryFromZip uses settings automation bridge instead of
|
|||||||
});
|
});
|
||||||
|
|
||||||
assert.equal(imported, true);
|
assert.equal(imported, true);
|
||||||
assert.equal(
|
assert.equal(scripts.some((script) => script.includes('__subminerYomitanSettingsAutomation')), true);
|
||||||
scripts.some((script) => script.includes('__subminerYomitanSettingsAutomation')),
|
assert.equal(scripts.some((script) => script.includes('importDictionaryArchiveBase64')), true);
|
||||||
true,
|
assert.equal(scripts.some((script) => script.includes('subminerImportDictionary')), false);
|
||||||
);
|
|
||||||
assert.equal(
|
|
||||||
scripts.some((script) => script.includes('importDictionaryArchiveBase64')),
|
|
||||||
true,
|
|
||||||
);
|
|
||||||
assert.equal(
|
|
||||||
scripts.some((script) => script.includes('subminerImportDictionary')),
|
|
||||||
false,
|
|
||||||
);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
test('deleteYomitanDictionaryByTitle uses settings automation bridge instead of custom backend action', async () => {
|
test('deleteYomitanDictionaryByTitle uses settings automation bridge instead of custom backend action', async () => {
|
||||||
@@ -955,16 +586,7 @@ test('deleteYomitanDictionaryByTitle uses settings automation bridge instead of
|
|||||||
);
|
);
|
||||||
|
|
||||||
assert.equal(deleted, true);
|
assert.equal(deleted, true);
|
||||||
assert.equal(
|
assert.equal(scripts.some((script) => script.includes('__subminerYomitanSettingsAutomation')), true);
|
||||||
scripts.some((script) => script.includes('__subminerYomitanSettingsAutomation')),
|
assert.equal(scripts.some((script) => script.includes('deleteDictionary')), true);
|
||||||
true,
|
assert.equal(scripts.some((script) => script.includes('subminerDeleteDictionary')), false);
|
||||||
);
|
|
||||||
assert.equal(
|
|
||||||
scripts.some((script) => script.includes('deleteDictionary')),
|
|
||||||
true,
|
|
||||||
);
|
|
||||||
assert.equal(
|
|
||||||
scripts.some((script) => script.includes('subminerDeleteDictionary')),
|
|
||||||
false,
|
|
||||||
);
|
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -45,7 +45,6 @@ export interface YomitanScanToken {
|
|||||||
headword: string;
|
headword: string;
|
||||||
startPos: number;
|
startPos: number;
|
||||||
endPos: number;
|
endPos: number;
|
||||||
isNameMatch?: boolean;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
interface YomitanProfileMetadata {
|
interface YomitanProfileMetadata {
|
||||||
@@ -76,8 +75,7 @@ function isScanTokenArray(value: unknown): value is YomitanScanToken[] {
|
|||||||
typeof entry.reading === 'string' &&
|
typeof entry.reading === 'string' &&
|
||||||
typeof entry.headword === 'string' &&
|
typeof entry.headword === 'string' &&
|
||||||
typeof entry.startPos === 'number' &&
|
typeof entry.startPos === 'number' &&
|
||||||
typeof entry.endPos === 'number' &&
|
typeof entry.endPos === 'number',
|
||||||
(entry.isNameMatch === undefined || typeof entry.isNameMatch === 'boolean'),
|
|
||||||
)
|
)
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@@ -562,7 +560,9 @@ async function createYomitanExtensionWindow(
|
|||||||
});
|
});
|
||||||
return window;
|
return window;
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
logger.error(`Failed to create hidden Yomitan ${pageName} window: ${(err as Error).message}`);
|
logger.error(
|
||||||
|
`Failed to create hidden Yomitan ${pageName} window: ${(err as Error).message}`,
|
||||||
|
);
|
||||||
if (!window.isDestroyed()) {
|
if (!window.isDestroyed()) {
|
||||||
window.destroy();
|
window.destroy();
|
||||||
}
|
}
|
||||||
@@ -772,85 +772,24 @@ const YOMITAN_SCANNING_HELPERS = String.raw`
|
|||||||
return segments;
|
return segments;
|
||||||
}
|
}
|
||||||
function getPreferredHeadword(dictionaryEntries, token) {
|
function getPreferredHeadword(dictionaryEntries, token) {
|
||||||
function appendDictionaryNames(target, value) {
|
for (const dictionaryEntry of dictionaryEntries || []) {
|
||||||
if (!value || typeof value !== 'object') {
|
for (const headword of dictionaryEntry.headwords || []) {
|
||||||
return;
|
const validSources = [];
|
||||||
}
|
|
||||||
const candidates = [
|
|
||||||
value.dictionary,
|
|
||||||
value.dictionaryName,
|
|
||||||
value.name,
|
|
||||||
value.title,
|
|
||||||
value.dictionaryTitle,
|
|
||||||
value.dictionaryAlias
|
|
||||||
];
|
|
||||||
for (const candidate of candidates) {
|
|
||||||
if (typeof candidate === 'string' && candidate.trim().length > 0) {
|
|
||||||
target.push(candidate.trim());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
function getDictionaryEntryNames(entry) {
|
|
||||||
const names = [];
|
|
||||||
appendDictionaryNames(names, entry);
|
|
||||||
for (const definition of entry?.definitions || []) {
|
|
||||||
appendDictionaryNames(names, definition);
|
|
||||||
}
|
|
||||||
for (const frequency of entry?.frequencies || []) {
|
|
||||||
appendDictionaryNames(names, frequency);
|
|
||||||
}
|
|
||||||
for (const pronunciation of entry?.pronunciations || []) {
|
|
||||||
appendDictionaryNames(names, pronunciation);
|
|
||||||
}
|
|
||||||
return names;
|
|
||||||
}
|
|
||||||
function isNameDictionaryEntry(entry) {
|
|
||||||
if (!includeNameMatchMetadata || !entry || typeof entry !== 'object') {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
return getDictionaryEntryNames(entry).some((name) => name.startsWith("SubMiner Character Dictionary"));
|
|
||||||
}
|
|
||||||
function hasExactPrimarySource(headword, token) {
|
|
||||||
for (const src of headword.sources || []) {
|
for (const src of headword.sources || []) {
|
||||||
if (src.originalText !== token) { continue; }
|
if (src.originalText !== token) { continue; }
|
||||||
if (!src.isPrimary) { continue; }
|
if (!src.isPrimary) { continue; }
|
||||||
if (src.matchType !== 'exact') { continue; }
|
if (src.matchType !== 'exact') { continue; }
|
||||||
return true;
|
validSources.push(src);
|
||||||
}
|
}
|
||||||
return false;
|
if (validSources.length > 0) { return {term: headword.term, reading: headword.reading}; }
|
||||||
}
|
|
||||||
let matchedNameDictionary = false;
|
|
||||||
if (includeNameMatchMetadata) {
|
|
||||||
for (const dictionaryEntry of dictionaryEntries || []) {
|
|
||||||
if (!isNameDictionaryEntry(dictionaryEntry)) { continue; }
|
|
||||||
for (const headword of dictionaryEntry.headwords || []) {
|
|
||||||
if (!hasExactPrimarySource(headword, token)) { continue; }
|
|
||||||
matchedNameDictionary = true;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
if (matchedNameDictionary) { break; }
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
for (const dictionaryEntry of dictionaryEntries || []) {
|
const fallback = dictionaryEntries?.[0]?.headwords?.[0];
|
||||||
for (const headword of dictionaryEntry.headwords || []) {
|
return fallback ? {term: fallback.term, reading: fallback.reading} : null;
|
||||||
if (!hasExactPrimarySource(headword, token)) { continue; }
|
|
||||||
return {
|
|
||||||
term: headword.term,
|
|
||||||
reading: headword.reading,
|
|
||||||
isNameMatch: matchedNameDictionary || isNameDictionaryEntry(dictionaryEntry)
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return null;
|
|
||||||
}
|
}
|
||||||
`;
|
`;
|
||||||
|
|
||||||
function buildYomitanScanningScript(
|
function buildYomitanScanningScript(text: string, profileIndex: number, scanLength: number): string {
|
||||||
text: string,
|
|
||||||
profileIndex: number,
|
|
||||||
scanLength: number,
|
|
||||||
includeNameMatchMetadata: boolean,
|
|
||||||
): string {
|
|
||||||
return `
|
return `
|
||||||
(async () => {
|
(async () => {
|
||||||
const invoke = (action, params) =>
|
const invoke = (action, params) =>
|
||||||
@@ -872,7 +811,6 @@ function buildYomitanScanningScript(
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
${YOMITAN_SCANNING_HELPERS}
|
${YOMITAN_SCANNING_HELPERS}
|
||||||
const includeNameMatchMetadata = ${includeNameMatchMetadata ? 'true' : 'false'};
|
|
||||||
const text = ${JSON.stringify(text)};
|
const text = ${JSON.stringify(text)};
|
||||||
const details = {matchType: "exact", deinflect: true};
|
const details = {matchType: "exact", deinflect: true};
|
||||||
const tokens = [];
|
const tokens = [];
|
||||||
@@ -896,7 +834,6 @@ ${YOMITAN_SCANNING_HELPERS}
|
|||||||
headword: preferredHeadword.term,
|
headword: preferredHeadword.term,
|
||||||
startPos: i,
|
startPos: i,
|
||||||
endPos: i + originalTextLength,
|
endPos: i + originalTextLength,
|
||||||
isNameMatch: includeNameMatchMetadata && preferredHeadword.isNameMatch === true,
|
|
||||||
});
|
});
|
||||||
i += originalTextLength;
|
i += originalTextLength;
|
||||||
continue;
|
continue;
|
||||||
@@ -1007,9 +944,6 @@ export async function requestYomitanScanTokens(
|
|||||||
text: string,
|
text: string,
|
||||||
deps: YomitanParserRuntimeDeps,
|
deps: YomitanParserRuntimeDeps,
|
||||||
logger: LoggerLike,
|
logger: LoggerLike,
|
||||||
options?: {
|
|
||||||
includeNameMatchMetadata?: boolean;
|
|
||||||
},
|
|
||||||
): Promise<YomitanScanToken[] | null> {
|
): Promise<YomitanScanToken[] | null> {
|
||||||
const yomitanExt = deps.getYomitanExt();
|
const yomitanExt = deps.getYomitanExt();
|
||||||
if (!text || !yomitanExt) {
|
if (!text || !yomitanExt) {
|
||||||
@@ -1028,12 +962,7 @@ export async function requestYomitanScanTokens(
|
|||||||
|
|
||||||
try {
|
try {
|
||||||
const rawResult = await parserWindow.webContents.executeJavaScript(
|
const rawResult = await parserWindow.webContents.executeJavaScript(
|
||||||
buildYomitanScanningScript(
|
buildYomitanScanningScript(text, profileIndex, scanLength),
|
||||||
text,
|
|
||||||
profileIndex,
|
|
||||||
scanLength,
|
|
||||||
options?.includeNameMatchMetadata === true,
|
|
||||||
),
|
|
||||||
true,
|
true,
|
||||||
);
|
);
|
||||||
if (isScanTokenArray(rawResult)) {
|
if (isScanTokenArray(rawResult)) {
|
||||||
@@ -1041,15 +970,13 @@ export async function requestYomitanScanTokens(
|
|||||||
}
|
}
|
||||||
if (Array.isArray(rawResult)) {
|
if (Array.isArray(rawResult)) {
|
||||||
const selectedTokens = selectYomitanParseTokens(rawResult, () => false, 'headword');
|
const selectedTokens = selectYomitanParseTokens(rawResult, () => false, 'headword');
|
||||||
return (
|
return selectedTokens?.map((token) => ({
|
||||||
selectedTokens?.map((token) => ({
|
|
||||||
surface: token.surface,
|
surface: token.surface,
|
||||||
reading: token.reading,
|
reading: token.reading,
|
||||||
headword: token.headword,
|
headword: token.headword,
|
||||||
startPos: token.startPos,
|
startPos: token.startPos,
|
||||||
endPos: token.endPos,
|
endPos: token.endPos,
|
||||||
})) ?? null
|
})) ?? null;
|
||||||
);
|
|
||||||
}
|
}
|
||||||
return null;
|
return null;
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
@@ -1523,12 +1450,7 @@ export async function getYomitanDictionaryInfo(
|
|||||||
deps: YomitanParserRuntimeDeps,
|
deps: YomitanParserRuntimeDeps,
|
||||||
logger: LoggerLike,
|
logger: LoggerLike,
|
||||||
): Promise<YomitanDictionaryInfo[]> {
|
): Promise<YomitanDictionaryInfo[]> {
|
||||||
const result = await invokeYomitanBackendAction<unknown>(
|
const result = await invokeYomitanBackendAction<unknown>('getDictionaryInfo', undefined, deps, logger);
|
||||||
'getDictionaryInfo',
|
|
||||||
undefined,
|
|
||||||
deps,
|
|
||||||
logger,
|
|
||||||
);
|
|
||||||
if (!Array.isArray(result)) {
|
if (!Array.isArray(result)) {
|
||||||
return [];
|
return [];
|
||||||
}
|
}
|
||||||
@@ -1551,12 +1473,7 @@ export async function getYomitanSettingsFull(
|
|||||||
deps: YomitanParserRuntimeDeps,
|
deps: YomitanParserRuntimeDeps,
|
||||||
logger: LoggerLike,
|
logger: LoggerLike,
|
||||||
): Promise<Record<string, unknown> | null> {
|
): Promise<Record<string, unknown> | null> {
|
||||||
const result = await invokeYomitanBackendAction<unknown>(
|
const result = await invokeYomitanBackendAction<unknown>('optionsGetFull', undefined, deps, logger);
|
||||||
'optionsGetFull',
|
|
||||||
undefined,
|
|
||||||
deps,
|
|
||||||
logger,
|
|
||||||
);
|
|
||||||
return isObject(result) ? result : null;
|
return isObject(result) ? result : null;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1663,7 +1580,7 @@ export async function upsertYomitanDictionarySettings(
|
|||||||
(entry) =>
|
(entry) =>
|
||||||
isObject(entry) &&
|
isObject(entry) &&
|
||||||
typeof (entry as { name?: unknown }).name === 'string' &&
|
typeof (entry as { name?: unknown }).name === 'string' &&
|
||||||
(entry as { name: string }).name.trim() === normalizedTitle,
|
((entry as { name: string }).name.trim() === normalizedTitle),
|
||||||
);
|
);
|
||||||
|
|
||||||
if (existingIndex >= 0) {
|
if (existingIndex >= 0) {
|
||||||
@@ -1679,7 +1596,7 @@ export async function upsertYomitanDictionarySettings(
|
|||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
dictionaries.push(createDefaultDictionarySettings(normalizedTitle, true));
|
dictionaries.unshift(createDefaultDictionarySettings(normalizedTitle, true));
|
||||||
changed = true;
|
changed = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -90,10 +90,7 @@ export function shouldCopyYomitanExtension(sourceDir: string, targetDir: string)
|
|||||||
return sourceHash === null || targetHash === null || sourceHash !== targetHash;
|
return sourceHash === null || targetHash === null || sourceHash !== targetHash;
|
||||||
}
|
}
|
||||||
|
|
||||||
export function ensureExtensionCopy(
|
export function ensureExtensionCopy(sourceDir: string, userDataPath: string): {
|
||||||
sourceDir: string,
|
|
||||||
userDataPath: string,
|
|
||||||
): {
|
|
||||||
targetDir: string;
|
targetDir: string;
|
||||||
copied: boolean;
|
copied: boolean;
|
||||||
} {
|
} {
|
||||||
|
|||||||
@@ -75,10 +75,7 @@ test('ensureExtensionCopy refreshes copied extension when display files change',
|
|||||||
assert.equal(result.targetDir, targetDir);
|
assert.equal(result.targetDir, targetDir);
|
||||||
assert.equal(result.copied, true);
|
assert.equal(result.copied, true);
|
||||||
assert.equal(
|
assert.equal(
|
||||||
fs.readFileSync(
|
fs.readFileSync(path.join(targetDir, 'js', 'display', 'structured-content-generator.js'), 'utf8'),
|
||||||
path.join(targetDir, 'js', 'display', 'structured-content-generator.js'),
|
|
||||||
'utf8',
|
|
||||||
),
|
|
||||||
'new display code',
|
'new display code',
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,17 +1,13 @@
|
|||||||
import { BrowserWindow, Extension, session } from 'electron';
|
import { BrowserWindow, Extension, session } from 'electron';
|
||||||
import * as fs from 'fs';
|
import * as fs from 'fs';
|
||||||
|
import * as path from 'path';
|
||||||
import { createLogger } from '../../logger';
|
import { createLogger } from '../../logger';
|
||||||
import { ensureExtensionCopy } from './yomitan-extension-copy';
|
import { ensureExtensionCopy } from './yomitan-extension-copy';
|
||||||
import {
|
|
||||||
getYomitanExtensionSearchPaths,
|
|
||||||
resolveExistingYomitanExtensionPath,
|
|
||||||
} from './yomitan-extension-paths';
|
|
||||||
|
|
||||||
const logger = createLogger('main:yomitan-extension-loader');
|
const logger = createLogger('main:yomitan-extension-loader');
|
||||||
|
|
||||||
export interface YomitanExtensionLoaderDeps {
|
export interface YomitanExtensionLoaderDeps {
|
||||||
userDataPath: string;
|
userDataPath: string;
|
||||||
extensionPath?: string;
|
|
||||||
getYomitanParserWindow: () => BrowserWindow | null;
|
getYomitanParserWindow: () => BrowserWindow | null;
|
||||||
setYomitanParserWindow: (window: BrowserWindow | null) => void;
|
setYomitanParserWindow: (window: BrowserWindow | null) => void;
|
||||||
setYomitanParserReadyPromise: (promise: Promise<void> | null) => void;
|
setYomitanParserReadyPromise: (promise: Promise<void> | null) => void;
|
||||||
@@ -22,17 +18,25 @@ export interface YomitanExtensionLoaderDeps {
|
|||||||
export async function loadYomitanExtension(
|
export async function loadYomitanExtension(
|
||||||
deps: YomitanExtensionLoaderDeps,
|
deps: YomitanExtensionLoaderDeps,
|
||||||
): Promise<Extension | null> {
|
): Promise<Extension | null> {
|
||||||
const searchPaths = getYomitanExtensionSearchPaths({
|
const searchPaths = [
|
||||||
explicitPath: deps.extensionPath,
|
path.join(__dirname, '..', '..', 'vendor', 'yomitan'),
|
||||||
moduleDir: __dirname,
|
path.join(__dirname, '..', '..', '..', 'vendor', 'yomitan'),
|
||||||
resourcesPath: process.resourcesPath,
|
path.join(process.resourcesPath, 'yomitan'),
|
||||||
userDataPath: deps.userDataPath,
|
'/usr/share/SubMiner/yomitan',
|
||||||
});
|
path.join(deps.userDataPath, 'yomitan'),
|
||||||
let extPath = resolveExistingYomitanExtensionPath(searchPaths, fs.existsSync);
|
];
|
||||||
|
|
||||||
|
let extPath: string | null = null;
|
||||||
|
for (const p of searchPaths) {
|
||||||
|
if (fs.existsSync(p)) {
|
||||||
|
extPath = p;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
if (!extPath) {
|
if (!extPath) {
|
||||||
logger.error('Yomitan extension not found in any search path');
|
logger.error('Yomitan extension not found in any search path');
|
||||||
logger.error('Run `bun run build:yomitan` or install Yomitan to one of:', searchPaths);
|
logger.error('Install Yomitan to one of:', searchPaths);
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,50 +0,0 @@
|
|||||||
import assert from 'node:assert/strict';
|
|
||||||
import path from 'node:path';
|
|
||||||
import test from 'node:test';
|
|
||||||
|
|
||||||
import {
|
|
||||||
getYomitanExtensionSearchPaths,
|
|
||||||
resolveExistingYomitanExtensionPath,
|
|
||||||
} from './yomitan-extension-paths';
|
|
||||||
|
|
||||||
test('getYomitanExtensionSearchPaths prioritizes generated build output before packaged fallbacks', () => {
|
|
||||||
const searchPaths = getYomitanExtensionSearchPaths({
|
|
||||||
cwd: '/repo',
|
|
||||||
moduleDir: '/repo/dist/core/services',
|
|
||||||
resourcesPath: '/opt/SubMiner/resources',
|
|
||||||
userDataPath: '/Users/kyle/.config/SubMiner',
|
|
||||||
});
|
|
||||||
|
|
||||||
assert.deepEqual(searchPaths, [
|
|
||||||
path.join('/repo', 'build', 'yomitan'),
|
|
||||||
path.join('/opt/SubMiner/resources', 'yomitan'),
|
|
||||||
'/usr/share/SubMiner/yomitan',
|
|
||||||
path.join('/Users/kyle/.config/SubMiner', 'yomitan'),
|
|
||||||
]);
|
|
||||||
});
|
|
||||||
|
|
||||||
test('resolveExistingYomitanExtensionPath returns first manifest-backed candidate', () => {
|
|
||||||
const existing = new Set<string>([
|
|
||||||
path.join('/repo', 'build', 'yomitan', 'manifest.json'),
|
|
||||||
path.join('/repo', 'vendor', 'subminer-yomitan', 'ext', 'manifest.json'),
|
|
||||||
]);
|
|
||||||
|
|
||||||
const resolved = resolveExistingYomitanExtensionPath(
|
|
||||||
[
|
|
||||||
path.join('/repo', 'build', 'yomitan'),
|
|
||||||
path.join('/repo', 'vendor', 'subminer-yomitan', 'ext'),
|
|
||||||
],
|
|
||||||
(candidate) => existing.has(candidate),
|
|
||||||
);
|
|
||||||
|
|
||||||
assert.equal(resolved, path.join('/repo', 'build', 'yomitan'));
|
|
||||||
});
|
|
||||||
|
|
||||||
test('resolveExistingYomitanExtensionPath ignores source tree without built manifest', () => {
|
|
||||||
const resolved = resolveExistingYomitanExtensionPath(
|
|
||||||
[path.join('/repo', 'vendor', 'subminer-yomitan', 'ext')],
|
|
||||||
() => false,
|
|
||||||
);
|
|
||||||
|
|
||||||
assert.equal(resolved, null);
|
|
||||||
});
|
|
||||||
@@ -1,60 +0,0 @@
|
|||||||
import * as fs from 'node:fs';
|
|
||||||
import * as path from 'node:path';
|
|
||||||
|
|
||||||
export interface YomitanExtensionPathOptions {
|
|
||||||
explicitPath?: string;
|
|
||||||
cwd?: string;
|
|
||||||
moduleDir?: string;
|
|
||||||
resourcesPath?: string;
|
|
||||||
userDataPath?: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
function pushUnique(values: string[], candidate: string | null | undefined): void {
|
|
||||||
if (!candidate || values.includes(candidate)) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
values.push(candidate);
|
|
||||||
}
|
|
||||||
|
|
||||||
export function getYomitanExtensionSearchPaths(
|
|
||||||
options: YomitanExtensionPathOptions = {},
|
|
||||||
): string[] {
|
|
||||||
const searchPaths: string[] = [];
|
|
||||||
|
|
||||||
pushUnique(searchPaths, options.explicitPath ? path.resolve(options.explicitPath) : null);
|
|
||||||
pushUnique(searchPaths, options.cwd ? path.resolve(options.cwd, 'build', 'yomitan') : null);
|
|
||||||
pushUnique(
|
|
||||||
searchPaths,
|
|
||||||
options.moduleDir
|
|
||||||
? path.resolve(options.moduleDir, '..', '..', '..', 'build', 'yomitan')
|
|
||||||
: null,
|
|
||||||
);
|
|
||||||
pushUnique(
|
|
||||||
searchPaths,
|
|
||||||
options.resourcesPath ? path.join(options.resourcesPath, 'yomitan') : null,
|
|
||||||
);
|
|
||||||
pushUnique(searchPaths, '/usr/share/SubMiner/yomitan');
|
|
||||||
pushUnique(searchPaths, options.userDataPath ? path.join(options.userDataPath, 'yomitan') : null);
|
|
||||||
|
|
||||||
return searchPaths;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function resolveExistingYomitanExtensionPath(
|
|
||||||
searchPaths: string[],
|
|
||||||
existsSync: (path: string) => boolean = fs.existsSync,
|
|
||||||
): string | null {
|
|
||||||
for (const candidate of searchPaths) {
|
|
||||||
if (existsSync(path.join(candidate, 'manifest.json'))) {
|
|
||||||
return candidate;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function resolveYomitanExtensionPath(
|
|
||||||
options: YomitanExtensionPathOptions = {},
|
|
||||||
existsSync: (path: string) => boolean = fs.existsSync,
|
|
||||||
): string | null {
|
|
||||||
return resolveExistingYomitanExtensionPath(getYomitanExtensionSearchPaths(options), existsSync);
|
|
||||||
}
|
|
||||||
@@ -2,7 +2,6 @@ import assert from 'node:assert/strict';
|
|||||||
import path from 'node:path';
|
import path from 'node:path';
|
||||||
import test from 'node:test';
|
import test from 'node:test';
|
||||||
import { pathToFileURL } from 'node:url';
|
import { pathToFileURL } from 'node:url';
|
||||||
import { resolveYomitanExtensionPath } from './yomitan-extension-paths';
|
|
||||||
|
|
||||||
class FakeStyle {
|
class FakeStyle {
|
||||||
private values = new Map<string, string>();
|
private values = new Map<string, string>();
|
||||||
@@ -156,14 +155,15 @@ function findFirstByClass(node: FakeNode, className: string): FakeNode | null {
|
|||||||
}
|
}
|
||||||
|
|
||||||
test('StructuredContentGenerator uses direct img loading for popup glossary images', async () => {
|
test('StructuredContentGenerator uses direct img loading for popup glossary images', async () => {
|
||||||
const yomitanRoot = resolveYomitanExtensionPath({ cwd: process.cwd() });
|
|
||||||
assert.ok(yomitanRoot, 'Run `bun run build:yomitan` before Yomitan integration tests.');
|
|
||||||
|
|
||||||
const { DisplayContentManager } = await import(
|
const { DisplayContentManager } = await import(
|
||||||
pathToFileURL(path.join(yomitanRoot, 'js', 'display', 'display-content-manager.js')).href
|
pathToFileURL(
|
||||||
|
path.join(process.cwd(), 'vendor/yomitan/js/display/display-content-manager.js'),
|
||||||
|
).href
|
||||||
);
|
);
|
||||||
const { StructuredContentGenerator } = await import(
|
const { StructuredContentGenerator } = await import(
|
||||||
pathToFileURL(path.join(yomitanRoot, 'js', 'display', 'structured-content-generator.js')).href
|
pathToFileURL(
|
||||||
|
path.join(process.cwd(), 'vendor/yomitan/js/display/structured-content-generator.js'),
|
||||||
|
).href
|
||||||
);
|
);
|
||||||
|
|
||||||
const createObjectURLCalls: string[] = [];
|
const createObjectURLCalls: string[] = [];
|
||||||
@@ -197,10 +197,14 @@ test('StructuredContentGenerator uses direct img loading for popup glossary imag
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
const generator = new StructuredContentGenerator(manager, new FakeDocument(), {
|
const generator = new StructuredContentGenerator(
|
||||||
|
manager,
|
||||||
|
new FakeDocument(),
|
||||||
|
{
|
||||||
devicePixelRatio: 1,
|
devicePixelRatio: 1,
|
||||||
navigator: { userAgent: 'Mozilla/5.0' },
|
navigator: { userAgent: 'Mozilla/5.0' },
|
||||||
});
|
},
|
||||||
|
);
|
||||||
|
|
||||||
const node = generator.createDefinitionImage(
|
const node = generator.createDefinitionImage(
|
||||||
{
|
{
|
||||||
|
|||||||
@@ -1,70 +0,0 @@
|
|||||||
import test from 'node:test';
|
|
||||||
import assert from 'node:assert/strict';
|
|
||||||
import fs from 'node:fs';
|
|
||||||
import path from 'node:path';
|
|
||||||
|
|
||||||
const DEAD_MODULE_PATHS = [
|
|
||||||
'src/translators/index.ts',
|
|
||||||
'src/subsync/engines.ts',
|
|
||||||
'src/subtitle/pipeline.ts',
|
|
||||||
'src/subtitle/stages/merge.ts',
|
|
||||||
'src/subtitle/stages/normalize.ts',
|
|
||||||
'src/subtitle/stages/normalize.test.ts',
|
|
||||||
'src/subtitle/stages/tokenize.ts',
|
|
||||||
'src/tokenizers/index.ts',
|
|
||||||
'src/token-mergers/index.ts',
|
|
||||||
] as const;
|
|
||||||
|
|
||||||
const FORBIDDEN_IMPORT_PATTERNS = [
|
|
||||||
/from ['"]\.\.?\/tokenizers['"]/,
|
|
||||||
/from ['"]\.\.?\/token-mergers['"]/,
|
|
||||||
/from ['"]\.\.?\/subtitle\/pipeline['"]/,
|
|
||||||
/from ['"]\.\.?\/subsync\/engines['"]/,
|
|
||||||
/from ['"]\.\.?\/translators['"]/,
|
|
||||||
] as const;
|
|
||||||
|
|
||||||
function readWorkspaceFile(relativePath: string): string {
|
|
||||||
return fs.readFileSync(path.join(process.cwd(), relativePath), 'utf8');
|
|
||||||
}
|
|
||||||
|
|
||||||
function collectSourceFiles(rootDir: string): string[] {
|
|
||||||
const absoluteRoot = path.join(process.cwd(), rootDir);
|
|
||||||
const out: string[] = [];
|
|
||||||
|
|
||||||
const visit = (currentDir: string) => {
|
|
||||||
for (const entry of fs.readdirSync(currentDir, { withFileTypes: true })) {
|
|
||||||
const fullPath = path.join(currentDir, entry.name);
|
|
||||||
if (entry.isDirectory()) {
|
|
||||||
visit(fullPath);
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
if (!fullPath.endsWith('.ts') && !fullPath.endsWith('.tsx')) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
out.push(path.relative(process.cwd(), fullPath).replaceAll('\\', '/'));
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
visit(absoluteRoot);
|
|
||||||
out.sort();
|
|
||||||
return out;
|
|
||||||
}
|
|
||||||
|
|
||||||
test('dead registry and pipeline modules stay removed from the repository', () => {
|
|
||||||
for (const relativePath of DEAD_MODULE_PATHS) {
|
|
||||||
assert.equal(
|
|
||||||
fs.existsSync(path.join(process.cwd(), relativePath)),
|
|
||||||
false,
|
|
||||||
`${relativePath} should stay deleted`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
test('live source tree no longer imports dead registry and pipeline modules', () => {
|
|
||||||
for (const relativePath of collectSourceFiles('src')) {
|
|
||||||
const source = readWorkspaceFile(relativePath);
|
|
||||||
for (const pattern of FORBIDDEN_IMPORT_PATTERNS) {
|
|
||||||
assert.doesNotMatch(source, pattern, `${relativePath} should not import ${pattern.source}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
@@ -1,82 +0,0 @@
|
|||||||
import assert from 'node:assert/strict';
|
|
||||||
import fs from 'node:fs';
|
|
||||||
import path from 'node:path';
|
|
||||||
import test from 'node:test';
|
|
||||||
import {
|
|
||||||
resolveConfigExampleOutputPaths,
|
|
||||||
writeConfigExampleArtifacts,
|
|
||||||
} from './generate-config-example';
|
|
||||||
|
|
||||||
function createWorkspace(name: string): string {
|
|
||||||
const baseDir = path.join(process.cwd(), '.tmp', 'generate-config-example-test');
|
|
||||||
fs.mkdirSync(baseDir, { recursive: true });
|
|
||||||
return fs.mkdtempSync(path.join(baseDir, `${name}-`));
|
|
||||||
}
|
|
||||||
|
|
||||||
test('resolveConfigExampleOutputPaths includes sibling docs repo and never local docs/public', () => {
|
|
||||||
const workspace = createWorkspace('with-docs-repo');
|
|
||||||
const projectRoot = path.join(workspace, 'SubMiner');
|
|
||||||
const docsRepoRoot = path.join(workspace, 'subminer-docs');
|
|
||||||
|
|
||||||
fs.mkdirSync(projectRoot, { recursive: true });
|
|
||||||
fs.mkdirSync(docsRepoRoot, { recursive: true });
|
|
||||||
|
|
||||||
try {
|
|
||||||
const outputPaths = resolveConfigExampleOutputPaths({ cwd: projectRoot });
|
|
||||||
|
|
||||||
assert.deepEqual(outputPaths, [
|
|
||||||
path.join(projectRoot, 'config.example.jsonc'),
|
|
||||||
path.join(docsRepoRoot, 'public', 'config.example.jsonc'),
|
|
||||||
]);
|
|
||||||
assert.equal(
|
|
||||||
outputPaths.includes(path.join(projectRoot, 'docs', 'public', 'config.example.jsonc')),
|
|
||||||
false,
|
|
||||||
);
|
|
||||||
} finally {
|
|
||||||
fs.rmSync(workspace, { recursive: true, force: true });
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
test('resolveConfigExampleOutputPaths stays repo-local when sibling docs repo is absent', () => {
|
|
||||||
const workspace = createWorkspace('without-docs-repo');
|
|
||||||
const projectRoot = path.join(workspace, 'SubMiner');
|
|
||||||
|
|
||||||
fs.mkdirSync(projectRoot, { recursive: true });
|
|
||||||
|
|
||||||
try {
|
|
||||||
const outputPaths = resolveConfigExampleOutputPaths({ cwd: projectRoot });
|
|
||||||
|
|
||||||
assert.deepEqual(outputPaths, [path.join(projectRoot, 'config.example.jsonc')]);
|
|
||||||
} finally {
|
|
||||||
fs.rmSync(workspace, { recursive: true, force: true });
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
test('writeConfigExampleArtifacts creates parent directories for resolved outputs', () => {
|
|
||||||
const workspace = createWorkspace('write-artifacts');
|
|
||||||
const projectRoot = path.join(workspace, 'SubMiner');
|
|
||||||
const docsRepoRoot = path.join(workspace, 'subminer-docs');
|
|
||||||
const template = '{\n "ok": true\n}\n';
|
|
||||||
|
|
||||||
fs.mkdirSync(projectRoot, { recursive: true });
|
|
||||||
fs.mkdirSync(docsRepoRoot, { recursive: true });
|
|
||||||
|
|
||||||
try {
|
|
||||||
const writtenPaths = writeConfigExampleArtifacts(template, {
|
|
||||||
cwd: projectRoot,
|
|
||||||
deps: { log: () => {} },
|
|
||||||
});
|
|
||||||
|
|
||||||
assert.deepEqual(writtenPaths, [
|
|
||||||
path.join(projectRoot, 'config.example.jsonc'),
|
|
||||||
path.join(docsRepoRoot, 'public', 'config.example.jsonc'),
|
|
||||||
]);
|
|
||||||
assert.equal(fs.readFileSync(path.join(projectRoot, 'config.example.jsonc'), 'utf8'), template);
|
|
||||||
assert.equal(
|
|
||||||
fs.readFileSync(path.join(docsRepoRoot, 'public', 'config.example.jsonc'), 'utf8'),
|
|
||||||
template,
|
|
||||||
);
|
|
||||||
} finally {
|
|
||||||
fs.rmSync(workspace, { recursive: true, force: true });
|
|
||||||
}
|
|
||||||
});
|
|
||||||
@@ -2,62 +2,18 @@ import * as fs from 'fs';
|
|||||||
import * as path from 'path';
|
import * as path from 'path';
|
||||||
import { DEFAULT_CONFIG, generateConfigTemplate } from './config';
|
import { DEFAULT_CONFIG, generateConfigTemplate } from './config';
|
||||||
|
|
||||||
// Injectable filesystem/logging hooks for writeConfigExampleArtifacts.
// Every member is optional; each one defaults to the corresponding fs or
// console implementation, which lets tests observe or stub side effects.
type ConfigExampleFsDeps = {
  // Probes whether a candidate path exists (defaults to fs.existsSync).
  existsSync?: (candidate: string) => boolean;
  // Creates a directory tree recursively (defaults to fs.mkdirSync).
  mkdirSync?: (candidate: string, options: { recursive: true }) => void;
  // Writes text content to a file (defaults to fs.writeFileSync).
  writeFileSync?: (candidate: string, content: string, encoding: BufferEncoding) => void;
  // Progress logger invoked once per written artifact (defaults to console.log).
  log?: (message: string) => void;
};
|
|
||||||
|
|
||||||
export function resolveConfigExampleOutputPaths(options?: {
|
|
||||||
cwd?: string;
|
|
||||||
docsRepoName?: string;
|
|
||||||
existsSync?: (candidate: string) => boolean;
|
|
||||||
}): string[] {
|
|
||||||
const cwd = options?.cwd ?? process.cwd();
|
|
||||||
const existsSync = options?.existsSync ?? fs.existsSync;
|
|
||||||
const docsRepoName = options?.docsRepoName ?? 'subminer-docs';
|
|
||||||
const outputPaths = [path.join(cwd, 'config.example.jsonc')];
|
|
||||||
const docsRepoRoot = path.resolve(cwd, '..', docsRepoName);
|
|
||||||
|
|
||||||
if (existsSync(docsRepoRoot)) {
|
|
||||||
outputPaths.push(path.join(docsRepoRoot, 'public', 'config.example.jsonc'));
|
|
||||||
}
|
|
||||||
|
|
||||||
return outputPaths;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function writeConfigExampleArtifacts(
|
|
||||||
template: string,
|
|
||||||
options?: {
|
|
||||||
cwd?: string;
|
|
||||||
docsRepoName?: string;
|
|
||||||
deps?: ConfigExampleFsDeps;
|
|
||||||
},
|
|
||||||
): string[] {
|
|
||||||
const mkdirSync = options?.deps?.mkdirSync ?? fs.mkdirSync;
|
|
||||||
const writeFileSync = options?.deps?.writeFileSync ?? fs.writeFileSync;
|
|
||||||
const log = options?.deps?.log ?? console.log;
|
|
||||||
const outputPaths = resolveConfigExampleOutputPaths({
|
|
||||||
cwd: options?.cwd,
|
|
||||||
docsRepoName: options?.docsRepoName,
|
|
||||||
existsSync: options?.deps?.existsSync,
|
|
||||||
});
|
|
||||||
|
|
||||||
for (const outputPath of outputPaths) {
|
|
||||||
mkdirSync(path.dirname(outputPath), { recursive: true });
|
|
||||||
writeFileSync(outputPath, template, 'utf-8');
|
|
||||||
log(`Generated ${outputPath}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
return outputPaths;
|
|
||||||
}
|
|
||||||
|
|
||||||
function main(): void {
|
function main(): void {
|
||||||
const template = generateConfigTemplate(DEFAULT_CONFIG);
|
const template = generateConfigTemplate(DEFAULT_CONFIG);
|
||||||
writeConfigExampleArtifacts(template);
|
const outputPaths = [
|
||||||
|
path.join(process.cwd(), 'config.example.jsonc'),
|
||||||
|
path.join(process.cwd(), 'docs', 'public', 'config.example.jsonc'),
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const outputPath of outputPaths) {
|
||||||
|
fs.mkdirSync(path.dirname(outputPath), { recursive: true });
|
||||||
|
fs.writeFileSync(outputPath, template, 'utf-8');
|
||||||
|
console.log(`Generated ${outputPath}`);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (require.main === module) {
|
|
||||||
main();
|
main();
|
||||||
}
|
|
||||||
|
|||||||
@@ -1,35 +1,12 @@
|
|||||||
import assert from 'node:assert/strict';
|
import assert from 'node:assert/strict';
|
||||||
import test from 'node:test';
|
import test from 'node:test';
|
||||||
import {
|
import {
|
||||||
normalizeStartupArgv,
|
|
||||||
sanitizeHelpEnv,
|
sanitizeHelpEnv,
|
||||||
sanitizeStartupEnv,
|
|
||||||
sanitizeBackgroundEnv,
|
sanitizeBackgroundEnv,
|
||||||
shouldDetachBackgroundLaunch,
|
shouldDetachBackgroundLaunch,
|
||||||
shouldHandleHelpOnlyAtEntry,
|
shouldHandleHelpOnlyAtEntry,
|
||||||
} from './main-entry-runtime';
|
} from './main-entry-runtime';
|
||||||
|
|
||||||
test('normalizeStartupArgv defaults no-arg startup to --start --background', () => {
|
|
||||||
assert.deepEqual(normalizeStartupArgv(['SubMiner.AppImage'], {}), [
|
|
||||||
'SubMiner.AppImage',
|
|
||||||
'--start',
|
|
||||||
'--background',
|
|
||||||
]);
|
|
||||||
assert.deepEqual(
|
|
||||||
normalizeStartupArgv(['SubMiner.AppImage', '--password-store', 'gnome-libsecret'], {}),
|
|
||||||
['SubMiner.AppImage', '--password-store', 'gnome-libsecret', '--start', '--background'],
|
|
||||||
);
|
|
||||||
assert.deepEqual(normalizeStartupArgv(['SubMiner.AppImage', '--background'], {}), [
|
|
||||||
'SubMiner.AppImage',
|
|
||||||
'--background',
|
|
||||||
'--start',
|
|
||||||
]);
|
|
||||||
assert.deepEqual(normalizeStartupArgv(['SubMiner.AppImage', '--help'], {}), [
|
|
||||||
'SubMiner.AppImage',
|
|
||||||
'--help',
|
|
||||||
]);
|
|
||||||
});
|
|
||||||
|
|
||||||
test('shouldHandleHelpOnlyAtEntry detects help-only invocation', () => {
|
test('shouldHandleHelpOnlyAtEntry detects help-only invocation', () => {
|
||||||
assert.equal(shouldHandleHelpOnlyAtEntry(['--help'], {}), true);
|
assert.equal(shouldHandleHelpOnlyAtEntry(['--help'], {}), true);
|
||||||
assert.equal(shouldHandleHelpOnlyAtEntry(['--help', '--start'], {}), false);
|
assert.equal(shouldHandleHelpOnlyAtEntry(['--help', '--start'], {}), false);
|
||||||
@@ -37,14 +14,6 @@ test('shouldHandleHelpOnlyAtEntry detects help-only invocation', () => {
|
|||||||
assert.equal(shouldHandleHelpOnlyAtEntry(['--help'], { ELECTRON_RUN_AS_NODE: '1' }), false);
|
assert.equal(shouldHandleHelpOnlyAtEntry(['--help'], { ELECTRON_RUN_AS_NODE: '1' }), false);
|
||||||
});
|
});
|
||||||
|
|
||||||
test('sanitizeStartupEnv suppresses warnings and lsfg layer', () => {
|
|
||||||
const env = sanitizeStartupEnv({
|
|
||||||
VK_INSTANCE_LAYERS: 'foo:lsfg-vk:bar',
|
|
||||||
});
|
|
||||||
assert.equal(env.NODE_NO_WARNINGS, '1');
|
|
||||||
assert.equal('VK_INSTANCE_LAYERS' in env, false);
|
|
||||||
});
|
|
||||||
|
|
||||||
test('sanitizeHelpEnv suppresses warnings and lsfg layer', () => {
|
test('sanitizeHelpEnv suppresses warnings and lsfg layer', () => {
|
||||||
const env = sanitizeHelpEnv({
|
const env = sanitizeHelpEnv({
|
||||||
VK_INSTANCE_LAYERS: 'foo:lsfg-vk:bar',
|
VK_INSTANCE_LAYERS: 'foo:lsfg-vk:bar',
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user